Compare commits
617 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 6db4d02a91 | |||
| 1354cee4ed | |||
| 72ddb34a1c | |||
| ae2d4bac7f | |||
| 70c47c13a4 | |||
| 8021751d69 | |||
| 6b29b5220d | |||
| 07286005a6 | |||
| 721c49dc71 | |||
| 658a30d836 | |||
| 94688bd1e5 | |||
| 26aaf221c7 | |||
| 530bf3c05e | |||
| dfedc6b025 | |||
| 4e3fe6f35f | |||
| af6706a4c9 | |||
| b6c3001df2 | |||
| b8ff84ed9b | |||
| b83bb636f0 | |||
| 7d6b6c4b4c | |||
| 2629865c59 | |||
| fd546f270b | |||
| 641fb971d1 | |||
| e40cde221c | |||
| f956ef92e9 | |||
| 687e0a7028 | |||
| 7c15cd83f4 | |||
| 8f6ac0660e | |||
| 422d5afa6b | |||
| f653094040 | |||
| 8430a6c40e | |||
| d4fa76463d | |||
| 1e5769df11 | |||
| f1a800d37f | |||
| ee4958e7dd | |||
| dbead4a62e | |||
| 2b2d19e78d | |||
| 9ed9fecc0f | |||
| 06e64dec7a | |||
| 420b58c346 | |||
| 36554ce92f | |||
| c5265b47cf | |||
| c29ac1e6a3 | |||
| 6e86835599 | |||
| f70eb5df40 | |||
| 4650aa4a3f | |||
| 34e3101edd | |||
| fdbdf11bab | |||
| db238e7705 | |||
| a196cfe071 | |||
| 442d319aef | |||
| a48c243e3e | |||
| 5c17cb671c | |||
| 94d004e72c | |||
| 21a9705b78 | |||
| 137182792e | |||
| 3dbb845603 | |||
| ac7a6bce2c | |||
| 9baf3d01d3 | |||
| 1db13030fa | |||
| be2fb57d85 | |||
| 3a99b9adff | |||
| 8cb48034f0 | |||
| 49c96c9202 | |||
| ad2ea9498b | |||
| 3a085e114d | |||
| 3fbb740101 | |||
| fa99ac39b4 | |||
| d0de777452 | |||
| eb9979f048 | |||
| c57c16c499 | |||
| 70284c987e | |||
| 64f2ab6591 | |||
| c739bf29c4 | |||
| bb1093e8b2 | |||
| 51eaeddfba | |||
| 122c234570 | |||
| ddf844b680 | |||
| a245f40bcf | |||
| cafb06c011 | |||
| cbfbb5b6d6 | |||
| 606a75a068 | |||
| 0c26aafc2f | |||
| 8bd4ed2ec2 | |||
| 9c5fb3107e | |||
| 1c86ab66be | |||
| f05e3cf281 | |||
| 177b70b995 | |||
| 3b2b28559a | |||
| 7e89db8fa1 | |||
| 9b3ab25cea | |||
| 54ef25214c | |||
| b9db498199 | |||
| 6503ebfb1a | |||
| baf35dc906 | |||
| f4466918f3 | |||
| 3b6caa38aa | |||
| b1127c2a01 | |||
| ad6429d26f | |||
| 59cdafaf70 | |||
| 34ebc2d179 | |||
| 49c9de63cd | |||
| 502a604a26 | |||
| fca58f2d6f | |||
| b2bbdcc0df | |||
| bdb5de5a2f | |||
| 960d0940bb | |||
| e7a91c2f0a | |||
| e583afdb49 | |||
| 53138d7b34 | |||
| e45acf4613 | |||
| 47e3364332 | |||
| 49760f03a0 | |||
| 2f5afcddd1 | |||
| 67beee1444 | |||
| cd80e92df1 | |||
| e303c92041 | |||
| ccd36dd658 | |||
| 3a9953e25c | |||
| 311ea897cf | |||
| 4cc3e4def3 | |||
| 301a5249a8 | |||
| 4bff89e8af | |||
| fa29f9ff32 | |||
| aeba9eb285 | |||
| 5d2e2d7b67 | |||
| e61c4f722f | |||
| 505d3be9b9 | |||
| bec52734f9 | |||
| 73f7531ed5 | |||
| 96fbca62e0 | |||
| 36aed99813 | |||
| e1cd69e672 | |||
| 41f75282d8 | |||
| aa6ef1d6f2 | |||
| 40fc16b1ea | |||
| db882d350c | |||
| 3edd4f851f | |||
| cd2be9aa9c | |||
| 52e1c189fa | |||
| 0231f021ad | |||
| 25e9fba38c | |||
| 0fc61adb30 | |||
| 84ff166375 | |||
| 04ab9a06e7 | |||
| f8454d500a | |||
| 1904494276 | |||
| a421f6c501 | |||
| 3359910784 | |||
| 1f7bab2faf | |||
| 4d85b56e1e | |||
| 261e46a6d1 | |||
| 556cf508f1 | |||
| a668eafc38 | |||
| a0fdf4b940 | |||
| 6f845f9fa5 | |||
| 7e9c6b2dbc | |||
| d9c19b98b7 | |||
| 841eca0e99 | |||
| 1a819e74c3 | |||
| fcf56710d3 | |||
| c8094915b7 | |||
| 6d0e256f1b | |||
| 8e76e14f4e | |||
| b5bb232044 | |||
| 2503f406b6 | |||
| b04132e341 | |||
| 9b7e2571bd | |||
| 888265b36a | |||
| 6da81d760d | |||
| 08cc94bdb3 | |||
| e010be17a9 | |||
| a898bad02d | |||
| 8e1cbbf967 | |||
| 4a4ba6592b | |||
| b3624743e3 | |||
| e8a284d2e7 | |||
| b557a56613 | |||
| 67321228b1 | |||
| ad6afc0415 | |||
| df6a761324 | |||
| 33b1a53f37 | |||
| 0d6eed588a | |||
| 6e31ad982d | |||
| d19dc5e6c6 | |||
| 6d01bcfe79 | |||
| c0c5d70a6b | |||
| 6e1ab54c6b | |||
| fb637ae5ea | |||
| 89a4c2e572 | |||
| 9c09f92c51 | |||
| 3a01a398e2 | |||
| aaad1c124c | |||
| 7509424226 | |||
| c601c2707f | |||
| 433bfa1549 | |||
| c2255f8b8a | |||
| f7f76b9ec6 | |||
| 59dfeb58b4 | |||
| dd781df6a4 | |||
| f4ea0721dc | |||
| 4feb5ae1ae | |||
| 514117142f | |||
| 51ca3afb67 | |||
| 8c88593be9 | |||
| 4777e0412c | |||
| 8375ffafa1 | |||
| e4f87fd7ef | |||
| 61f32c0294 | |||
| b73744d4ae | |||
| 458fe68871 | |||
| c7ff530834 | |||
| 72acd50b7e | |||
| ec95367677 | |||
| 7f58b6ff11 | |||
| 9f7978da27 | |||
| 05fd02154c | |||
| e0796407e9 | |||
| af1f770b28 | |||
| 4ab1bae8d1 | |||
| dd9b209903 | |||
| 848ae63877 | |||
| bf923a264c | |||
| 7b78855cc1 | |||
| dc4d5272fd | |||
| 807784b414 | |||
| 85a36b8133 | |||
| e1341837ce | |||
| fd1787a6a3 | |||
| cb65e29c34 | |||
| 72d5bc4476 | |||
| e8045a52ad | |||
| 1dce70f548 | |||
| 2e26e7a2fe | |||
| 32e0c3c0b7 | |||
| 1426e918a8 | |||
| 8adc3738e1 | |||
| a4cd2c6568 | |||
| f2a5c5d518 | |||
| 03f7d5d7e9 | |||
| 4043a2b559 | |||
| d495a3f666 | |||
| f21618c824 | |||
| 6cfdc066d9 | |||
| 11f5008b1e | |||
| 726271fdaa | |||
| f214b54f42 | |||
| 75da87ae54 | |||
| bf533183f4 | |||
| e4a72c720c | |||
| 35a66f8c55 | |||
| 9d54df626e | |||
| 3927541c27 | |||
| d9abb1d666 | |||
| 5e1bfd1ed7 | |||
| 76a374d793 | |||
| 8dc6b4d854 | |||
| 7cc0bb1f42 | |||
| 9cf9c187a5 | |||
| 82b14caf89 | |||
| 7fe67a47f5 | |||
| 54e80a162b | |||
| 63a1507d53 | |||
| e20088e58d | |||
| 5ab4ea854b | |||
| 6f667a7727 | |||
| e725ccd6ce | |||
| 0f691e4b50 | |||
| 49b8201b89 | |||
| 9c9b21d47c | |||
| 2c6fcd0bc4 | |||
| 0de867988d | |||
| d8e9511b37 | |||
| 5a84c5ea0a | |||
| b9eeae3329 | |||
| 41f609f00d | |||
| 82f7d27aa6 | |||
| d0d7324b50 | |||
| 57015791a0 | |||
| 488ae87b2c | |||
| 320c119223 | |||
| 336de7e512 | |||
| 646c78df3d | |||
| a1f707e800 | |||
| 8611af0c85 | |||
| 10c4cea0b0 | |||
| 80f59b0112 | |||
| fb56e50ecf | |||
| 6aaea911c5 | |||
| 04858f5f22 | |||
| 8fcbe1388d | |||
| c4c6e7f81d | |||
| f7ce76b832 | |||
| 022c0418f5 | |||
| 9a2acf9f6b | |||
| 136aec22fd | |||
| b1b0304111 | |||
| b8ba2006cd | |||
| 637e0c8fb9 | |||
| 60ba55064f | |||
| 375350a097 | |||
| 66c177c3b2 | |||
| 7249c82d1a | |||
| f19f2dc827 | |||
| 41dad89ea1 | |||
| 618fabfcb4 | |||
| 39b22d5c94 | |||
| 9788be32dd | |||
| 572e6aaac0 | |||
| 729bedc5a8 | |||
| 54431c1d15 | |||
| 14ace06078 | |||
| 7ab10f8da2 | |||
| c3534acab1 | |||
| 8d4d87b9dc | |||
| 23ac08ca24 | |||
| 8ef56b0139 | |||
| 837e68b764 | |||
| 22146fcaba | |||
| 4715993196 | |||
| bfa06d83ee | |||
| 54572307c2 | |||
| 65b659ce14 | |||
| b610bd9a41 | |||
| 0e64e0c9ae | |||
| 34bb2bc711 | |||
| 88ab1421cb | |||
| d19a819acd | |||
| 4220739ada | |||
| a4c244495f | |||
| 492ea96424 | |||
| 296576c2b8 | |||
| ecfcfc04d4 | |||
| 86273c2a1f | |||
| 43b4b0768b | |||
| 335282f334 | |||
| 0c25dd448f | |||
| f510a94214 | |||
| fb1bf8dbab | |||
| 6b8ea5856c | |||
| 41edcb97cd | |||
| f3cc1a432e | |||
| 66e935bcf4 | |||
| 255c2421f4 | |||
| 127efa4802 | |||
| a5e4f46217 | |||
| 850d018c72 | |||
| c127e43cc7 | |||
| 2263dee2d7 | |||
| cc819d5c29 | |||
| adcfac1570 | |||
| 817429d57b | |||
| d41574edb5 | |||
| de8d9f2ed2 | |||
| c939112a9b | |||
| 8e8de60341 | |||
| 001882a57e | |||
| e5afa2d57c | |||
| c14cd3f4f4 | |||
| 6a445c826d | |||
| 615e7651e2 | |||
| cb87d70e19 | |||
| 0f0c1cb162 | |||
| b45eaadf04 | |||
| 957ef3b2c1 | |||
| 78af5dd66a | |||
| 443384797d | |||
| 8d03620b24 | |||
| ee6f44ee67 | |||
| 465594c5a6 | |||
| 70c74813c0 | |||
| 276756ca1e | |||
| e618075797 | |||
| 88b3713c15 | |||
| bd38510760 | |||
| 3ff62faf98 | |||
| 892be3d2fd | |||
| aad3f2c0ea | |||
| f3636bc5ba | |||
| 38b5498206 | |||
| 08891eb3b3 | |||
| ff20aa5998 | |||
| cf92340f26 | |||
| a41b40d0b5 | |||
| b9bfefa563 | |||
| 800bbd0409 | |||
| ed53667225 | |||
| 77b0f4e4ec | |||
| 3158c60716 | |||
| 2d0996210e | |||
| 99e6627817 | |||
| 4a1825ec6c | |||
| 4e2a7c5bcf | |||
| 9194b6ab24 | |||
| d91dc8b2e8 | |||
| 317312b27e | |||
| 06a85d5ce5 | |||
| 577a4612fd | |||
| 768ba6a64f | |||
| 12c0c5f860 | |||
| b25150effe | |||
| cb00b0a0e8 | |||
| 51ebf51b4e | |||
| 057038d20f | |||
| 24787567b6 | |||
| 705e6a0225 | |||
| 7436f39214 | |||
| 1419c127ef | |||
| bf74641fc9 | |||
| a0e738b4e8 | |||
| acec3ae6d2 | |||
| 9fabe2d322 | |||
| 7a064eb6ee | |||
| 23422094e0 | |||
| d8f2b034c0 | |||
| f4e89451b6 | |||
| 82c5e058fa | |||
| a996856b7b | |||
| 05765a33f7 | |||
| 01f7dcdc3f | |||
| 140800bf4d | |||
| d85d47746a | |||
| 9ccb1658f6 | |||
| 46055b400a | |||
| 606d011812 | |||
| 456d5e0f92 | |||
| 3f6d261684 | |||
| f303e953c5 | |||
| 14587fc63b | |||
| e081fff031 | |||
| 7735cf0636 | |||
| c249a34643 | |||
| 6344d31c83 | |||
| 1798f163ef | |||
| 5d24badf38 | |||
| e4794ebd10 | |||
| 7a2ed053e5 | |||
| 592042fd72 | |||
| 4f6f155920 | |||
| 102070269c | |||
| a39144add7 | |||
| f8d1da5017 | |||
| a8e3228625 | |||
| 48582483a1 | |||
| f31f9de405 | |||
| 60442b85c0 | |||
| 9e6de567ee | |||
| cb2ce08004 | |||
| 0609b89162 | |||
| 9c3c6e9ab0 | |||
| be7b483cdc | |||
| dbcc5650c9 | |||
| 4f97399f55 | |||
| 5a73e37160 | |||
| 1971ccc44f | |||
| 69cfb971ee | |||
| c86f127161 | |||
| 41e5a124d0 | |||
| be9a78b18a | |||
| d0464e8982 | |||
| 5a10e31e81 | |||
| ea4e17b623 | |||
| 16dec39f7d | |||
| cc14b642ad | |||
| c4cb36f378 | |||
| 7152e5498c | |||
| 51bc7e73df | |||
| c3fba8a36e | |||
| 1fa65ba938 | |||
| 12ba4308da | |||
| 7ee11fcbe5 | |||
| 3fe87dc64b | |||
| 5363fe0c32 | |||
| 2056e919c4 | |||
| 4eb9e0fce9 | |||
| 632973caed | |||
| 8fcea5c873 | |||
| 76ef7f9f47 | |||
| 7d825331cf | |||
| 53f08d3321 | |||
| 98d404d707 | |||
| bf6a95ee1a | |||
| df04cb1119 | |||
| f80faa6016 | |||
| 111c354d65 | |||
| 87b4faa038 | |||
| b042efc2d0 | |||
| fc5ac99895 | |||
| 02dd432ac1 | |||
| d310f9d326 | |||
| 27e093b365 | |||
| 77fd3105eb | |||
| b7b4b822b5 | |||
| 99d5017c8f | |||
| ead1bd7eef | |||
| 50f2ab4c2b | |||
| f1d965bb4a | |||
| 48adc8a6e1 | |||
| 0b585d8aa7 | |||
| ded5935099 | |||
| 48383c9d08 | |||
| 3ab41ea48a | |||
| 377917c5d0 | |||
| 35dd83397e | |||
| 1a38573bf9 | |||
| 0064a4534d | |||
| 6f927897ec | |||
| 6d267cba83 | |||
| 69183bb271 | |||
| 9f4e44881b | |||
| aaeec07f16 | |||
| 2613c04e01 | |||
| 8f64b36681 | |||
| 9ab64ba3df | |||
| 814e0c18f2 | |||
| 3e6523d915 | |||
| 6977f12e79 | |||
| 5f0e504447 | |||
| eeb006df2a | |||
| 9cd5f3b19c | |||
| 599578f2b5 | |||
| ce91fe670f | |||
| e6d1937e68 | |||
| 8676bcc335 | |||
| 40910e7965 | |||
| 939d6046d3 | |||
| 0a21a1d880 | |||
| c5f061689a | |||
| 8198033f89 | |||
| 45fb3c4254 | |||
| 14c7fb1c8d | |||
| 8509618cb5 | |||
| 4b59d8badf | |||
| f0f952d7a6 | |||
| 9e71fb76e2 | |||
| 23c967f864 | |||
| 6491615bc8 | |||
| 25901bacd1 | |||
| 8ec05b79eb | |||
| e4f88950f6 | |||
| 263a6e4feb | |||
| d41c90bad7 | |||
| afb7105351 | |||
| 14342b5452 | |||
| daaedd96bb | |||
| d7e07a8ff1 | |||
| 4aad27ccf9 | |||
| 51de81b123 | |||
| 4f3bf2d495 | |||
| 06980882f9 | |||
| 5702b8dd10 | |||
| 3ef24295ef | |||
| 6071062407 | |||
| 0680b81028 | |||
| d12508b12d | |||
| 72b1b98bb9 | |||
| 318e14fa32 | |||
| eaef938988 | |||
| e6bd7b7213 | |||
| f22401af41 | |||
| 2ab64cdff5 | |||
| 74831adad8 | |||
| 3c1e52c61c | |||
| a85de8cb68 | |||
| 16a9ed56bc | |||
| 459eefd872 | |||
| 76ebd7f96a | |||
| bcda04288e | |||
| aeb0de0e77 | |||
| 57a0f1f6ee | |||
| 5e0e9df211 | |||
| 8a8066ec02 | |||
| 35869d7679 | |||
| b1eb547275 | |||
| 2c57b78517 | |||
| 5d488f3281 | |||
| db53d1f7c2 | |||
| 39224fb283 | |||
| 7b8854db1a | |||
| a5d6c7046e | |||
| 88c429a1aa | |||
| 9b1a28e251 | |||
| 4aa7b1f3f2 | |||
| 4fda597427 | |||
| 850b9b291f | |||
| 59a4aa00b0 | |||
| 10cec75a6e | |||
| c08dee7157 | |||
| d91c8aaf6b | |||
| eda3a913f9 | |||
| 6c8c4c07ca | |||
| 0bb3cc9f31 | |||
| 02cb718c27 | |||
| b9fff21986 | |||
| 70e966e33c | |||
| 63a911266a | |||
| 88deea49a4 | |||
| acbecf02da | |||
| 96c5f33d29 | |||
| b87fb1685d | |||
| be48f5d745 | |||
| 87dfc31bb9 | |||
| 1766acbffb | |||
| 692956e1cb | |||
| bc101a7105 | |||
| 4f793b3bd9 | |||
| 19b9a328f8 | |||
| a88290173b | |||
| 775cc36de6 | |||
| bd8e56aeea | |||
| 25c54e1a60 | |||
| a7e00b7a0d | |||
| a531e56e96 | |||
| fbec99cbaf | |||
| 8d0decb332 | |||
| cff21ace3a | |||
| 64a66cfb07 |
+4
-12
@@ -1,12 +1,4 @@
|
||||
Makefile
|
||||
!third_party/libjpeg_turbo/yasm/source/config/linux/Makefile
|
||||
*.mk
|
||||
!install/**/*.mk
|
||||
*.Makefile
|
||||
client.timestamp
|
||||
hooks.timestamp
|
||||
build/gyp_helper.pyc
|
||||
build/landmine_utils.pyc
|
||||
out/
|
||||
tools/closure/
|
||||
!devel/Makefile
|
||||
test/tmp
|
||||
psol/
|
||||
psol-*.tar.gz
|
||||
|
||||
|
||||
-113
@@ -1,113 +0,0 @@
|
||||
# The two httpds are the longest syncs, so we'd like to start them first.
|
||||
[submodule "third_party/httpd/src"]
|
||||
path = third_party/httpd/src
|
||||
url = git://git.apache.org/httpd.git
|
||||
[submodule "third_party/httpd24/src"]
|
||||
path = third_party/httpd24/src
|
||||
url = git://git.apache.org/httpd.git
|
||||
[submodule "third_party/brotli/src"]
|
||||
path = third_party/brotli/src
|
||||
url = https://github.com/google/brotli.git
|
||||
[submodule "third_party/giflib"]
|
||||
path = third_party/giflib
|
||||
url = https://github.com/pagespeed/giflib.git
|
||||
[submodule "third_party/closure_library"]
|
||||
path = third_party/closure_library
|
||||
url = https://github.com/google/closure-library.git
|
||||
[submodule "third_party/aprutil/src"]
|
||||
path = third_party/aprutil/src
|
||||
url = git://git.apache.org/apr-util.git
|
||||
[submodule "third_party/re2/src"]
|
||||
path = third_party/re2/src
|
||||
url = https://github.com/google/re2.git
|
||||
[submodule "third_party/icu"]
|
||||
path = third_party/icu
|
||||
url = https://github.com/pagespeed/icu.git
|
||||
[submodule "third_party/libjpeg_turbo/src"]
|
||||
path = third_party/libjpeg_turbo/src
|
||||
url = https://chromium.googlesource.com/chromium/deps/libjpeg_turbo
|
||||
# For gyp generated makefiles.
|
||||
ignore = untracked
|
||||
[submodule "third_party/libjpeg_turbo/yasm/source/patched-yasm"]
|
||||
path = third_party/libjpeg_turbo/yasm/source/patched-yasm
|
||||
url = https://chromium.googlesource.com/chromium/deps/yasm/patched-yasm
|
||||
[submodule "third_party/libpng/src"]
|
||||
path = third_party/libpng/src
|
||||
url = https://github.com/glennrp/libpng.git
|
||||
[submodule "third_party/hiredis/src"]
|
||||
path = third_party/hiredis/src
|
||||
url = https://github.com/redis/hiredis.git
|
||||
[submodule "third_party/apr/src"]
|
||||
path = third_party/apr/src
|
||||
url = git://git.apache.org/apr.git
|
||||
[submodule "third_party/optipng"]
|
||||
path = third_party/optipng
|
||||
url = https://github.com/pagespeed/optipng.git
|
||||
[submodule "third_party/libwebp"]
|
||||
path = third_party/libwebp
|
||||
url = https://chromium.googlesource.com/webm/libwebp.git
|
||||
[submodule "third_party/serf/src"]
|
||||
path = third_party/serf/src
|
||||
url = https://git.apache.org/serf.git
|
||||
[submodule "third_party/grpc/src"]
|
||||
path = third_party/grpc/src
|
||||
url = https://github.com/grpc/grpc.git
|
||||
[submodule "third_party/protobuf/src"]
|
||||
path = third_party/protobuf/src
|
||||
url = https://github.com/google/protobuf.git
|
||||
[submodule "third_party/modp_b64"]
|
||||
path = third_party/modp_b64
|
||||
url = https://chromium.googlesource.com/chromium/src/third_party/modp_b64
|
||||
# For gyp generated makefiles.
|
||||
ignore = untracked
|
||||
[submodule "third_party/boringssl/src"]
|
||||
path = third_party/boringssl/src
|
||||
url = https://boringssl.googlesource.com/boringssl.git
|
||||
[submodule "third_party/zlib"]
|
||||
path = third_party/zlib
|
||||
url = https://github.com/pagespeed/zlib.git
|
||||
[submodule "third_party/chromium/src/base"]
|
||||
path = third_party/chromium/src/base
|
||||
url = https://chromium.googlesource.com/chromium/src/base
|
||||
# For gyp generated makefiles.
|
||||
ignore = untracked
|
||||
[submodule "third_party/chromium/src/build"]
|
||||
path = third_party/chromium/src/build
|
||||
url = https://chromium.googlesource.com/chromium/src/build
|
||||
[submodule "third_party/chromium/src/googleurl"]
|
||||
path = third_party/chromium/src/googleurl
|
||||
url = https://chromium.googlesource.com/external/google-url
|
||||
[submodule "third_party/google-sparsehash/src"]
|
||||
path = third_party/google-sparsehash/src
|
||||
url = https://github.com/google/sparsehash.git
|
||||
[submodule "third_party/domain_registry_provider"]
|
||||
path = third_party/domain_registry_provider
|
||||
url = https://github.com/pagespeed/domain-registry-provider.git
|
||||
[submodule "third_party/jsoncpp/src"]
|
||||
path = third_party/jsoncpp/src
|
||||
url = https://github.com/open-source-parsers/jsoncpp.git
|
||||
[submodule "third_party/gflags/arch"]
|
||||
path = third_party/gflags/arch
|
||||
url = https://chromium.googlesource.com/external/webrtc/trunk/third_party/gflags
|
||||
[submodule "third_party/gflags/src"]
|
||||
path = third_party/gflags/src
|
||||
url = https://chromium.googlesource.com/external/gflags/src
|
||||
[submodule "testing/gtest"]
|
||||
path = testing/gtest
|
||||
url = https://github.com/google/googletest.git
|
||||
[submodule "testing/gmock"]
|
||||
path = testing/gmock
|
||||
url = https://github.com/google/googlemock.git
|
||||
[submodule "tools/clang"]
|
||||
path = tools/clang
|
||||
url = https://chromium.googlesource.com/chromium/src/tools/clang
|
||||
[submodule "tools/gyp"]
|
||||
path = tools/gyp
|
||||
url = https://chromium.googlesource.com/external/gyp
|
||||
# For building a development apache.
|
||||
[submodule "third_party/nghttp2"]
|
||||
path = third_party/nghttp2
|
||||
url = https://github.com/nghttp2/nghttp2
|
||||
[submodule "third_party/mod_fcgid"]
|
||||
path = third_party/mod_fcgid
|
||||
url = https://github.com/pagespeed/mod_fcgid.git
|
||||
-40
@@ -1,40 +0,0 @@
|
||||
language: c++
|
||||
sudo: required
|
||||
compiler:
|
||||
- gcc
|
||||
|
||||
git:
|
||||
# It takes a while to clone our submodules, so we'd like to use --jobs to
|
||||
# speed it up. Here we prevent travis from using git clone --recursive, so
|
||||
# below in before_install we can manually update including --jobs.
|
||||
submodules: false
|
||||
|
||||
before_install:
|
||||
# Unfortunately, the version of git we get by default is too low to support
|
||||
# --jobs on subdmodule, so update git before pulling in the submodules.
|
||||
- sudo add-apt-repository -y ppa:git-core/ppa
|
||||
- sudo apt-get update -q
|
||||
- sudo apt-get install -q -y git
|
||||
- git submodule update --init --recursive --jobs=6
|
||||
|
||||
env:
|
||||
global:
|
||||
- MAKEFLAGS=-j3
|
||||
matrix:
|
||||
- BIT_FLAG=
|
||||
# This would do another build for 32-bit, but we're already borderline
|
||||
# too slow on faster 64-bit, so skip this for now.
|
||||
# - BIT_FLAG=--32bit
|
||||
|
||||
script:
|
||||
# Travis will time out our build if doesn't output anything for > 10 mintes,
|
||||
# but --verbose sometimes outputs more than 4 MB of data, which will also
|
||||
# cause our build to be killed. travis_wait allows the command to be silent
|
||||
# for longer, but has the downside of not producing output if we timeout. See:
|
||||
# https://docs.travis-ci.com/user/common-build-problems/#Build-times-out-because-no-output-was-received
|
||||
# For now, stick with --verbose and keep an eye on the logs.
|
||||
- install/build_release.sh --verbose --skip_psol --debug $BIT_FLAG
|
||||
|
||||
notifications:
|
||||
email:
|
||||
- pagespeed-ci@googlegroups.com
|
||||
@@ -1,36 +1,133 @@
|
||||
# mod_pagespeed
|
||||

|
||||

|
||||
|
||||
|CI|Status|
|
||||
|---|---|
|
||||
|Travis|[](https://travis-ci.org/pagespeed/mod_pagespeed)|
|
||||
|Jenkins (CentOS5)|[](http://104.154.17.78:8080/job/mod_pagespeed)|
|
||||
ngx_pagespeed speeds up your site and reduces page load time by automatically
|
||||
applying web performance best practices to pages and associated assets (CSS,
|
||||
JavaScript, images) without requiring you to modify your existing content or
|
||||
workflow. Features include:
|
||||
|
||||
`mod_pagespeed` is an open-source Apache module created by Google to help Make the Web Faster by rewriting web pages to reduce latency and bandwidth.
|
||||
- Image optimization: stripping meta-data, dynamic resizing, recompression
|
||||
- CSS & JavaScript minification, concatenation, inlining, and outlining
|
||||
- Small resource inlining
|
||||
- Deferring image and JavaScript loading
|
||||
- HTML rewriting
|
||||
- Cache lifetime extension
|
||||
- and
|
||||
[more](https://developers.google.com/speed/docs/mod_pagespeed/config_filters)
|
||||
|
||||
mod_pagespeed releases are available as [precompiled linux packages](https://modpagespeed.com/doc/download) or as [source](https://modpagespeed.com/doc/build_mod_pagespeed_from_source). (See [Release Notes](https://modpagespeed.com/doc/release_notes) for information about bugs fixed)
|
||||
To see ngx_pagespeed in action, with example pages for each of the
|
||||
optimizations, see our <a href="http://ngxpagespeed.com">demonstration site</a>.
|
||||
|
||||
mod_pagespeed is an open-source Apache module which automatically applies web performance best practices to pages, and associated assets (CSS, JavaScript, images) without requiring that you modify your existing content or workflow.
|
||||
## How to build
|
||||
|
||||
mod_pagespeed is built on PageSpeed Optimization Libraries, deployed across 100,000+ web-sites, and provided by popular hosting and CDN providers such as DreamHost, GoDaddy, EdgeCast, and others. There are 40+ available optimizations filters, which include:
|
||||
Because nginx does not support dynamic loading of modules, you need to compile
|
||||
nginx from source to add ngx_pagespeed. Alternatively, if you're using Tengine you can [install ngx_pagespeed without
|
||||
recompiling Tengine](https://github.com/pagespeed/ngx_pagespeed/wiki/Using-ngx_pagespeed-with-Tengine).
|
||||
|
||||
- Image optimization, compression, and resizing
|
||||
- CSS & JavaScript concatenation, minification, and inlining
|
||||
- Cache extension, domain sharding, and domain rewriting
|
||||
- Deferred loading of JavaScript and image resources
|
||||
- and many others...
|
||||
1. Install dependencies:
|
||||
|
||||
[](http://www.youtube.com/watch?v=8moGR2qf994)
|
||||
```bash
|
||||
# These are for RedHat, CentOS, and Fedora.
|
||||
$ sudo yum install gcc-c++ pcre-dev pcre-devel zlib-devel make
|
||||
|
||||
| Try it | [modpagespeed.com](https://modpagespeed.com) |
|
||||
|--- |--- |
|
||||
| Read about it |https://developers.google.com/speed/pagespeed/module |
|
||||
| Download it | https://modpagespeed.com/doc/download |
|
||||
| Check announcements |https://groups.google.com/group/mod-pagespeed-announce |
|
||||
| Discuss it | https://groups.google.com/group/mod-pagespeed-discuss |
|
||||
|FAQ | https://modpagespeed.com/doc/faq |
|
||||
# These are for Debian. Ubuntu will be similar.
|
||||
$ sudo apt-get install build-essential zlib1g-dev libpcre3 libpcre3-dev
|
||||
```
|
||||
|
||||
2. Download ngx_pagespeed:
|
||||
|
||||
Curious to learn more about mod_pagespeed? Check out our GDL episode below, which covers the history of the project, an architectural overview of how mod_pagespeed works under the hood, and a number of operational tips and best practices for deploying mod_pagespeed.
|
||||
```bash
|
||||
$ cd ~
|
||||
$ wget https://github.com/pagespeed/ngx_pagespeed/archive/v1.7.30.3-beta.zip
|
||||
$ unzip v1.7.30.3-beta.zip # or unzip v1.7.30.3-beta
|
||||
$ cd ngx_pagespeed-1.7.30.3-beta/
|
||||
$ wget https://dl.google.com/dl/page-speed/psol/1.7.30.3.tar.gz
|
||||
$ tar -xzvf 1.7.30.3.tar.gz # expands to psol/
|
||||
```
|
||||
|
||||
[](http://www.youtube.com/watch?v=6uCAdQSHhmA)
|
||||
3. Download and build nginx:
|
||||
|
||||
```bash
|
||||
$ # check http://nginx.org/en/download.html for the latest version
|
||||
$ wget http://nginx.org/download/nginx-1.4.4.tar.gz
|
||||
$ tar -xvzf nginx-1.4.4.tar.gz
|
||||
$ cd nginx-1.4.4/
|
||||
$ ./configure --add-module=$HOME/ngx_pagespeed-1.7.30.3-beta
|
||||
$ make
|
||||
$ sudo make install
|
||||
```
|
||||
|
||||
If this doesn't work see the [build
|
||||
troubleshooting](https://github.com/pagespeed/ngx_pagespeed/wiki/Build-Troubleshooting) page.
|
||||
|
||||
This will use a binary PageSpeed Optimization Library, but it's also possible to
|
||||
[build PSOL from
|
||||
source](https://github.com/pagespeed/ngx_pagespeed/wiki/Building-PSOL-From-Source).
|
||||
|
||||
Note: ngx_pagespeed currently doesn't support Windows or MacOS because the
|
||||
underlying PSOL library doesn't.
|
||||
|
||||
## How to use
|
||||
|
||||
In your `nginx.conf`, add to the main or server block:
|
||||
|
||||
```nginx
|
||||
pagespeed on;
|
||||
pagespeed FileCachePath /var/ngx_pagespeed_cache;
|
||||
```
|
||||
|
||||
In every server block where pagespeed is enabled add:
|
||||
|
||||
```apache
|
||||
# Ensure requests for pagespeed optimized resources go to the pagespeed
|
||||
# handler and no extraneous headers get set.
|
||||
location ~ "\.pagespeed\.([a-z]\.)?[a-z]{2}\.[^.]{10}\.[^.]+" { add_header "" ""; }
|
||||
location ~ "^/ngx_pagespeed_static/" { }
|
||||
location ~ "^/ngx_pagespeed_beacon$" { }
|
||||
location /ngx_pagespeed_statistics { allow 127.0.0.1; deny all; }
|
||||
location /ngx_pagespeed_global_statistics { allow 127.0.0.1; deny all; }
|
||||
location /ngx_pagespeed_message { allow 127.0.0.1; deny all; }
|
||||
location /pagespeed_console { allow 127.0.0.1; deny all; }
|
||||
```
|
||||
|
||||
To confirm that the module is loaded, fetch a page and check that you see the
|
||||
`X-Page-Speed` header:
|
||||
|
||||
```bash
|
||||
$ curl -I 'http://localhost:8050/some_page/' | grep X-Page-Speed
|
||||
X-Page-Speed: 1.7.30.3-...
|
||||
```
|
||||
|
||||
Looking at the source of a few pages you should see various changes, such as
|
||||
urls being replaced with new ones like `yellow.css.pagespeed.ce.lzJ8VcVi1l.css`.
|
||||
|
||||
For complete documentation, see [Using
|
||||
PageSpeed](https://developers.google.com/speed/pagespeed/module/using).
|
||||
|
||||
There are extensive system tests which cover most of ngx_pagespeed's
|
||||
functionality. Consider [testing your
|
||||
installation](https://github.com/pagespeed/ngx_pagespeed/wiki/Testing).
|
||||
|
||||
For feedback, questions, and to follow
|
||||
the progress of the project:
|
||||
|
||||
- [ngx-pagespeed-discuss mailing
|
||||
list](https://groups.google.com/forum/#!forum/ngx-pagespeed-discuss)
|
||||
- [ngx-pagespeed-announce mailing
|
||||
list](https://groups.google.com/forum/#!forum/ngx-pagespeed-announce)
|
||||
|
||||
Note: The
|
||||
[canonicalize_javascript_libraries](https://developers.google.com/speed/pagespeed/module/filter-canonicalize-js)
|
||||
depends on `pagespeed_libraries.conf` which is distributed in Apache's format.
|
||||
To convert it to the Nginx format, run:
|
||||
|
||||
```bash
|
||||
$ scripts/pagespeed_libraries_generator.sh > ~/pagespeed_libraries.conf
|
||||
$ sudo mv ~/pagespeed_libraries.conf /etc/nginx/
|
||||
```
|
||||
|
||||
And then include it in your Nginx configuration by reference:
|
||||
|
||||
```nginx
|
||||
include pagespeed_libraries.conf;
|
||||
pagespeed EnableFilters canonicalize_javascript_libraries;
|
||||
```
|
||||
|
||||
@@ -1,63 +0,0 @@
|
||||
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# Base was branched from the chromium version to reduce the number of
|
||||
# dependencies of this package. Specifically, we would like to avoid
|
||||
# depending on the chrome directory, which contains the chrome version
|
||||
# and branding information.
|
||||
# TODO(morlovich): push this refactoring to chronium trunk.
|
||||
|
||||
{
|
||||
'variables': {
|
||||
'chromium_code': 1,
|
||||
'chromium_root': '<(DEPTH)/third_party/chromium/src',
|
||||
},
|
||||
'includes': [
|
||||
'base.gypi',
|
||||
],
|
||||
'targets': [
|
||||
{
|
||||
# This is the subset of files from base that should not be used with a
|
||||
# dynamic library. Note that this library cannot depend on base because
|
||||
# base depends on base_static.
|
||||
'target_name': 'base_static',
|
||||
'type': 'static_library',
|
||||
'sources': [
|
||||
'<(chromium_root)/base/base_switches.cc',
|
||||
'<(chromium_root)/base/base_switches.h',
|
||||
'<(chromium_root)/base/win/pe_image.cc',
|
||||
'<(chromium_root)/base/win/pe_image.h',
|
||||
],
|
||||
'include_dirs': [
|
||||
'<(chromium_root)',
|
||||
'<(DEPTH)',
|
||||
],
|
||||
},
|
||||
{
|
||||
'target_name': 'base_unittests',
|
||||
'type': 'executable',
|
||||
'sources': [
|
||||
'<(chromium_root)/base/string_piece_unittest.cc',
|
||||
'<(chromium_root)/base/win/win_util_unittest.cc',
|
||||
],
|
||||
'dependencies': [
|
||||
'base',
|
||||
'base_static',
|
||||
'<(DEPTH)/testing/gmock.gyp:gmock',
|
||||
'<(DEPTH)/testing/gtest.gyp:gtest',
|
||||
'<(DEPTH)/testing/gtest.gyp:gtest_main',
|
||||
],
|
||||
'include_dirs': [
|
||||
'<(DEPTH)',
|
||||
],
|
||||
'conditions': [
|
||||
['OS != "win"', {
|
||||
'sources!': [
|
||||
'<(chromium_root)/base/win_util_unittest.cc',
|
||||
],
|
||||
}],
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
-298
@@ -1,298 +0,0 @@
|
||||
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
{
|
||||
'target_defaults': {
|
||||
'variables': {
|
||||
'base_target': 0,
|
||||
'chromium_root': '<(DEPTH)/third_party/chromium/src',
|
||||
'conditions': [
|
||||
# The default stack_trace_posix.cc is not compatible with NaCL newlib
|
||||
# toolchain, so we provide a stubbed version when building for NaCL.
|
||||
[ 'build_nacl==1', {
|
||||
'stack_trace_posix_cc': 'nacl_stubs/stack_trace_posix.cc',
|
||||
}, {
|
||||
'stack_trace_posix_cc': '<(chromium_root)/base/debug/stack_trace_posix.cc',
|
||||
}],
|
||||
],
|
||||
},
|
||||
'target_conditions': [
|
||||
# This part is shared between the targets defined below. Only files and
|
||||
# settings relevant for building the Win64 target should be added here.
|
||||
# All the rest should be added to the 'base' target below.
|
||||
['base_target==1', {
|
||||
'sources': [
|
||||
'<(chromium_root)/build/build_config.h',
|
||||
'<(chromium_root)/base/third_party/dmg_fp/dmg_fp.h',
|
||||
'<(chromium_root)/base/third_party/dmg_fp/g_fmt.cc',
|
||||
'<(chromium_root)/base/third_party/dmg_fp/dtoa_wrapper.cc',
|
||||
'<(chromium_root)/base/third_party/icu/icu_utf.cc',
|
||||
'<(chromium_root)/base/third_party/icu/icu_utf.h',
|
||||
'<(chromium_root)/base/third_party/nspr/prtime.cc',
|
||||
'<(chromium_root)/base/third_party/nspr/prtime.h',
|
||||
'<(chromium_root)/base/at_exit.cc',
|
||||
'<(chromium_root)/base/at_exit.h',
|
||||
'<(chromium_root)/base/atomicops.h',
|
||||
'<(chromium_root)/base/atomicops_internals_x86_gcc.cc',
|
||||
'<(chromium_root)/base/atomicops_internals_x86_msvc.h',
|
||||
'<(chromium_root)/base/callback.h',
|
||||
'<(chromium_root)/base/callback_internal.h',
|
||||
'<(chromium_root)/base/callback_internal.cc',
|
||||
'<(chromium_root)/base/command_line.cc',
|
||||
'<(chromium_root)/base/command_line.h',
|
||||
'<(chromium_root)/base/cpu_patched.cc',
|
||||
'<(chromium_root)/base/cpu.h',
|
||||
'<(chromium_root)/base/debug/alias.cc',
|
||||
'<(chromium_root)/base/debug/alias.h',
|
||||
'<(chromium_root)/base/debug/debugger.cc',
|
||||
'<(chromium_root)/base/debug/debugger.h',
|
||||
'<(chromium_root)/base/debug/debugger_posix.cc',
|
||||
'<(chromium_root)/base/debug/debugger_win.cc',
|
||||
'<(chromium_root)/base/debug/profiler.cc',
|
||||
'<(chromium_root)/base/debug/profiler.h',
|
||||
'<(chromium_root)/base/debug/stack_trace.cc',
|
||||
'<(chromium_root)/base/debug/stack_trace.h',
|
||||
'<(stack_trace_posix_cc)',
|
||||
'<(chromium_root)/base/debug/stack_trace_win.cc',
|
||||
'<(chromium_root)/base/files/file_path.cc',
|
||||
'<(chromium_root)/base/files/file_path.h',
|
||||
'<(chromium_root)/base/files/file_path_constants.cc',
|
||||
'<(chromium_root)/base/json/json_parser.cc',
|
||||
'<(chromium_root)/base/json/json_parser.h',
|
||||
'<(chromium_root)/base/json/json_reader.cc',
|
||||
'<(chromium_root)/base/json/json_reader.h',
|
||||
'<(chromium_root)/base/json/json_writer.cc',
|
||||
'<(chromium_root)/base/json/json_writer.h',
|
||||
'<(chromium_root)/base/json/string_escape.cc',
|
||||
'<(chromium_root)/base/json/string_escape.h',
|
||||
'<(chromium_root)/base/lazy_instance.cc',
|
||||
'<(chromium_root)/base/lazy_instance.h',
|
||||
'<(chromium_root)/base/logging.cc',
|
||||
'<(chromium_root)/base/logging.h',
|
||||
'<(chromium_root)/base/logging_win.cc',
|
||||
'<(chromium_root)/base/logging_win.h',
|
||||
'<(chromium_root)/base/location.cc',
|
||||
'<(chromium_root)/base/memory/ref_counted.cc',
|
||||
'<(chromium_root)/base/memory/ref_counted.h',
|
||||
'<(chromium_root)/base/memory/singleton.cc',
|
||||
'<(chromium_root)/base/memory/singleton.h',
|
||||
'<(chromium_root)/base/mac/foundation_util.h',
|
||||
'mac/foundation_util.mm',
|
||||
'<(chromium_root)/base/pickle.cc',
|
||||
'<(chromium_root)/base/pickle.h',
|
||||
'<(chromium_root)/base/process.h',
|
||||
'process_util.cc',
|
||||
'<(chromium_root)/base/safe_strerror_posix.cc',
|
||||
'<(chromium_root)/base/safe_strerror_posix.h',
|
||||
'<(chromium_root)/base/strings/string_number_conversions.cc',
|
||||
'<(chromium_root)/base/strings/string_number_conversions.h',
|
||||
'<(chromium_root)/base/strings/string_piece.cc',
|
||||
'<(chromium_root)/base/strings/string_piece.h',
|
||||
'<(chromium_root)/base/strings/string_split.cc',
|
||||
'<(chromium_root)/base/strings/string_split.h',
|
||||
'<(chromium_root)/base/strings/string_util.cc',
|
||||
'<(chromium_root)/base/strings/string_util.h',
|
||||
'<(chromium_root)/base/strings/string_util_constants.cc',
|
||||
'<(chromium_root)/base/strings/string_util_win.h',
|
||||
'<(chromium_root)/base/strings/stringprintf.cc',
|
||||
'<(chromium_root)/base/strings/stringprintf.h',
|
||||
'<(chromium_root)/base/strings/sys_string_conversions.h',
|
||||
'<(chromium_root)/base/strings/sys_string_conversions_mac.mm',
|
||||
'<(chromium_root)/base/strings/sys_string_conversions_posix.cc',
|
||||
'<(chromium_root)/base/strings/sys_string_conversions_win.cc',
|
||||
'<(chromium_root)/base/strings/utf_string_conversion_utils.cc',
|
||||
'<(chromium_root)/base/strings/utf_string_conversion_utils.h',
|
||||
'<(chromium_root)/base/strings/utf_string_conversions.cc',
|
||||
'<(chromium_root)/base/strings/utf_string_conversions.h',
|
||||
'<(chromium_root)/base/synchronization/cancellation_flag.cc',
|
||||
'<(chromium_root)/base/synchronization/cancellation_flag.h',
|
||||
'<(chromium_root)/base/synchronization/condition_variable.h',
|
||||
'<(chromium_root)/base/synchronization/condition_variable_posix.cc',
|
||||
'<(chromium_root)/base/synchronization/condition_variable_win.cc',
|
||||
'<(chromium_root)/base/synchronization/lock.cc',
|
||||
'<(chromium_root)/base/synchronization/lock.h',
|
||||
'<(chromium_root)/base/synchronization/lock_impl.h',
|
||||
'<(chromium_root)/base/synchronization/lock_impl_posix.cc',
|
||||
'<(chromium_root)/base/synchronization/lock_impl_win.cc',
|
||||
'<(chromium_root)/base/synchronization/spin_wait.h',
|
||||
'<(chromium_root)/base/synchronization/waitable_event.h',
|
||||
'<(chromium_root)/base/synchronization/waitable_event_posix.cc',
|
||||
'<(chromium_root)/base/synchronization/waitable_event_watcher.h',
|
||||
'<(chromium_root)/base/synchronization/waitable_event_watcher_posix.cc',
|
||||
'<(chromium_root)/base/synchronization/waitable_event_watcher_win.cc',
|
||||
'<(chromium_root)/base/synchronization/waitable_event_win.cc',
|
||||
'<(chromium_root)/base/threading/platform_thread.h',
|
||||
'<(chromium_root)/base/threading/platform_thread_linux.cc',
|
||||
'<(chromium_root)/base/threading/platform_thread_mac.mm',
|
||||
'<(chromium_root)/base/threading/platform_thread_posix.cc',
|
||||
'<(chromium_root)/base/threading/platform_thread_win.cc',
|
||||
'<(chromium_root)/base/threading/thread_collision_warner.cc',
|
||||
'<(chromium_root)/base/threading/thread_collision_warner.h',
|
||||
'<(chromium_root)/base/threading/thread_id_name_manager.cc',
|
||||
'<(chromium_root)/base/threading/thread_id_name_manager.h',
|
||||
'<(chromium_root)/base/threading/thread_local.h',
|
||||
'<(chromium_root)/base/threading/thread_local_posix.cc',
|
||||
'<(chromium_root)/base/threading/thread_local_storage.cc',
|
||||
'<(chromium_root)/base/threading/thread_local_storage.h',
|
||||
'<(chromium_root)/base/threading/thread_local_storage_posix.cc',
|
||||
'<(chromium_root)/base/threading/thread_local_storage_win.cc',
|
||||
'<(chromium_root)/base/threading/thread_local_win.cc',
|
||||
'<(chromium_root)/base/threading/thread_restrictions.cc',
|
||||
'<(chromium_root)/base/threading/thread_restrictions.h',
|
||||
'<(chromium_root)/base/time/time.cc',
|
||||
'<(chromium_root)/base/time/time.h',
|
||||
'<(chromium_root)/base/time/time_mac.cc',
|
||||
'<(chromium_root)/base/time/time_posix.cc',
|
||||
'<(chromium_root)/base/time/time_win.cc',
|
||||
'<(chromium_root)/base/tracked_objects.cc',
|
||||
'<(chromium_root)/base/tracked_objects.h',
|
||||
'<(chromium_root)/base/values.cc',
|
||||
'<(chromium_root)/base/values.h',
|
||||
'<(chromium_root)/base/vlog.cc',
|
||||
'<(chromium_root)/base/vlog.h',
|
||||
'<(chromium_root)/base/win/registry.cc',
|
||||
'<(chromium_root)/base/win/registry.h',
|
||||
'<(chromium_root)/base/win/scoped_handle.cc',
|
||||
'<(chromium_root)/base/win/scoped_handle.h',
|
||||
'<(chromium_root)/base/win/win_util.cc',
|
||||
'<(chromium_root)/base/win/win_util.h',
|
||||
'<(chromium_root)/base/win/windows_version.cc',
|
||||
'<(chromium_root)/base/profiler/tracked_time.h',
|
||||
'<(chromium_root)/base/profiler/tracked_time.cc',
|
||||
'<(chromium_root)/base/profiler/alternate_timer.h',
|
||||
'<(chromium_root)/base/profiler/alternate_timer.cc',
|
||||
'<(chromium_root)/base/win/windows_version.h',
|
||||
],
|
||||
'include_dirs': [
|
||||
'<(chromium_root)',
|
||||
'<(DEPTH)',
|
||||
],
|
||||
# These warnings are needed for the files in third_party\dmg_fp.
|
||||
'msvs_disabled_warnings': [
|
||||
4244, 4554, 4018, 4102,
|
||||
],
|
||||
'mac_framework_dirs': [
|
||||
'$(SDKROOT)/System/Library/Frameworks/ApplicationServices.framework/Frameworks',
|
||||
],
|
||||
'conditions': [
|
||||
[ 'OS != "linux" and OS != "freebsd" and OS != "openbsd" and OS != "solaris"', {
|
||||
'sources!': [
|
||||
'<(chromium_root)/base/atomicops_internals_x86_gcc.cc',
|
||||
],
|
||||
},],
|
||||
['OS != "win"', {
|
||||
'sources/': [ ['exclude', '^win/'] ],
|
||||
},
|
||||
],
|
||||
[ 'OS == "win"', {
|
||||
'sources!': [
|
||||
'<(chromium_root)/base/strings/string16.cc',
|
||||
],
|
||||
},],
|
||||
],
|
||||
}],
|
||||
],
|
||||
},
|
||||
'targets': [
|
||||
# Older assemblers don't recognize the xgetbv opcode, and require explicit
|
||||
# bytes instead. These can be found by searching the web; example:
|
||||
# http://lxr.free-electrons.com/source/arch/x86/include/asm/xcr.h#L31
|
||||
{
|
||||
'target_name': 'cpu_patched',
|
||||
'type': 'none',
|
||||
'sources': [
|
||||
'<(chromium_root)/base/cpu.cc',
|
||||
'<(chromium_root)/base/cpu_patched.cc',
|
||||
],
|
||||
'actions': [
|
||||
{
|
||||
'action_name': 'Patch cpu.cc',
|
||||
'inputs': [
|
||||
'<(chromium_root)/base/cpu.cc',
|
||||
],
|
||||
'outputs': [
|
||||
'<(chromium_root)/base/cpu_patched.cc',
|
||||
],
|
||||
'action': [
|
||||
'bash', '-c',
|
||||
'sed \'s/"xgetbv"/".byte 0x0f, 0x01, 0xd0"/\' <@(_inputs) > <@(_outputs)'
|
||||
],
|
||||
'message': 'Attempting to generate patched <@(_outputs) from <@(_inputs)',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
'target_name': 'base',
|
||||
'type': '<(component)',
|
||||
'variables': {
|
||||
'base_target': 1,
|
||||
},
|
||||
'dependencies': [
|
||||
'base_static',
|
||||
'cpu_patched',
|
||||
'<(DEPTH)/third_party/modp_b64/modp_b64.gyp:modp_b64',
|
||||
'<(chromium_root)/base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
|
||||
],
|
||||
# TODO(gregoryd): direct_dependent_settings should be shared with the
|
||||
# 64-bit target, but it doesn't work due to a bug in gyp
|
||||
'direct_dependent_settings': {
|
||||
'include_dirs': [
|
||||
'<(chromium_root)',
|
||||
'<(DEPTH)',
|
||||
],
|
||||
},
|
||||
'conditions': [
|
||||
[ 'OS == "linux"', {
|
||||
'cflags': [
|
||||
'-Wno-write-strings',
|
||||
'-Wno-error',
|
||||
],
|
||||
'conditions': [
|
||||
[ 'build_nacl==0', {
|
||||
# We do not need clock_gettime() when building for NaCL newlib.
|
||||
'link_settings': {
|
||||
'libraries': [
|
||||
# We need rt for clock_gettime().
|
||||
'-lrt',
|
||||
],
|
||||
},
|
||||
}],
|
||||
],
|
||||
}],
|
||||
[ 'OS == "mac"', {
|
||||
'link_settings': {
|
||||
'libraries': [
|
||||
'$(SDKROOT)/System/Library/Frameworks/AppKit.framework',
|
||||
'$(SDKROOT)/System/Library/Frameworks/Carbon.framework',
|
||||
'$(SDKROOT)/System/Library/Frameworks/CoreFoundation.framework',
|
||||
'$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
|
||||
'$(SDKROOT)/System/Library/Frameworks/IOKit.framework',
|
||||
'$(SDKROOT)/System/Library/Frameworks/Security.framework',
|
||||
],
|
||||
},
|
||||
},],
|
||||
[ 'build_nacl==1', {
|
||||
'defines': [
|
||||
# A super-hack. prtime.cc (and possibly other sources) call
|
||||
# timegm, which is a non-standard function that's
|
||||
# unavailable when compiling using NaCl newlib. mktime is
|
||||
# essentially a drop-in replacement for timegm, modulo time
|
||||
# zone issues, however NaCL will default to UTC which is the
|
||||
# expected behavior for timegm, so the two should behave
|
||||
# identically.
|
||||
'timegm=mktime',
|
||||
],
|
||||
}],
|
||||
],
|
||||
'sources': [
|
||||
'<(chromium_root)/base/base64.cc',
|
||||
'<(chromium_root)/base/base64.h',
|
||||
'<(chromium_root)/base/md5.cc',
|
||||
'<(chromium_root)/base/md5.h',
|
||||
'<(chromium_root)/base/strings/string16.cc',
|
||||
'<(chromium_root)/base/strings/string16.h',
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
@@ -1,35 +0,0 @@
|
||||
// Copyright 2013 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// Chromium's foundation_util.cc pulls a lot of mac related functions into the
|
||||
// base package. We don't need them, so strip down all the code.
|
||||
|
||||
#include "base/mac/foundation_util.h"
|
||||
|
||||
namespace base {
|
||||
namespace mac {
|
||||
|
||||
void* CFTypeRefToNSObjectAutorelease(CFTypeRef cf_object) {
|
||||
// When GC is on, NSMakeCollectable marks cf_object for GC and autorelease
|
||||
// is a no-op.
|
||||
//
|
||||
// In the traditional GC-less environment, NSMakeCollectable is a no-op,
|
||||
// and cf_object is autoreleased, balancing out the caller's ownership claim.
|
||||
//
|
||||
// NSMakeCollectable returns nil when used on a NULL object.
|
||||
return [NSMakeCollectable(cf_object) autorelease];
|
||||
}
|
||||
|
||||
} // namespace mac
|
||||
} // namespace base
|
||||
@@ -1,23 +0,0 @@
|
||||
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style license that can be
|
||||
// found in the LICENSE file.
|
||||
//
|
||||
// NaCL newlib is not compatible with the default
|
||||
// stack_trace_posix.cc. So we provide this stubbed out version for
|
||||
// use when building for NaCL.
|
||||
|
||||
#ifndef __native_client__
|
||||
#error This file should only be used when compiling for Native Client.
|
||||
#endif
|
||||
|
||||
#include "base/debug/stack_trace.h"
|
||||
|
||||
namespace base {
|
||||
namespace debug {
|
||||
|
||||
StackTrace::StackTrace() {}
|
||||
void StackTrace::PrintBacktrace() const {}
|
||||
void StackTrace::OutputToStream(std::ostream* os) const {}
|
||||
|
||||
} // namespace debug
|
||||
} // namespace base
|
||||
@@ -1,29 +0,0 @@
|
||||
// Copyright 2013 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// Chromium's process.cc pulls a lot of file related functions into the
|
||||
// base package. We don't need them, so strip down all the code.
|
||||
|
||||
#include "base/logging.h"
|
||||
#include "base/process/process.h"
|
||||
|
||||
namespace base {
|
||||
|
||||
// Returns the id of the current process.
|
||||
ProcessId GetCurrentProcId() {
|
||||
DCHECK(false); // we don't actually expect this to be called.
|
||||
return 0;
|
||||
}
|
||||
|
||||
} // namespace base
|
||||
@@ -1,13 +0,0 @@
|
||||
The following files in this directory were copied from chromium's repository at
|
||||
revision 256281 (https://src.chromium.org/svn/trunk/src/build/?p=256281).
|
||||
|
||||
compiler_version.py (with local bugfix decribed at the top)
|
||||
filename_rules.gypi
|
||||
get_landmines.py
|
||||
grit_action.gypi
|
||||
gyp_chromium (with minor local modifications described at the top)
|
||||
gyp_helper.py
|
||||
java.gypi
|
||||
landmine_utils.py
|
||||
landmines.py
|
||||
release.gypi
|
||||
@@ -1,72 +0,0 @@
|
||||
# Copyright 2009 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
{
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'All',
|
||||
'type': 'none',
|
||||
'xcode_create_dependents_test_runner': 1,
|
||||
'dependencies': [
|
||||
'mod_pagespeed',
|
||||
'test',
|
||||
'js_minify',
|
||||
'pagespeed_automatic',
|
||||
],},
|
||||
{
|
||||
'target_name': 'mod_pagespeed',
|
||||
'type': 'none',
|
||||
'dependencies': [
|
||||
'../net/instaweb/instaweb.gyp:instaweb_rewriter',
|
||||
'../net/instaweb/instaweb_apr.gyp:*',
|
||||
'../net/instaweb/mod_pagespeed.gyp:mod_pagespeed',
|
||||
'install.gyp:*',
|
||||
],
|
||||
'conditions': [
|
||||
['use_system_apache_dev==0', {
|
||||
'dependencies+': [
|
||||
'../net/instaweb/mod_pagespeed.gyp:mod_pagespeed_ap24',
|
||||
],
|
||||
}],
|
||||
]},
|
||||
{
|
||||
'target_name': 'pagespeed_automatic',
|
||||
'type': 'none',
|
||||
'dependencies': [
|
||||
'../net/instaweb/test.gyp:pagespeed_automatic_test',
|
||||
'../net/instaweb/instaweb.gyp:automatic_util',
|
||||
],},
|
||||
{
|
||||
'target_name': 'test',
|
||||
'type': 'none',
|
||||
'xcode_create_dependents_test_runner': 1,
|
||||
'dependencies': [
|
||||
'../net/instaweb/instaweb.gyp:*',
|
||||
'../net/instaweb/instaweb_core.gyp:*',
|
||||
'../net/instaweb/instaweb_apr.gyp:*',
|
||||
'../net/instaweb/test.gyp:mod_pagespeed_test',
|
||||
'../net/instaweb/test.gyp:mod_pagespeed_speed_test',
|
||||
'install.gyp:*',
|
||||
'<(DEPTH)/pagespeed/kernel.gyp:redis_cache_cluster_setup',
|
||||
]
|
||||
},
|
||||
{
|
||||
'target_name': 'js_minify',
|
||||
'type': 'none',
|
||||
'dependencies': [
|
||||
'../net/instaweb/instaweb.gyp:js_minify',
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
@@ -1,41 +0,0 @@
|
||||
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# This file is meant to be included into an action to provide an action that
|
||||
# combines a directory of shared libraries and an incomplete APK into a
|
||||
# standalone APK.
|
||||
#
|
||||
# To use this, create a gyp action with the following form:
|
||||
# {
|
||||
# 'action_name': 'some descriptive action name',
|
||||
# 'variables': {
|
||||
# 'inputs': [ 'input_path1', 'input_path2' ],
|
||||
# 'input_apk_path': '<(unsigned_apk_path)',
|
||||
# 'output_apk_path': '<(unsigned_standalone_apk_path)',
|
||||
# 'libraries_top_dir': '<(libraries_top_dir)',
|
||||
# },
|
||||
# 'includes': [ 'relative/path/to/create_standalone_apk_action.gypi' ],
|
||||
# },
|
||||
|
||||
{
|
||||
'message': 'Creating standalone APK: <(output_apk_path)',
|
||||
'variables': {
|
||||
'inputs': [],
|
||||
},
|
||||
'inputs': [
|
||||
'<(DEPTH)/build/android/gyp/util/build_utils.py',
|
||||
'<(DEPTH)/build/android/gyp/create_standalone_apk.py',
|
||||
'<(input_apk_path)',
|
||||
'>@(inputs)',
|
||||
],
|
||||
'outputs': [
|
||||
'<(output_apk_path)',
|
||||
],
|
||||
'action': [
|
||||
'python', '<(DEPTH)/build/android/gyp/create_standalone_apk.py',
|
||||
'--libraries-top-dir=<(libraries_top_dir)',
|
||||
'--input-apk-path=<(input_apk_path)',
|
||||
'--output-apk-path=<(output_apk_path)',
|
||||
],
|
||||
}
|
||||
@@ -1,56 +0,0 @@
|
||||
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# This file is meant to be included into an action to provide a rule that dexes
|
||||
# compiled java files. If proguard_enabled == "true" and CONFIGURATION_NAME ==
|
||||
# "Release", then it will dex the proguard_enabled_input_path instead of the
|
||||
# normal dex_input_paths/dex_generated_input_paths.
|
||||
#
|
||||
# To use this, create a gyp target with the following form:
|
||||
# {
|
||||
# 'action_name': 'some name for the action'
|
||||
# 'actions': [
|
||||
# 'variables': {
|
||||
# 'dex_input_paths': [ 'files to dex (when proguard is not used) and add to input paths' ],
|
||||
# 'dex_generated_input_dirs': [ 'dirs that contain generated files to dex' ],
|
||||
#
|
||||
# # For targets that use proguard:
|
||||
# 'proguard_enabled': 'true',
|
||||
# 'proguard_enabled_input_path': 'path to dex when using proguard',
|
||||
# },
|
||||
# 'includes': [ 'relative/path/to/dex_action.gypi' ],
|
||||
# ],
|
||||
# },
|
||||
#
|
||||
|
||||
{
|
||||
'message': 'Creating dex file: <(output_path)',
|
||||
'variables': {
|
||||
'dex_input_paths': [],
|
||||
'dex_generated_input_dirs': [],
|
||||
'proguard_enabled%': 'false',
|
||||
'proguard_enabled_input_path%': '',
|
||||
'dex_no_locals%': 0,
|
||||
},
|
||||
'inputs': [
|
||||
'<(DEPTH)/build/android/gyp/util/build_utils.py',
|
||||
'<(DEPTH)/build/android/gyp/util/md5_check.py',
|
||||
'<(DEPTH)/build/android/gyp/dex.py',
|
||||
'>@(dex_input_paths)',
|
||||
],
|
||||
'outputs': [
|
||||
'<(output_path)',
|
||||
],
|
||||
'action': [
|
||||
'python', '<(DEPTH)/build/android/gyp/dex.py',
|
||||
'--dex-path=<(output_path)',
|
||||
'--android-sdk-tools=<(android_sdk_tools)',
|
||||
'--configuration-name=<(CONFIGURATION_NAME)',
|
||||
'--proguard-enabled=<(proguard_enabled)',
|
||||
'--proguard-enabled-input-path=<(proguard_enabled_input_path)',
|
||||
'--no-locals=<(dex_no_locals)',
|
||||
'>@(dex_input_paths)',
|
||||
'>@(dex_generated_input_dirs)',
|
||||
]
|
||||
}
|
||||
@@ -1,40 +0,0 @@
|
||||
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# This file is meant to be included into an action to provide an action that
|
||||
# signs and zipaligns an APK.
|
||||
#
|
||||
# To use this, create a gyp action with the following form:
|
||||
# {
|
||||
# 'action_name': 'some descriptive action name',
|
||||
# 'variables': {
|
||||
# 'input_apk_path': 'relative/path/to/input.apk',
|
||||
# 'output_apk_path': 'relative/path/to/output.apk',
|
||||
# },
|
||||
# 'includes': [ '../../build/android/finalize_apk.gypi' ],
|
||||
# },
|
||||
#
|
||||
|
||||
{
|
||||
'message': 'Signing/aligning <(_target_name) APK: <(input_apk_path)',
|
||||
'variables': {
|
||||
'keystore_path%': '<(DEPTH)/build/android/ant/chromium-debug.keystore',
|
||||
},
|
||||
'inputs': [
|
||||
'<(DEPTH)/build/android/gyp/util/build_utils.py',
|
||||
'<(DEPTH)/build/android/gyp/finalize_apk.py',
|
||||
'<(keystore_path)',
|
||||
'<(input_apk_path)',
|
||||
],
|
||||
'outputs': [
|
||||
'<(output_apk_path)',
|
||||
],
|
||||
'action': [
|
||||
'python', '<(DEPTH)/build/android/gyp/finalize_apk.py',
|
||||
'--android-sdk-root=<(android_sdk_root)',
|
||||
'--unsigned-apk-path=<(input_apk_path)',
|
||||
'--final-apk-path=<(output_apk_path)',
|
||||
'--keystore-path=<(keystore_path)',
|
||||
],
|
||||
}
|
||||
@@ -1,53 +0,0 @@
|
||||
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# This file is meant to be included into an action to provide a rule that
|
||||
# instruments either java class files, or jars.
|
||||
|
||||
{
|
||||
'variables': {
|
||||
'instr_type%': 'jar',
|
||||
'input_path%': '',
|
||||
'output_path%': '',
|
||||
'stamp_path%': '',
|
||||
'extra_instr_args': [
|
||||
'--coverage-file=<(_target_name).em',
|
||||
'--sources-file=<(_target_name)_sources.txt',
|
||||
],
|
||||
'emma_jar': '<(android_sdk_root)/tools/lib/emma.jar',
|
||||
'conditions': [
|
||||
['emma_instrument != 0', {
|
||||
'extra_instr_args': [
|
||||
'--sources=<(java_in_dir)/src >(additional_src_dirs) >(generated_src_dirs)',
|
||||
'--src-root=<(DEPTH)',
|
||||
'--emma-jar=<(emma_jar)',
|
||||
'--filter-string=<(emma_filter)',
|
||||
],
|
||||
'conditions': [
|
||||
['instr_type == "jar"', {
|
||||
'instr_action': 'instrument_jar',
|
||||
}, {
|
||||
'instr_action': 'instrument_classes',
|
||||
}]
|
||||
],
|
||||
}, {
|
||||
'instr_action': 'copy',
|
||||
'extra_instr_args': [],
|
||||
}]
|
||||
]
|
||||
},
|
||||
'inputs': [
|
||||
'<(DEPTH)/build/android/gyp/emma_instr.py',
|
||||
'<(DEPTH)/build/android/gyp/util/build_utils.py',
|
||||
'<(DEPTH)/build/android/pylib/utils/command_option_parser.py',
|
||||
],
|
||||
'action': [
|
||||
'python', '<(DEPTH)/build/android/gyp/emma_instr.py',
|
||||
'<(instr_action)',
|
||||
'--input-path=<(input_path)',
|
||||
'--output-path=<(output_path)',
|
||||
'--stamp=<(stamp_path)',
|
||||
'<@(extra_instr_args)',
|
||||
]
|
||||
}
|
||||
@@ -1,39 +0,0 @@
|
||||
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# This file is meant to be included into an action to provide a rule to
|
||||
# run lint on java/class files.
|
||||
|
||||
{
|
||||
'action_name': 'lint_<(_target_name)',
|
||||
'message': 'Linting <(_target_name)',
|
||||
'variables': {
|
||||
'conditions': [
|
||||
['chromium_code != 0 and android_lint != 0 and never_lint == 0', {
|
||||
'is_enabled': '--enable',
|
||||
}, {
|
||||
'is_enabled': '',
|
||||
}]
|
||||
]
|
||||
},
|
||||
'inputs': [
|
||||
'<(DEPTH)/build/android/gyp/util/build_utils.py',
|
||||
'<(DEPTH)/build/android/gyp/lint.py',
|
||||
'<(DEPTH)/build/android/lint/suppressions.xml',
|
||||
'<(DEPTH)/build/android/AndroidManifest.xml',
|
||||
],
|
||||
'action': [
|
||||
'python', '<(DEPTH)/build/android/gyp/lint.py',
|
||||
'--lint-path=<(android_sdk_root)/tools/lint',
|
||||
'--config-path=<(DEPTH)/build/android/lint/suppressions.xml',
|
||||
'--processed-config-path=<(config_path)',
|
||||
'--manifest-path=<(DEPTH)/build/android/AndroidManifest.xml',
|
||||
'--result-path=<(result_path)',
|
||||
'--product-dir=<(PRODUCT_DIR)',
|
||||
'--src-dirs=>(src_dirs)',
|
||||
'--classes-dir=<(classes_dir)',
|
||||
'--stamp=<(stamp_path)',
|
||||
'<(is_enabled)',
|
||||
],
|
||||
}
|
||||
@@ -1,85 +0,0 @@
|
||||
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
{
|
||||
'variables': {
|
||||
'version_py_path': 'version.py',
|
||||
'instaweb_path': '<(DEPTH)/net/instaweb',
|
||||
'version_path': '<(instaweb_path)/public/VERSION',
|
||||
'version_h_in_path': '<(instaweb_path)/public/version.h.in',
|
||||
'public_path' : 'net/instaweb/public',
|
||||
'version_h_path': '<(SHARED_INTERMEDIATE_DIR)/<(public_path)/version.h',
|
||||
'lastchange_out_path': '<(SHARED_INTERMEDIATE_DIR)/build/LASTCHANGE',
|
||||
},
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'lastchange',
|
||||
'type': 'none',
|
||||
'variables': {
|
||||
'default_lastchange_path': '../LASTCHANGE.in',
|
||||
},
|
||||
'actions': [
|
||||
{
|
||||
'action_name': 'lastchange',
|
||||
'inputs': [
|
||||
# Note: <(default_lastchange_path) is optional,
|
||||
# so it doesn't show up in inputs.
|
||||
'<(DEPTH)/build/lastchange.sh',
|
||||
],
|
||||
'outputs': [
|
||||
'<(lastchange_out_path)',
|
||||
],
|
||||
'action': [
|
||||
'/bin/sh', '<@(_inputs)',
|
||||
'<(DEPTH)',
|
||||
'-o', '<(lastchange_out_path)',
|
||||
'-d', '<(default_lastchange_path)',
|
||||
],
|
||||
'message': 'Extracting last change to <(lastchange_out_path)',
|
||||
'process_outputs_as_sources': '1',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
'target_name': 'mod_pagespeed_version_header',
|
||||
'type': 'none',
|
||||
'dependencies': [
|
||||
'lastchange',
|
||||
],
|
||||
'actions': [
|
||||
{
|
||||
'action_name': 'version_header',
|
||||
'inputs': [
|
||||
'<(version_path)',
|
||||
'<(lastchange_out_path)',
|
||||
'<(version_h_in_path)',
|
||||
],
|
||||
'outputs': [
|
||||
'<(version_h_path)',
|
||||
],
|
||||
'action': [
|
||||
'python',
|
||||
'<(version_py_path)',
|
||||
'-f', '<(version_path)',
|
||||
'-f', '<(lastchange_out_path)',
|
||||
'<(version_h_in_path)',
|
||||
'<@(_outputs)',
|
||||
],
|
||||
'message': 'Generating version header file: <@(_outputs)',
|
||||
},
|
||||
],
|
||||
'direct_dependent_settings': {
|
||||
'include_dirs': [
|
||||
'<(SHARED_INTERMEDIATE_DIR)',
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
# Local Variables:
|
||||
# tab-width:2
|
||||
# indent-tabs-mode:nil
|
||||
# End:
|
||||
# vim: set expandtab tabstop=2 shiftwidth=2:
|
||||
@@ -1,57 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
#
|
||||
|
||||
"""Compiler version checking tool for clang.
|
||||
|
||||
(Based on corresponding tool for gcc in Chromium build system).
|
||||
|
||||
Prints X*100 + Y if $CXX is pointing to clang X.Y.*. Prints 0 otherwise.
|
||||
Note that this output convention is different from compiler_version.py's. This
|
||||
also never returns a failing status, since we want to run this even on systems
|
||||
without clang, and gyp will complain on a non-successful status.
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def GetVersion(compiler):
|
||||
try:
|
||||
compiler = compiler + " --version"
|
||||
pipe = subprocess.Popen(compiler, shell=True,
|
||||
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
output, error = pipe.communicate()
|
||||
if pipe.returncode:
|
||||
raise subprocess.CalledProcessError(pipe.returncode, compiler)
|
||||
|
||||
result = re.search(r"clang version (\d+)\.?(\d+)?", output)
|
||||
if result is None:
|
||||
return "0"
|
||||
minor_version = result.group(2)
|
||||
if minor_version is None:
|
||||
minor_version = "0"
|
||||
return str(int(result.group(1)) * 100 + int(minor_version))
|
||||
except Exception, e:
|
||||
if error:
|
||||
sys.stderr.write(error)
|
||||
print >> sys.stderr, "clang_version.py failed to execute:", compiler
|
||||
print >> sys.stderr, e
|
||||
return "0"
|
||||
|
||||
|
||||
def main():
  """Prints the clang version of $CXX, or "0" when CXX is unset."""
  # Honor the CXX environment variable so users can point us at their
  # compiler without touching gyp flags.
  cxx = os.getenv("CXX", None)
  if cxx:
    print(GetVersion(cxx))
  else:
    print("0")


if __name__ == "__main__":
  main()
  # Always succeed; gyp complains on a non-zero status.
  sys.exit(0)
|
||||
@@ -1,178 +0,0 @@
|
||||
# Copyright 2009 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
{
|
||||
'variables': {
|
||||
# Make sure we link statically so everything gets linked into a
|
||||
# single shared object.
|
||||
'library': 'static_library',
|
||||
|
||||
# We're building a shared library, so everything needs to be built
|
||||
# with Position-Independent Code.
|
||||
'linux_fpic': 1,
|
||||
|
||||
'instaweb_src_root': 'net/instaweb',
|
||||
|
||||
# Define the overridable use_system_libs variable in its own
|
||||
# nested block, so it's available for use in the conditions block
|
||||
# below.
|
||||
'variables': {
|
||||
'use_system_libs%': 0,
|
||||
},
|
||||
|
||||
# Which versions development is usually done with. These version will
|
||||
# get -Werror
|
||||
'gcc_devel_version%': '46',
|
||||
'gcc_devel_version2%': '48',
|
||||
|
||||
# We need inter-process mutexes to support POSIX shared memory, and they're
|
||||
# unfortunately not supported on some common systems.
|
||||
'support_posix_shared_mem%': 0,
|
||||
|
||||
# Detect clang being configured via CXX envvar, which is the easiest
|
||||
# way for our users to change the compiler (since gclient gets in
|
||||
# the way of tweaking gyp flags directly).
|
||||
'clang_version':
|
||||
'<!(python <(DEPTH)/build/clang_version.py)',
|
||||
|
||||
'conditions': [
|
||||
# TODO(morlovich): AIX, Solaris, FreeBSD10?
|
||||
['OS == "linux"', {
|
||||
'support_posix_shared_mem': 1
|
||||
}],
|
||||
['use_system_libs==1', {
|
||||
'use_system_apache_dev': 1,
|
||||
'use_system_icu': 1,
|
||||
'use_system_libjpeg': 1,
|
||||
'use_system_libpng': 1,
|
||||
'use_system_opencv': 1,
|
||||
'use_system_openssl': 1,
|
||||
'use_system_zlib': 1,
|
||||
},{
|
||||
'use_system_apache_dev%': 0,
|
||||
}],
|
||||
],
|
||||
},
|
||||
'includes': [
|
||||
# Import base Chromium build system, and pagespeed customizations of it.
|
||||
'../third_party/chromium/src/build/common.gypi',
|
||||
'pagespeed_overrides.gypi',
|
||||
],
|
||||
'target_defaults': {
|
||||
'variables': {
|
||||
# Make this available here as well.
|
||||
'use_system_libs%': 0,
|
||||
},
|
||||
'conditions': [
|
||||
['support_posix_shared_mem == 1', {
|
||||
'defines': [ 'PAGESPEED_SUPPORT_POSIX_SHARED_MEM', ],
|
||||
}],
|
||||
['OS == "linux"', {
|
||||
# Disable -Werror when not using the version of gcc that development
|
||||
# is generally done with, to avoid breaking things for users with
|
||||
# something older or newer (which produces different warnings).
|
||||
'conditions': [
|
||||
['<(gcc_version) != <(gcc_devel_version) and '
|
||||
'<(gcc_version) != <(gcc_devel_version2)', {
|
||||
'cflags!': ['-Werror']
|
||||
}],
|
||||
['<(gcc_version) < 48 and (clang_version == 0)', {
|
||||
'cflags+': '<!(echo gcc \< 4.8 is too old and no longer supported; false)'
|
||||
}]
|
||||
],
|
||||
'cflags': [
|
||||
# Our dependency on OpenCV need us to turn on exceptions.
|
||||
'-fexceptions',
|
||||
# Now we are using exceptions. -fno-asynchronous-unwind-tables is
|
||||
# set in libpagespeed's common.gypi. Now enable it.
|
||||
'-fasynchronous-unwind-tables',
|
||||
# We'd like to add '-Wtype-limits', but this does not work on
|
||||
# earlier versions of g++ on supported operating systems.
|
||||
#
|
||||
# Use -DFORTIFY_SOURCE to add extra checks to functions like printf,
|
||||
# and bounds checking to copies.
|
||||
'-D_FORTIFY_SOURCE=2',
|
||||
],
|
||||
'cflags_cc!': [
|
||||
# Newer Chromium build adds -Wsign-compare which we have some
|
||||
# difficulty with. Remove it for now.
|
||||
'-Wsign-compare',
|
||||
'-fno-rtti', # Same reason as using -frtti below.
|
||||
],
|
||||
'cflags_cc': [
|
||||
'-frtti', # Hardy's g++ 4.2 <trl/function> uses typeid
|
||||
'-D_FORTIFY_SOURCE=2',
|
||||
],
|
||||
'defines!': [
|
||||
# testing/gtest.gyp defines GTEST_HAS_RTTI=0 for itself and all deps.
|
||||
# This breaks when we turn rtti on, so must be removed.
|
||||
'GTEST_HAS_RTTI=0',
|
||||
# third_party/protobuf/protobuf.gyp defines GOOGLE_PROTOBUF_NO_RTTI
|
||||
# for itself and all deps. I assume this is just a ticking time bomb
|
||||
# like GTEST_HAS_RTTI=0 was, so remove it as well.
|
||||
'GOOGLE_PROTOBUF_NO_RTTI',
|
||||
],
|
||||
'defines': [
|
||||
'GTEST_HAS_RTTI=1', # gtest requires this set to indicate RTTI on.
|
||||
],
|
||||
# Disable -z,defs in linker.
|
||||
# This causes mod_pagespeed.so to fail because it doesn't link apache
|
||||
# libraries.
|
||||
'ldflags!': [
|
||||
'-Wl,-z,defs',
|
||||
],
|
||||
}],
|
||||
['OS == "mac"', {
|
||||
'xcode_settings':{
|
||||
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES', # -fexceptions
|
||||
'GCC_ENABLE_CPP_RTTI': 'YES', # -frtti
|
||||
|
||||
# The Google CSS parser escapes from functions without
|
||||
# returning anything. Only with flow analysis that is,
|
||||
# evidently, beyond the scope of the g++ configuration in
|
||||
# MacOS, do we see those paths cannot be reached.
|
||||
'OTHER_CFLAGS': ['-funsigned-char', '-Wno-error'],
|
||||
},
|
||||
}],
|
||||
['use_system_libs == 0', {
|
||||
'ldflags+': [
|
||||
'-static-libstdc++',
|
||||
'-static-libgcc',
|
||||
]
|
||||
}],
|
||||
],
|
||||
|
||||
'defines': [ # See https://gcc.gnu.org/onlinedocs/libstdc++/manual/using_dual_abi.html
|
||||
'_GLIBCXX_USE_CXX11_ABI=0' ],
|
||||
|
||||
'cflags_cc+': [
|
||||
'-std=gnu++0x'
|
||||
],
|
||||
|
||||
# Permit building us with coverage information
|
||||
'configurations': {
|
||||
'Debug_Coverage': {
|
||||
'inherit_from': ['Debug'],
|
||||
'cflags': [
|
||||
'-ftest-coverage',
|
||||
'-fprofile-arcs',
|
||||
],
|
||||
'ldflags': [
|
||||
# takes care of -lgcov for us, but can be in a build configuration
|
||||
'-ftest-coverage -fprofile-arcs',
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -1,61 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
#
|
||||
# This version contains a bugfix for the compiler returning a single digit
|
||||
# version number, as is the case for gcc 5.
|
||||
|
||||
"""Compiler version checking tool for gcc
|
||||
|
||||
Print gcc version as XY if you are running gcc X.Y.*.
|
||||
This is used to tweak build flags for gcc 4.4.
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
def GetVersion(compiler):
  """Returns the gcc version as "XY" for gcc X.Y.*, or "" on failure.

  Args:
    compiler: command to run (could be something tricky like "distcc g++").

  Fix: `gcc_error` was only assigned inside the try-block, so if Popen
  itself raised, the except-handler's `if gcc_error:` crashed with
  NameError; it is now pre-initialized. Unparseable -dumpversion output is
  reported explicitly instead of via an incidental AttributeError.
  """
  gcc_error = None
  try:
    compiler = compiler + " -dumpversion"
    pipe = subprocess.Popen(compiler, shell=True, universal_newlines=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    gcc_output, gcc_error = pipe.communicate()
    if pipe.returncode:
      raise subprocess.CalledProcessError(pipe.returncode, compiler)

    result = re.match(r"(\d+)\.?(\d+)?", gcc_output)
    if result is None:
      raise ValueError("unexpected -dumpversion output: %r" % gcc_output)
    minor_version = result.group(2)
    if minor_version is None:
      # gcc 5+ may report a single-digit version ("5"); treat minor as 0.
      minor_version = "0"
    return result.group(1) + minor_version
  except Exception as e:
    if gcc_error:
      sys.stderr.write(gcc_error)
    sys.stderr.write("compiler_version.py failed to execute: %s\n" % compiler)
    sys.stderr.write("%s\n" % e)
    return ""
||||
|
||||
def main():
  """Prints the version of $CXX (falling back to g++); 0 on success."""
  # A CXX environment variable, when present, names the compiler to probe.
  cxx = os.getenv("CXX", None)
  if cxx:
    version = GetVersion(cxx)
    if version != "":
      print(version)
      return 0
  else:
    # No CXX set: probe whatever g++ is on PATH instead.
    version = GetVersion("g++")
    if version != "":
      print(version)
      return 0

  return 1


if __name__ == "__main__":
  sys.exit(main())
|
||||
@@ -1,15 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Writes True if the argument is a directory."""
|
||||
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
def main():
  """Writes "True"/"False" to stdout depending on whether argv[1] is a
  directory; always returns 0."""
  target = sys.argv[1]
  sys.stdout.write(str(os.path.isdir(target)))
  return 0


if __name__ == '__main__':
  sys.exit(main())
|
||||
@@ -1,18 +0,0 @@
|
||||
# Copyright 2009 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# Chromium expects this file to be here, but for our (Page Speed) purposes, it
|
||||
# doesn't need to actually do anything.
|
||||
|
||||
{}
|
||||
@@ -1,125 +0,0 @@
|
||||
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# This gypi file defines the patterns used for determining whether a
|
||||
# file is excluded from the build on a given platform. It is
|
||||
# included by common.gypi for chromium_code.
|
||||
|
||||
{
|
||||
'target_conditions': [
|
||||
['OS!="win" or >(nacl_untrusted_build)==1', {
|
||||
'sources/': [ ['exclude', '_win(_browsertest|_unittest)?\\.(h|cc)$'],
|
||||
['exclude', '(^|/)win/'],
|
||||
['exclude', '(^|/)win_[^/]*\\.(h|cc)$'] ],
|
||||
}],
|
||||
['OS!="mac" or >(nacl_untrusted_build)==1', {
|
||||
'sources/': [ ['exclude', '_(cocoa|mac)(_unittest)?\\.(h|cc|mm?)$'],
|
||||
['exclude', '(^|/)(cocoa|mac)/'] ],
|
||||
}],
|
||||
['OS!="ios" or >(nacl_untrusted_build)==1', {
|
||||
'sources/': [ ['exclude', '_ios(_unittest)?\\.(h|cc|mm?)$'],
|
||||
['exclude', '(^|/)ios/'] ],
|
||||
}],
|
||||
['(OS!="mac" and OS!="ios") or >(nacl_untrusted_build)==1', {
|
||||
'sources/': [ ['exclude', '\\.mm?$' ] ],
|
||||
}],
|
||||
# Do not exclude the linux files on *BSD since most of them can be
|
||||
# shared at this point.
|
||||
# In case a file is not needed, it is going to be excluded later on.
|
||||
# TODO(evan): the above is not correct; we shouldn't build _linux
|
||||
# files on non-linux.
|
||||
['OS!="linux" and OS!="openbsd" and OS!="freebsd" or >(nacl_untrusted_build)==1', {
|
||||
'sources/': [
|
||||
['exclude', '_linux(_unittest)?\\.(h|cc)$'],
|
||||
['exclude', '(^|/)linux/'],
|
||||
],
|
||||
}],
|
||||
['OS!="android" or _toolset=="host"', {
|
||||
'sources/': [
|
||||
['exclude', '_android(_unittest)?\\.cc$'],
|
||||
['exclude', '(^|/)android/'],
|
||||
],
|
||||
}],
|
||||
['OS=="win" and >(nacl_untrusted_build)==0', {
|
||||
'sources/': [
|
||||
['exclude', '_posix(_unittest)?\\.(h|cc)$'],
|
||||
['exclude', '(^|/)posix/'],
|
||||
],
|
||||
}],
|
||||
['<(chromeos)!=1 or >(nacl_untrusted_build)==1', {
|
||||
'sources/': [
|
||||
['exclude', '_chromeos(_unittest)?\\.(h|cc)$'],
|
||||
['exclude', '(^|/)chromeos/'],
|
||||
],
|
||||
}],
|
||||
['>(nacl_untrusted_build)==0', {
|
||||
'sources/': [
|
||||
['exclude', '_nacl(_unittest)?\\.(h|cc)$'],
|
||||
],
|
||||
}],
|
||||
['OS!="linux" and OS!="openbsd" and OS!="freebsd" or >(nacl_untrusted_build)==1', {
|
||||
'sources/': [
|
||||
['exclude', '_xdg(_unittest)?\\.(h|cc)$'],
|
||||
],
|
||||
}],
|
||||
['<(use_x11)!=1 or >(nacl_untrusted_build)==1', {
|
||||
'sources/': [
|
||||
['exclude', '_(x|x11)(_unittest)?\\.(h|cc)$'],
|
||||
['exclude', '(^|/)x11_[^/]*\\.(h|cc)$'],
|
||||
['exclude', '(^|/)x11/'],
|
||||
['exclude', '(^|/)x/'],
|
||||
],
|
||||
}],
|
||||
['<(toolkit_uses_gtk)!=1 or >(nacl_untrusted_build)==1', {
|
||||
'sources/': [
|
||||
['exclude', '_gtk(_browsertest|_unittest)?\\.(h|cc)$'],
|
||||
['exclude', '(^|/)gtk/'],
|
||||
['exclude', '(^|/)gtk_[^/]*\\.(h|cc)$'],
|
||||
],
|
||||
}],
|
||||
['<(toolkit_views)==0 or >(nacl_untrusted_build)==1', {
|
||||
'sources/': [ ['exclude', '_views\\.(h|cc)$'] ]
|
||||
}],
|
||||
['<(use_aura)==0 or >(nacl_untrusted_build)==1', {
|
||||
'sources/': [ ['exclude', '_aura(_browsertest|_unittest)?\\.(h|cc)$'],
|
||||
['exclude', '(^|/)aura/'],
|
||||
]
|
||||
}],
|
||||
['<(use_aura)==0 or <(use_x11)==0 or >(nacl_untrusted_build)==1', {
|
||||
'sources/': [ ['exclude', '_aurax11(_browsertest|_unittest)?\\.(h|cc)$'] ]
|
||||
}],
|
||||
['<(use_aura)==0 or OS!="win" or >(nacl_untrusted_build)==1', {
|
||||
'sources/': [ ['exclude', '_aurawin\\.(h|cc)$'] ]
|
||||
}],
|
||||
['<(use_aura)==0 or OS!="linux" or >(nacl_untrusted_build)==1', {
|
||||
'sources/': [ ['exclude', '_auralinux\\.(h|cc)$'] ]
|
||||
}],
|
||||
['<(use_ash)==0 or >(nacl_untrusted_build)==1', {
|
||||
'sources/': [ ['exclude', '_ash(_browsertest|_unittest)?\\.(h|cc)$'],
|
||||
['exclude', '(^|/)ash/'],
|
||||
]
|
||||
}],
|
||||
['<(use_ash)==0 or OS!="win" or >(nacl_untrusted_build)==1', {
|
||||
'sources/': [ ['exclude', '_ashwin\\.(h|cc)$'] ]
|
||||
}],
|
||||
['<(use_ozone)==0 or >(nacl_untrusted_build)==1', {
|
||||
'sources/': [ ['exclude', '_ozone(_browsertest|_unittest)?\\.(h|cc)$'],
|
||||
['exclude', '(^|/)ozone/'],
|
||||
]
|
||||
}],
|
||||
['<(use_ozone_evdev)==0 or >(nacl_untrusted_build)==1', {
|
||||
'sources/': [ ['exclude', '_evdev(_browsertest|_unittest)?\\.(h|cc)$'],
|
||||
['exclude', '(^|/)evdev/'],
|
||||
]
|
||||
}],
|
||||
['<(ozone_platform_dri)==0 or >(nacl_untrusted_build)==1', {
|
||||
'sources/': [ ['exclude', '_dri(_browsertest|_unittest)?\\.(h|cc)$'],
|
||||
['exclude', '(^|/)dri/'],
|
||||
]
|
||||
}],
|
||||
['<(use_pango)==0', {
|
||||
'sources/': [ ['exclude', '(^|_)pango(_util|_browsertest|_unittest)?\\.(h|cc)$'], ],
|
||||
}],
|
||||
]
|
||||
}
|
||||
@@ -1,20 +0,0 @@
|
||||
#!/bin/bash
# Rewrites a .proto file for the open-source tree layout and then runs
# protoc on the rewritten copy.
#
# Usage: <script> <proto_in> <proto_out> <protoc_path> [<protoc_opts> ...]

set -e  # Abort on the first failing command.
set -u  # Treat references to unset variables as errors.

if [ $# -lt 3 ];then
  echo "Usage: $(basename $0) <proto_in> <proto_out> <protoc_path>"\
    "[<protoc_opts> ...]" >&2
  exit 1
fi

proto_in=$1
proto_out=$2
protoc=$3
shift 3
# Remaining arguments ("$@") are passed straight through to protoc.

# Remap internal import paths ("third_party/pagespeed/" -> "pagespeed/")
# and strip the "// [opensource] " comment markers.
sed -e 's!"third_party/pagespeed/!"pagespeed/!; s!// \[opensource\] !!' \
  < "$proto_in" > "$proto_out"

# Replace this shell with protoc so its exit status is our exit status.
exec "$protoc" "$@" "$proto_out"
|
||||
@@ -1,71 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""
|
||||
This file emits the list of reasons why a particular build needs to be clobbered
|
||||
(or a list of 'landmines').
|
||||
"""
|
||||
|
||||
import optparse
|
||||
import sys
|
||||
|
||||
import landmine_utils
|
||||
|
||||
|
||||
builder = landmine_utils.builder
|
||||
distributor = landmine_utils.distributor
|
||||
gyp_defines = landmine_utils.gyp_defines
|
||||
gyp_msvs_version = landmine_utils.gyp_msvs_version
|
||||
platform = landmine_utils.platform
|
||||
|
||||
|
||||
def print_landmines(target):
  """
  ALL LANDMINES ARE EMITTED FROM HERE.
  target can be one of {'Release', 'Debug', 'Debug_x64', 'Release_x64'}.
  """
  # Each print emits one clobber reason. NOTE(review): the emitted strings
  # appear to be compared verbatim by the landmine machinery, so the
  # "obselete" typo below is left untouched — changing it would itself
  # trigger a clobber; confirm before fixing.
  if (distributor() == 'goma' and platform() == 'win32' and
      builder() == 'ninja'):
    print 'Need to clobber winja goma due to backend cwd cache fix.'
  if platform() == 'android':
    print 'Clobber: Autogen java file needs to be removed (issue 159173002)'
  if platform() == 'win' and builder() == 'ninja':
    print 'Compile on cc_unittests fails due to symbols removed in r185063.'
  if platform() == 'linux' and builder() == 'ninja':
    print 'Builders switching from make to ninja will clobber on this.'
  if platform() == 'mac':
    print 'Switching from bundle to unbundled dylib (issue 14743002).'
  if platform() in ('win', 'mac'):
    print ('Improper dependency for create_nmf.py broke in r240802, '
           'fixed in r240860.')
  if (platform() == 'win' and builder() == 'ninja' and
      gyp_msvs_version() == '2012' and
      gyp_defines().get('target_arch') == 'x64' and
      gyp_defines().get('dcheck_always_on') == '1'):
    print "Switched win x64 trybots from VS2010 to VS2012."
  if (platform() == 'win' and builder() == 'ninja' and
      gyp_msvs_version().startswith('2013')):
    print "Switched win from VS2010 to VS2013."
  # This landmine is unconditional: every platform clobbers for it.
  print 'Need to clobber everything due to an IDL change in r154579 (blink)'
  if (platform() != 'ios'):
    print 'Clobber to get rid of obselete test plugin after r248358'
|
||||
|
||||
|
||||
def main():
  """Parses -t/--target and emits the landmine list; returns 0."""
  parser = optparse.OptionParser()
  # Fix: this previously read `help=='Target ...'`, which compared the
  # builtin `help` against the string and passed the resulting bool as a
  # positional argument instead of setting the option's help text.
  parser.add_option('-t', '--target',
                    help='Target for which the landmines have to be emitted')

  options, args = parser.parse_args()

  if args:
    parser.error('Unknown arguments %s' % args)

  print_landmines(options.target)
  return 0


if __name__ == '__main__':
  sys.exit(main())
|
||||
@@ -1,42 +0,0 @@
|
||||
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# This file is meant to be included into an action to invoke grit in a
|
||||
# consistent manner. To use this the following variables need to be
|
||||
# defined:
|
||||
# grit_grd_file: string: grd file path
|
||||
# grit_out_dir: string: the output directory path
|
||||
|
||||
# It would be really nice to do this with a rule instead of actions, but it
|
||||
# would need to determine inputs and outputs via grit_info on a per-file
|
||||
# basis. GYP rules don’t currently support that. They could be extended to
|
||||
# do this, but then every generator would need to be updated to handle this.
|
||||
|
||||
{
|
||||
'variables': {
|
||||
'grit_cmd': ['python', '<(DEPTH)/tools/grit/grit.py'],
|
||||
'grit_resource_ids%': '<(DEPTH)/tools/gritsettings/resource_ids',
|
||||
# This makes it possible to add more defines in specific targets,
|
||||
# instead of build/common.gypi .
|
||||
'grit_additional_defines%': [],
|
||||
'grit_rc_header_format%': [],
|
||||
},
|
||||
'inputs': [
|
||||
'<!@pymod_do_main(grit_info <@(grit_defines) <@(grit_additional_defines) '
|
||||
'--inputs <(grit_grd_file) -f "<(grit_resource_ids)")',
|
||||
],
|
||||
'outputs': [
|
||||
'<!@pymod_do_main(grit_info <@(grit_defines) <@(grit_additional_defines) '
|
||||
'--outputs \'<(grit_out_dir)\' '
|
||||
'<(grit_grd_file) -f "<(grit_resource_ids)")',
|
||||
],
|
||||
'action': ['<@(grit_cmd)',
|
||||
'-i', '<(grit_grd_file)', 'build',
|
||||
'-f', '<(grit_resource_ids)',
|
||||
'-o', '<(grit_out_dir)',
|
||||
'<@(grit_defines)',
|
||||
'<@(grit_additional_defines)',
|
||||
'<@(grit_rc_header_format)'],
|
||||
'message': 'Generating resources from <(grit_grd_file)',
|
||||
}
|
||||
@@ -1,572 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# This script is wrapper for Chromium that adds some support for how GYP
|
||||
# is invoked by Chromium beyond what can be done in the gclient hooks.
|
||||
|
||||
# This was copied from the chromium repository at revision 256281. The only
|
||||
# change was adding back support and making default Makefile generation, instead
|
||||
# of ninja support. This was removed as discussed in crbug.com/348686.
|
||||
|
||||
import glob
|
||||
import gyp_helper
|
||||
import json
|
||||
import os
|
||||
import pipes
|
||||
import shlex
|
||||
import shutil
|
||||
import subprocess
|
||||
import string
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
|
||||
|
||||
sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
|
||||
import gyp
|
||||
|
||||
# Assume this file is in a one-level-deep subdirectory of the source root.
|
||||
SRC_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||
|
||||
# Add paths so that pymod_do_main(...) can import files.
|
||||
sys.path.insert(1, os.path.join(chrome_src, 'tools'))
|
||||
sys.path.insert(1, os.path.join(chrome_src, 'tools', 'generate_shim_headers'))
|
||||
sys.path.insert(1, os.path.join(chrome_src, 'tools', 'grit'))
|
||||
sys.path.insert(1, os.path.join(chrome_src, 'chrome', 'tools', 'build'))
|
||||
sys.path.insert(1, os.path.join(chrome_src, 'native_client', 'build'))
|
||||
sys.path.insert(1, os.path.join(chrome_src, 'native_client_sdk', 'src',
|
||||
'build_tools'))
|
||||
sys.path.insert(1, os.path.join(chrome_src, 'remoting', 'tools', 'build'))
|
||||
sys.path.insert(1, os.path.join(chrome_src, 'third_party', 'liblouis'))
|
||||
sys.path.insert(1, os.path.join(chrome_src, 'third_party', 'WebKit',
|
||||
'Source', 'build', 'scripts'))
|
||||
|
||||
# On Windows, Psyco shortens warm runs of build/gyp_chromium by about
|
||||
# 20 seconds on a z600 machine with 12 GB of RAM, from 90 down to 70
|
||||
# seconds. Conversely, memory usage of build/gyp_chromium with Psyco
|
||||
# maxes out at about 158 MB vs. 132 MB without it.
|
||||
#
|
||||
# Psyco uses native libraries, so we need to load a different
|
||||
# installation depending on which OS we are running under. It has not
|
||||
# been tested whether using Psyco on our Mac and Linux builds is worth
|
||||
# it (the GYP running time is a lot shorter, so the JIT startup cost
|
||||
# may not be worth it).
|
||||
if sys.platform == 'win32':
|
||||
try:
|
||||
sys.path.insert(0, os.path.join(chrome_src, 'third_party', 'psyco_win32'))
|
||||
import psyco
|
||||
except:
|
||||
psyco = None
|
||||
else:
|
||||
psyco = None
|
||||
|
||||
|
||||
def GetSupplementalFiles():
  """Returns the supplement.gypi files that are included in all GYP sources.

  These live one directory below the Chromium source root.
  """
  pattern = os.path.join(chrome_src, '*', 'supplement.gypi')
  return glob.glob(pattern)
|
||||
|
||||
|
||||
def FormatKeyForGN(key):
  """Rewrites a GYP dictionary key into a valid GN identifier.

  GYP keys can be almost anything; GN identifiers cannot. Every character
  that is not an ASCII letter is replaced with an underscore.
  """
  letters = string.ascii_letters
  pieces = []
  for ch in key:
    pieces.append(ch if ch in letters else '_')
  return ''.join(pieces)
|
||||
|
||||
|
||||
def EscapeStringForGN(s):
  """Converts s into a double-quoted GN string literal.

  Backslash, dollar and double-quote are backslash-escaped; backslash is
  handled first so the escapes it introduces are not re-escaped.
  """
  escaped = s.replace('\\', '\\\\').replace('$', '\\$').replace('"', '\\"')
  return '"' + escaped + '"'
|
||||
|
||||
|
||||
def ProcessGypDefinesItems(items):
  """Converts 'key=value' / 'key' strings into a list of (key, value) pairs.

  Keys are run through FormatKeyForGN (GYP allows hyphens; we don't). A
  bare key with no '=' is treated as a boolean and mapped to the string
  '1', so 'foo' and 'foo=1' yield the same definition.
  """
  pairs = []
  for entry in items:
    name, sep, value = entry.partition('=')
    key = FormatKeyForGN(name)
    pairs.append((key, value) if sep else (key, '1'))
  return pairs
|
||||
|
||||
|
||||
def GetGypVarsForGN(supplemental_files):
  """Returns a dictionary of all GYP vars that we will be passing to GN."""
  # Find the .gyp directory in the user's home directory.
  home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None)
  if home_dot_gyp:
    home_dot_gyp = os.path.expanduser(home_dot_gyp)
  if not home_dot_gyp:
    # No explicit config dir: probe $HOME (and %USERPROFILE% on Windows)
    # for an existing ~/.gyp directory.
    home_vars = ['HOME']
    if sys.platform in ('cygwin', 'win32'):
      home_vars.append('USERPROFILE')
    for home_var in home_vars:
      home = os.getenv(home_var)
      if home != None:
        home_dot_gyp = os.path.join(home, '.gyp')
        if not os.path.exists(home_dot_gyp):
          home_dot_gyp = None
        else:
          break

  if home_dot_gyp:
    include_gypi = os.path.join(home_dot_gyp, "include.gypi")
    if os.path.exists(include_gypi):
      # NOTE(review): += mutates the caller's supplemental_files list —
      # confirm callers do not reuse the list afterwards.
      supplemental_files += [include_gypi]

  # GYP defines from the supplemental.gypi files.
  supp_items = []
  for supplement in supplemental_files:
    with open(supplement, 'r') as f:
      try:
        # .gypi files are Python literals; evaluate with builtins disabled.
        file_data = eval(f.read(), {'__builtins__': None}, None)
      except SyntaxError, e:
        # Re-raise with the offending filename attached for a better error.
        e.filename = os.path.abspath(supplement)
        raise
      variables = file_data.get('variables', [])
      for v in variables:
        supp_items += [(FormatKeyForGN(v), str(variables[v]))]

  # GYP defines from the environment.
  env_items = ProcessGypDefinesItems(
      shlex.split(os.environ.get('GYP_DEFINES', '')))

  # GYP defines from the command line. We can't use optparse since we want
  # to ignore all arguments other than "-D".
  cmdline_input_items = []
  # Index-based walk so "-D value" (two tokens) can peek at the next arg.
  for i in range(len(sys.argv))[1:]:
    if sys.argv[i].startswith('-D'):
      if sys.argv[i] == '-D' and i + 1 < len(sys.argv):
        cmdline_input_items += [sys.argv[i + 1]]
      elif len(sys.argv[i]) > 2:
        cmdline_input_items += [sys.argv[i][2:]]
  cmdline_items = ProcessGypDefinesItems(cmdline_input_items)

  # Later sources win: command line overrides environment overrides gypi.
  vars_dict = dict(supp_items + env_items + cmdline_items)
  # It's not possible to set a default value for cpu_arch in GN, so do it here
  # for now (http://crbug.com/344767).
  if vars_dict.get('OS') == 'android' and not 'target_arch' in vars_dict:
    vars_dict['target_arch'] = 'arm'
  return vars_dict
|
||||
|
||||
|
||||
def GetOutputDirectory():
  """Returns the output directory that GYP will use.

  Precedence: a -Goutput_dir= flag on the command line, then an
  output_dir= entry in $GYP_GENERATOR_FLAGS, then the default "out".
  (We can't use optparse here: every argument other than -G must be
  ignored.)
  """
  cmdline_prefix = '-Goutput_dir='
  for arg in sys.argv[1:]:
    if arg.startswith(cmdline_prefix):
      return arg[len(cmdline_prefix):]

  env_prefix = 'output_dir='
  for flag in shlex.split(os.environ.get('GYP_GENERATOR_FLAGS', '')):
    if flag.startswith(env_prefix):
      return flag[len(env_prefix):]

  return "out"
|
||||
|
||||
|
||||
def GetArgsStringForGN(vars_dict):
  """Returns the args to pass to GN.
  Based on a subset of the GYP variables that have been rewritten a bit."""
  gn_args = ''

  # Note: These are the additional flags passed to various builds by builders
  # on the main waterfall. We'll probably need to add these at some point:
  # mac_strip_release=1 http://crbug.com/330301
  # linux_dump_symbols=0 http://crbug.com/330300
  # host_os=linux Probably can skip, GN knows the host OS.
  # order_text_section=<path> http://crbug.com/330299
  # chromium_win_pch=0 http://crbug.com/297678
  # chromium_ios_signing=0 http://crbug.com/330302
  # use_allocator=tcmalloc http://crbug.com/330303, 345554
  # release_extra_flags=... http://crbug.com/330305

  # These tuples of (key, value, gn_arg_string) use the gn_arg_string for
  # gn when the key is set to the given value in the GYP arguments.
  remap_cases = [
      ('android_webview_build', '1', 'is_android_webview_build=true'),
      ('branding', 'Chrome', 'is_chrome_branded=true'),
      ('build_for_tool', 'drmemory', 'disable_iterator_debugging=true'),
      ('build_for_tool', 'tsan', 'disable_iterator_debugging=true'),
      ('buildtype', 'Official', 'is_official_build=true'),
      ('component', 'shared_library', 'is_component_build=true'),
      ('clang', '1', 'is_clang=true'),
      ('clang_use_chrome_plugins', '0', 'clang_use_chrome_plugins=false'),
      ('disable_glibcxx_debug', '1', 'disable_iterator_debugging=true'),
      ('enable_mdns', '0', 'enable_mdns=false'),
      ('enable_mdns', '1', 'enable_mdns=true'),
      ('enable_plugins', '0', 'enable_plugins=false'),
      ('enable_plugins', '1', 'enable_plugins=true'),
      ('target_arch', 'ia32', 'cpu_arch="x86"'),
      ('target_arch', 'x64', 'cpu_arch="x64" force_win64=true'),
      ('target_arch', 'arm', 'cpu_arch="arm"'),
      ('target_arch', 'mipsel', 'cpu_arch="mipsel"'),
      ('fastbuild', '0', 'symbol_level=2'),
      ('fastbuild', '1', 'symbol_level=1'),
      ('fastbuild', '2', 'symbol_level=0'),
      ('OS', 'ios', 'os="ios"'),
      ('OS', 'android', 'os="android"'),
      ('chromeos', '1', 'os="chromeos"'),
      ('use_aura', '1', 'use_aura=true'),
      ('use_goma', '1', 'use_goma=true'),
      ('use_openssl', '0', 'use_openssl=false'),
      ('use_openssl', '1', 'use_openssl=true'),
      ('asan', '1', 'is_asan=true'),
      ('lsan', '1', 'is_lsan=true'),
      ('msan', '1', 'is_msan=true'),
      ('tsan', '1', 'is_tsan=true'),
  ]
  for i in remap_cases:
    if i[0] in vars_dict and vars_dict[i[0]] == i[1]:
      gn_args += ' ' + i[2]

  # These string arguments get passed directly as GN strings.
  for v in ['android_src', 'arm_float_abi', 'ios_deployment_target',
            'ios_sdk_path', 'windows_sdk_path']:
    if v in vars_dict:
      gn_args += ' ' + v + '=' + EscapeStringForGN(vars_dict[v])

  # gomadir is renamed goma_dir in the GN build.
  if 'gomadir' in vars_dict:
    gn_args += ' goma_dir=%s' % EscapeStringForGN(vars_dict['gomadir'])

  # Set the "use_ios_simulator" flag if the ios_sdk_path is set.
  if 'ios_sdk_path' in vars_dict:
    if os.path.basename(vars_dict['ios_sdk_path']).lower().startswith(
        'iphonesimulator'):
      gn_args += ' use_ios_simulator=true'
    else:
      gn_args += ' use_ios_simulator=false'

  # These arguments get passed directly as integers (avoiding the quoting and
  # escaping of the string ones above).
  for v in ['arm_version']:
    if v in vars_dict:
      gn_args += ' %s=%s' % (v, vars_dict[v])

  # Some other flags come from GYP environment variables.
  gyp_msvs_version = os.environ.get('GYP_MSVS_VERSION', '')
  if gyp_msvs_version:
    gn_args += ' visual_studio_version=' + EscapeStringForGN(gyp_msvs_version)
  gyp_msvs_override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH', '')
  if gyp_msvs_override_path:
    gn_args += ' visual_studio_path=' + \
        EscapeStringForGN(gyp_msvs_override_path)

  # Set the GYP flag so BUILD files know they're being invoked in GYP mode.
  gn_args += ' is_gyp=true'

  gyp_outdir = GetOutputDirectory()
  gn_args += ' gyp_output_dir=\"%s\"' % gyp_outdir

  # Leading space from the first append is stripped here.
  return gn_args.strip()
|
||||
|
||||
|
||||
def additional_include_files(supplemental_files, args=None):
  """Returns a list of additional (.gypi) files to include, without duplicating
  ones that are already specified on the command line. The list of supplemental
  include files is passed in as an argument.

  Args:
    supplemental_files: iterable of supplemental .gypi paths to consider.
    args: command-line argument list scanned for existing '-I<path>' flags.
        Defaults to no arguments. (Was a mutable default of []; a None
        sentinel avoids the shared-mutable-default pitfall while keeping the
        same behavior for all callers.)

  Returns:
    List of paths: common.gypi plus any supplemental files not already
    requested via -I flags (compared by os.path.realpath).
  """
  if args is None:
    args = []
  # Determine the include files specified on the command line.
  # This doesn't cover all the different option formats you can use,
  # but it's mainly intended to avoid duplicating flags on the automatic
  # makefile regeneration which only uses this format.
  specified_includes = set()
  for arg in args:
    if arg.startswith('-I') and len(arg) > 2:
      specified_includes.add(os.path.realpath(arg[2:]))

  result = []
  def AddInclude(path):
    # Compare canonical paths so '-I./a.gypi' and 'a.gypi' dedupe correctly.
    if os.path.realpath(path) not in specified_includes:
      result.append(path)

  # Always include common.gypi.
  AddInclude(os.path.join(script_dir, 'common.gypi'))

  # Optionally add supplemental .gypi files if present.
  for supplement in supplemental_files:
    AddInclude(supplement)

  return result
|
||||
|
||||
|
||||
def RunGN(vars_dict):
  """Runs GN, returning True if it succeeded, printing an error and returning
  false if not.

  Args:
    vars_dict: GYP variables dict, converted to GN args via
        GetArgsStringForGN.
  """

  # The binaries in platform-specific subdirectories in src/tools/gn/bin.
  gnpath = SRC_DIR + '/tools/gn/bin/'
  if sys.platform in ('cygwin', 'win32'):
    gnpath += 'win/gn.exe'
  elif sys.platform.startswith('linux'):
    # On Linux we have 32-bit and 64-bit versions.
    # getconf LONG_BIT prints the native word size; '64' anywhere in the
    # output selects the 64-bit binary.
    if subprocess.check_output(["getconf", "LONG_BIT"]).find("64") >= 0:
      gnpath += 'linux/gn'
    else:
      gnpath += 'linux/gn32'
  elif sys.platform == 'darwin':
    gnpath += 'mac/gn'
  else:
    print 'Unknown platform for GN: ', sys.platform
    return False

  print 'Generating gyp files from GN...'

  # Need to pass both the source root (the bots don't run this command from
  # within the source tree) as well as set the is_gyp value so the BUILD files
  # to know they're being run under GYP.
  args = [gnpath, 'gyp', '-q',
          '--root=' + chrome_src,
          '--args=' + GetArgsStringForGN(vars_dict),
          '--output=//' + GetOutputDirectory() + '/gn_build/']
  # Success is a zero exit status from the gn binary.
  return subprocess.call(args) == 0
|
||||
|
||||
|
||||
def GetDesiredVsToolchainHashes():
  """Returns the SHA1s of the Visual Studio toolchains we want installed.

  Reads toolchain_vs2013.hash (one hash per line) from the directory
  containing this script.
  """
  hash_file = os.path.join(script_dir, 'toolchain_vs2013.hash')
  with open(hash_file, 'rb') as hashes:
    contents = hashes.read()
  return contents.strip().splitlines()
|
||||
|
||||
|
||||
def DownloadVsToolChain():
  """Download the Visual Studio toolchain on Windows.

  If on Windows, request that depot_tools install/update the automatic
  toolchain, and then use it (unless opted-out) and return a tuple containing
  the x64 and x86 paths. Otherwise return None.

  Side effects: mutates GYP_MSVS_OVERRIDE_PATH, GYP_MSVS_VERSION,
  GYP_DEFINES, WINDOWSSDKDIR, WDK_DIR and PATH in os.environ.
  """
  vs2013_runtime_dll_dirs = None
  # DEPOT_TOOLS_WIN_TOOLCHAIN=0 opts out of the automatic toolchain.
  depot_tools_win_toolchain = \
      bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
  if sys.platform in ('win32', 'cygwin') and depot_tools_win_toolchain:
    import find_depot_tools
    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
    # The fetch script writes the toolchain layout to a temp JSON file.
    temp_handle, data_file = tempfile.mkstemp(suffix='.json')
    os.close(temp_handle)
    get_toolchain_args = [
        sys.executable,
        os.path.join(depot_tools_path,
                     'win_toolchain',
                     'get_toolchain_if_necessary.py'),
        '--output-json', data_file,
      ] + GetDesiredVsToolchainHashes()
    subprocess.check_call(get_toolchain_args)

    with open(data_file, 'r') as tempf:
      toolchain_data = json.load(tempf)
    os.unlink(data_file)

    toolchain = toolchain_data['path']
    version = toolchain_data['version']
    # Version strings ending in 'e' denote the Express edition.
    version_is_pro = version[-1] != 'e'
    win8sdk = toolchain_data['win8sdk']
    wdk = toolchain_data['wdk']
    # TODO(scottmg): The order unfortunately matters in these. They should be
    # split into separate keys for x86 and x64. (See CopyVsRuntimeDlls call
    # below). http://crbug.com/345992
    vs2013_runtime_dll_dirs = toolchain_data['runtime_dirs']

    os.environ['GYP_MSVS_OVERRIDE_PATH'] = toolchain
    os.environ['GYP_MSVS_VERSION'] = version
    # We need to make sure windows_sdk_path is set to the automated
    # toolchain value in GYP_DEFINES, but don't want to override any
    # other values already present there.
    gyp_defines_dict = gyp.NameValueListToDict(gyp.ShlexEnv('GYP_DEFINES'))
    gyp_defines_dict['windows_sdk_path'] = win8sdk
    # Re-serialize GYP_DEFINES with shell-safe quoting.
    os.environ['GYP_DEFINES'] = ' '.join('%s=%s' % (k, pipes.quote(str(v)))
        for k, v in gyp_defines_dict.iteritems())
    os.environ['WINDOWSSDKDIR'] = win8sdk
    os.environ['WDK_DIR'] = wdk
    # Include the VS runtime in the PATH in case it's not machine-installed.
    runtime_path = ';'.join(vs2013_runtime_dll_dirs)
    os.environ['PATH'] = runtime_path + ';' + os.environ['PATH']
    print('Using automatic toolchain in %s (%s edition).' % (
        toolchain, 'Pro' if version_is_pro else 'Express'))
  return vs2013_runtime_dll_dirs
|
||||
|
||||
|
||||
def CopyVsRuntimeDlls(output_dir, runtime_dirs):
  """Copies the VS runtime DLLs from the given |runtime_dirs| to the output
  directory so that even if not system-installed, built binaries are likely to
  be able to run.

  This needs to be run after gyp has been run so that the expected target
  output directories are already created.

  Args:
    output_dir: build output directory containing Debug/Release subdirs.
    runtime_dirs: (x86_dir, x64_dir) pair of runtime DLL source directories.
        Note the caller in __main__ swaps the order it unpacked from
        DownloadVsToolChain before passing it here.
  """
  assert sys.platform.startswith(('win32', 'cygwin'))

  def copy_runtime(target_dir, source_dir, dll_pattern):
    """Copy both the msvcr and msvcp runtime DLLs, only if the target doesn't
    exist, but the target directory does exist."""
    # 'p'/'r' substitute into dll_pattern, e.g. msvcp120.dll / msvcr120.dll.
    for which in ('p', 'r'):
      dll = dll_pattern % which
      target = os.path.join(target_dir, dll)
      source = os.path.join(source_dir, dll)
      # If gyp generated to that output dir, and the runtime isn't already
      # there (or is stale, judged by mtime mismatch), then copy it over.
      if (os.path.isdir(target_dir) and
          (not os.path.isfile(target) or
           os.stat(target).st_mtime != os.stat(source).st_mtime)):
        print 'Copying %s to %s...' % (source, target)
        if os.path.exists(target):
          os.unlink(target)
        # copy2 preserves mtime so the staleness check above stays stable.
        shutil.copy2(source, target)

  x86, x64 = runtime_dirs
  out_debug = os.path.join(output_dir, 'Debug')
  out_debug_nacl64 = os.path.join(output_dir, 'Debug', 'x64')
  out_release = os.path.join(output_dir, 'Release')
  out_release_nacl64 = os.path.join(output_dir, 'Release', 'x64')
  out_debug_x64 = os.path.join(output_dir, 'Debug_x64')
  out_release_x64 = os.path.join(output_dir, 'Release_x64')

  # Create the nacl64 subdirs when the parent config dir exists, so the x64
  # runtime can be staged next to 32-bit builds.
  if os.path.exists(out_debug) and not os.path.exists(out_debug_nacl64):
    os.makedirs(out_debug_nacl64)
  if os.path.exists(out_release) and not os.path.exists(out_release_nacl64):
    os.makedirs(out_release_nacl64)
  copy_runtime(out_debug, x86, 'msvc%s120d.dll')
  copy_runtime(out_release, x86, 'msvc%s120.dll')
  copy_runtime(out_debug_x64, x64, 'msvc%s120d.dll')
  copy_runtime(out_release_x64, x64, 'msvc%s120.dll')
  copy_runtime(out_debug_nacl64, x64, 'msvc%s120d.dll')
  copy_runtime(out_release_nacl64, x64, 'msvc%s120.dll')
|
||||
|
||||
|
||||
if __name__ == '__main__':
  # Entry point: prepares the environment, then hands the argument list to
  # gyp.main() and finally runs landmines and stages VS runtime DLLs.
  args = sys.argv[1:]

  # GYP_CHROMIUM_NO_ACTION=1 turns this script into a no-op (used by bots).
  if int(os.environ.get('GYP_CHROMIUM_NO_ACTION', 0)):
    print 'Skipping gyp_chromium due to GYP_CHROMIUM_NO_ACTION env var.'
    sys.exit(0)

  # Use the Psyco JIT if available.
  if psyco:
    psyco.profile()
    print "Enabled Psyco JIT."

  # Fall back on hermetic python if we happen to get run under cygwin.
  # TODO(bradnelson): take this out once this issue is fixed:
  # http://code.google.com/p/gyp/issues/detail?id=177
  if sys.platform == 'cygwin':
    import find_depot_tools
    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
    # Pick the newest bundled python2*_bin directory.
    python_dir = sorted(glob.glob(os.path.join(depot_tools_path,
                                               'python2*_bin')))[-1]
    env = os.environ.copy()
    env['PATH'] = python_dir + os.pathsep + env.get('PATH', '')
    # Re-exec this same script under the hermetic interpreter and forward
    # its exit status.
    p = subprocess.Popen(
        [os.path.join(python_dir, 'python.exe')] + sys.argv,
        env=env, shell=False)
    p.communicate()
    sys.exit(p.returncode)

  # Apply chromium.gyp_env settings to os.environ before reading them below.
  gyp_helper.apply_chromium_gyp_env()

  # This could give false positives since it doesn't actually do real option
  # parsing. Oh well.
  gyp_file_specified = False
  for arg in args:
    if arg.endswith('.gyp'):
      gyp_file_specified = True
      break

  # If we didn't get a file, check an env var, and then fall back to
  # assuming 'all.gyp' from the same directory as the script.
  if not gyp_file_specified:
    gyp_file = os.environ.get('CHROMIUM_GYP_FILE')
    if gyp_file:
      # Note that CHROMIUM_GYP_FILE values can't have backslashes as
      # path separators even on Windows due to the use of shlex.split().
      args.extend(shlex.split(gyp_file))
    else:
      args.append(os.path.join(script_dir, 'all.gyp'))

  # There shouldn't be a circular dependency relationship between .gyp files,
  # but in Chromium's .gyp files, on non-Mac platforms, circular relationships
  # currently exist. The check for circular dependencies is currently
  # bypassed on other platforms, but is left enabled on the Mac, where a
  # violation of the rule causes Xcode to misbehave badly.
  # TODO(mark): Find and kill remaining circular dependencies, and remove this
  # option. http://crbug.com/35878.
  # TODO(tc): Fix circular dependencies in ChromiumOS then add linux2 to the
  # list.
  if sys.platform not in ('darwin',):
    args.append('--no-circular-check')

  # Default to make if no generator has
  # explicitly been set.
  if not os.environ.get('GYP_GENERATORS'):
    os.environ['GYP_GENERATORS'] = 'make'
  # NOTE(review): the elif below is unreachable dead code — it requires
  # GYP_GENERATORS to be unset, but that case was already handled by the
  # branch above. Candidate for removal.
  elif sys.platform == 'darwin' and not os.environ.get('GYP_GENERATORS') and \
      not 'OS=ios' in os.environ.get('GYP_DEFINES', []):
    os.environ['GYP_GENERATORS'] = 'make'

  # May mutate GYP_* env vars; must run before they are read below.
  vs2013_runtime_dll_dirs = DownloadVsToolChain()

  # If CHROMIUM_GYP_SYNTAX_CHECK is set to 1, it will invoke gyp with --check
  # to enforce syntax checking.
  syntax_check = os.environ.get('CHROMIUM_GYP_SYNTAX_CHECK')
  if syntax_check and int(syntax_check):
    args.append('--check')

  supplemental_includes = GetSupplementalFiles()
  gn_vars_dict = GetGypVarsForGN(supplemental_includes)

  # Automatically turn on crosscompile support for platforms that need it.
  # (The Chrome OS build sets CC_host / CC_target which implicitly enables
  # this mode.)
  if all(('ninja' in os.environ.get('GYP_GENERATORS', ''),
          gn_vars_dict.get('OS') in ['android', 'ios'],
          'GYP_CROSSCOMPILE' not in os.environ)):
    os.environ['GYP_CROSSCOMPILE'] = '1'

  # TODO(brettw) bug 350974 either turn back on GN or delete all of this code.
  #if not RunGN(gn_vars_dict):
  #  sys.exit(1)
  args.extend(
      ['-I' + i for i in additional_include_files(supplemental_includes, args)])

  args.extend(['-D', 'gyp_output_dir=' + GetOutputDirectory()])

  print 'Updating projects from gyp files...'
  sys.stdout.flush()

  # Off we go...
  gyp_rc = gyp.main(args)

  # Check for landmines (reasons to clobber the build). This must be run here,
  # rather than a separate runhooks step so that any environment modifications
  # from above are picked up.
  print 'Running build/landmines.py...'
  subprocess.check_call(
      [sys.executable, os.path.join(script_dir, 'landmines.py')])

  if vs2013_runtime_dll_dirs:
    # runtime_dirs is ordered (x64, x86) — see the TODO in
    # DownloadVsToolChain — so swap when passing (x86, x64) on.
    x64_runtime, x86_runtime = vs2013_runtime_dll_dirs
    CopyVsRuntimeDlls(os.path.join(chrome_src, GetOutputDirectory()),
                      (x86_runtime, x64_runtime))

  # Propagate gyp's exit status.
  sys.exit(gyp_rc)
|
||||
@@ -1,54 +0,0 @@
|
||||
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# This file helps gyp_chromium and landmines correctly set up the gyp
|
||||
# environment from chromium.gyp_env on disk
|
||||
|
||||
import os
|
||||
|
||||
# Absolute directory containing this helper script.
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
# The source root: the parent of the directory holding this script.
CHROME_SRC = os.path.dirname(SCRIPT_DIR)
|
||||
|
||||
|
||||
def apply_gyp_environment_from_file(file_path):
  """Reads in a *.gyp_env file and applies the valid keys to os.environ.

  Keys already present in the real environment win over file values.
  Silently returns when the file does not exist.
  """
  if not os.path.exists(file_path):
    return
  with open(file_path, 'rU') as f:
    file_contents = f.read()
  try:
    # NOTE(review): evaluates the file as a Python expression. Builtins are
    # disabled, but this still runs expressions from disk — the file must
    # come from a trusted checkout.
    file_data = eval(file_contents, {'__builtins__': None}, None)
  except SyntaxError, e:
    # Attach the real filename so the traceback points at the .gyp_env file.
    e.filename = os.path.abspath(file_path)
    raise
  # Whitelist of environment variables the file is allowed to set.
  supported_vars = (
      'CC',
      'CC_wrapper',
      'CHROMIUM_GYP_FILE',
      'CHROMIUM_GYP_SYNTAX_CHECK',
      'CXX',
      'CXX_wrapper',
      'GYP_DEFINES',
      'GYP_GENERATOR_FLAGS',
      'GYP_CROSSCOMPILE',
      'GYP_GENERATOR_OUTPUT',
      'GYP_GENERATORS',
      'GYP_MSVS_VERSION',
  )
  for var in supported_vars:
    file_val = file_data.get(var)
    if file_val:
      if var in os.environ:
        # Existing environment values take precedence over the file.
        print 'INFO: Environment value for "%s" overrides value in %s.' % (
            var, os.path.abspath(file_path)
        )
      else:
        os.environ[var] = file_val
|
||||
|
||||
|
||||
def apply_chromium_gyp_env():
  """Applies chromium.gyp_env settings unless SKIP_CHROMIUM_GYP_ENV is set."""
  if 'SKIP_CHROMIUM_GYP_ENV' in os.environ:
    return
  # Update the environment based on chromium.gyp_env, which lives next to
  # the source root.
  gyp_env_path = os.path.join(os.path.dirname(CHROME_SRC), 'chromium.gyp_env')
  apply_gyp_environment_from_file(gyp_env_path)
|
||||
@@ -1,166 +0,0 @@
|
||||
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
{
  'variables': {
    # Root of the packaging/installer sources.
    'install_path': '<(DEPTH)/install',
    'version_py_path': '<(DEPTH)/build/version.py',
    'version_path': '<(DEPTH)/net/instaweb/public/VERSION',
    'lastchange_path': '<(SHARED_INTERMEDIATE_DIR)/build/LASTCHANGE',
    'branding_dir': '<(install_path)/common',
  },
  'conditions': [
    ['OS=="linux"', {
      'variables': {
        'version' : '<!(python <(version_py_path) -f <(version_path) -t "@MAJOR@.@MINOR@.@BUILD@.@PATCH@")',
        'revision' : '<!(if [ -f <(DEPTH)/LASTCHANGE.in ]; then cat <(DEPTH)/LASTCHANGE.in | cut -d= -f2; else git rev-list --all --count; fi)',
        'packaging_files_common': [
          '<(install_path)/common/apt.include',
          '<(install_path)/common/mod-pagespeed/mod-pagespeed.info',
          '<(install_path)/common/installer.include',
          '<(install_path)/common/repo.cron',
          '<(install_path)/common/rpm.include',
          '<(install_path)/common/rpmrepo.cron',
          '<(install_path)/common/updater',
          '<(install_path)/common/variables.include',
          '<(install_path)/common/BRANDING',
          '<(install_path)/common/pagespeed.load.template',
          '<(install_path)/common/pagespeed.conf.template',
        ],
        'packaging_files_deb': [
          '<(install_path)/debian/build.sh',
          '<(install_path)/debian/changelog.template',
          '<(install_path)/debian/conffiles',
          '<(install_path)/debian/control.template',
          '<(install_path)/debian/postinst',
          '<(install_path)/debian/postrm',
          '<(install_path)/debian/prerm',
        ],
        'packaging_files_rpm': [
          '<(install_path)/rpm/build.sh',
          '<(install_path)/rpm/mod-pagespeed.spec.template',
        ],
        'packaging_files_binaries': [
          '<(PRODUCT_DIR)/libmod_pagespeed.so',
          '<(PRODUCT_DIR)/libmod_pagespeed_ap24.so',
        ],
        # Serialize package builds via a lock file.
        'flock_bash': ['flock', '--', '/tmp/linux_package_lock', 'bash'],
        'deb_build': '<(PRODUCT_DIR)/install/debian/build.sh',
        'rpm_build': '<(PRODUCT_DIR)/install/rpm/build.sh',
        # NOTE(review): '-o' '<(PRODUCT_DIR)' uses implicit string
        # concatenation, producing the single argument '-o<(PRODUCT_DIR)'.
        # Confirm build.sh's option parsing accepts the glued form before
        # inserting a comma.
        'deb_cmd': ['<@(flock_bash)', '<(deb_build)', '-o' '<(PRODUCT_DIR)',
                    '-b', '<(PRODUCT_DIR)', '-a', '<(target_arch)'],
        'rpm_cmd': ['<@(flock_bash)', '<(rpm_build)', '-o' '<(PRODUCT_DIR)',
                    '-b', '<(PRODUCT_DIR)', '-a', '<(target_arch)'],
        'conditions': [
          ['target_arch=="ia32"', {
            'deb_arch': 'i386',
            'rpm_arch': 'i386',
          }],
          ['target_arch=="x64"', {
            'deb_arch': 'amd64',
            'rpm_arch': 'x86_64',
          }],
        ],
      },
      'targets': [
        {
          'target_name': 'linux_installer_configs',
          'type': 'none',
          # Add these files to the build output so the build archives will be
          # "hermetic" for packaging.
          'copies': [
            {
              'destination': '<(PRODUCT_DIR)/install/debian/',
              'files': [
                '<@(packaging_files_deb)',
              ]
            },
            {
              'destination': '<(PRODUCT_DIR)/install/rpm/',
              'files': [
                '<@(packaging_files_rpm)',
              ]
            },
            {
              'destination': '<(PRODUCT_DIR)/install/common/',
              'files': [
                '<@(packaging_files_common)',
              ]
            },
          ],
          'actions': [
            {
              'action_name': 'save_build_info',
              'inputs': [
                '<(branding_dir)/BRANDING',
                '<(version_path)',
                '<(lastchange_path)',
              ],
              'outputs': [
                '<(PRODUCT_DIR)/installer/version.txt',
              ],
              # Just output the default version info variables.
              'action': [
                'python', '<(version_py_path)',
                '-f', '<(branding_dir)/BRANDING',
                '-f', '<(version_path)',
                '-f', '<(lastchange_path)',
                '-o', '<@(_outputs)'
              ],
            },
          ],
        },
        # Per-channel package targets; each sets 'channel' and defers to the
        # shared deb/rpm .gypi includes.
        {
          'target_name': 'linux_package_deb_stable',
          'suppress_wildcard': 1,
          'variables': {
            'channel': 'stable',
          },
          'includes': [
            'linux_package_deb.gypi',
          ],
        },
        {
          'target_name': 'linux_package_deb_beta',
          'suppress_wildcard': 1,
          'variables': {
            'channel': 'beta',
          },
          'includes': [
            'linux_package_deb.gypi',
          ],
        },
        {
          'target_name': 'linux_package_rpm_stable',
          'suppress_wildcard': 1,
          'variables': {
            'channel': 'stable',
          },
          'includes': [
            'linux_package_rpm.gypi',
          ],
        },
        {
          'target_name': 'linux_package_rpm_beta',
          'suppress_wildcard': 1,
          'variables': {
            'channel': 'beta',
          },
          'includes': [
            'linux_package_rpm.gypi',
          ],
        },
      ],
    },{
      # Non-Linux platforms get no packaging targets.
      'targets': [
      ],
    }],
  ],
}
|
||||
|
||||
# Local Variables:
|
||||
# tab-width:2
|
||||
# indent-tabs-mode:nil
|
||||
# End:
|
||||
# vim: set expandtab tabstop=2 shiftwidth=2:
|
||||
-398
@@ -1,398 +0,0 @@
|
||||
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# This file is meant to be included into a target to provide a rule
|
||||
# to build Java in a consistent manner.
|
||||
#
|
||||
# To use this, create a gyp target with the following form:
|
||||
# {
|
||||
# 'target_name': 'my-package_java',
|
||||
# 'type': 'none',
|
||||
# 'variables': {
|
||||
# 'java_in_dir': 'path/to/package/root',
|
||||
# },
|
||||
# 'includes': ['path/to/this/gypi/file'],
|
||||
# }
|
||||
#
|
||||
# Required variables:
|
||||
# java_in_dir - The top-level java directory. The src should be in
|
||||
# <java_in_dir>/src.
|
||||
# Optional/automatic variables:
|
||||
# additional_input_paths - These paths will be included in the 'inputs' list to
|
||||
# ensure that this target is rebuilt when one of these paths changes.
|
||||
# additional_src_dirs - Additional directories with .java files to be compiled
|
||||
# and included in the output of this target.
|
||||
# generated_src_dirs - Same as additional_src_dirs except used for .java files
|
||||
# that are generated at build time. This should be set automatically by a
|
||||
# target's dependencies. The .java files in these directories are not
|
||||
# included in the 'inputs' list (unlike additional_src_dirs).
|
||||
# input_jars_paths - The path to jars to be included in the classpath. This
|
||||
# should be filled automatically by depending on the appropriate targets.
|
||||
# javac_includes - A list of specific files to include. This is by default
|
||||
# empty, which leads to inclusion of all files specified. May include
|
||||
# wildcard, and supports '**/' for recursive path wildcards, ie.:
|
||||
# '**/MyFileRegardlessOfDirectory.java', '**/IncludedPrefix*.java'.
|
||||
# has_java_resources - Set to 1 if the java target contains an
|
||||
# Android-compatible resources folder named res. If 1, R_package and
|
||||
# R_package_relpath must also be set.
|
||||
# R_package - The java package in which the R class (which maps resources to
|
||||
# integer IDs) should be generated, e.g. org.chromium.content.
|
||||
# R_package_relpath - Same as R_package, but replace each '.' with '/'.
|
||||
# java_strings_grd - The name of the grd file from which to generate localized
|
||||
# strings.xml files, if any.
|
||||
# res_extra_dirs - A list of extra directories containing Android resources.
|
||||
# These directories may be generated at build time.
|
||||
# res_extra_files - A list of the files in res_extra_dirs.
|
||||
# never_lint - Set to 1 to not run lint on this target.
|
||||
|
||||
{
  'dependencies': [
    '<(DEPTH)/build/android/setup.gyp:build_output_dirs'
  ],
  'variables': {
    'android_jar': '<(android_sdk)/android.jar',
    'input_jars_paths': [ '<(android_jar)' ],
    'additional_src_dirs': [],
    'javac_includes': [],
    'jar_name': '<(_target_name).jar',
    'jar_dir': '<(PRODUCT_DIR)/lib.java',
    'jar_path': '<(intermediate_dir)/<(jar_name)',
    'jar_final_path': '<(jar_dir)/<(jar_name)',
    # Generated R classes are excluded; dependents regenerate their own.
    'jar_excluded_classes': [ '*/R.class', '*/R##*.class' ],
    'instr_stamp': '<(intermediate_dir)/instr.stamp',
    'additional_input_paths': [],
    'dex_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).dex.jar',
    'generated_src_dirs': ['>@(generated_R_dirs)'],
    'generated_R_dirs': [],
    'has_java_resources%': 0,
    'java_strings_grd%': '',
    'res_extra_dirs': [],
    'res_extra_files': [],
    'res_v14_verify_only%': 0,
    'resource_input_paths': ['>@(res_extra_files)'],
    'intermediate_dir': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)',
    'classes_dir': '<(intermediate_dir)/classes',
    'compile_stamp': '<(intermediate_dir)/compile.stamp',
    'lint_stamp': '<(intermediate_dir)/lint.stamp',
    'lint_result': '<(intermediate_dir)/lint_result.xml',
    'lint_config': '<(intermediate_dir)/lint_config.xml',
    'never_lint%': 0,
    'proguard_config%': '',
    'proguard_preprocess%': '0',
    # Nested 'variables' scopes compute values that depend on other
    # defaulted variables (gyp evaluates inner scopes first).
    'variables': {
      'variables': {
        'proguard_preprocess%': 0,
        'emma_never_instrument%': 0,
      },
      'conditions': [
        ['proguard_preprocess == 1', {
          'javac_jar_path': '<(intermediate_dir)/<(_target_name).pre.jar'
        }, {
          'javac_jar_path': '<(jar_path)'
        }],
        ['chromium_code != 0 and emma_coverage != 0 and emma_never_instrument == 0', {
          'emma_instrument': 1,
        }, {
          'emma_instrument': 0,
        }],
      ],
    },
    'emma_instrument': '<(emma_instrument)',
    'javac_jar_path': '<(javac_jar_path)',
  },
  # This all_dependent_settings is used for java targets only. This will add the
  # jar path to the classpath of dependent java targets.
  'all_dependent_settings': {
    'variables': {
      'input_jars_paths': ['<(jar_final_path)'],
      'library_dexed_jars_paths': ['<(dex_path)'],
    },
  },
  'conditions': [
    ['has_java_resources == 1', {
      'variables': {
        'res_dir': '<(java_in_dir)/res',
        'res_crunched_dir': '<(intermediate_dir)/res_crunched',
        'res_v14_compatibility_stamp': '<(intermediate_dir)/res_v14_compatibility.stamp',
        'res_v14_compatibility_dir': '<(intermediate_dir)/res_v14_compatibility',
        'res_input_dirs': ['<(res_dir)', '<@(res_extra_dirs)'],
        'resource_input_paths': ['<!@(find <(res_dir) -type f)'],
        'R_dir': '<(intermediate_dir)/java_R',
        'R_text_file': '<(R_dir)/R.txt',
        'R_stamp': '<(intermediate_dir)/resources.stamp',
        'generated_src_dirs': ['<(R_dir)'],
        'additional_input_paths': ['<(R_stamp)',
                                   '<(res_v14_compatibility_stamp)',],
        'additional_res_dirs': [],
        'dependencies_res_input_dirs': [],
        'dependencies_res_files': [],
      },
      'all_dependent_settings': {
        'variables': {
          # Dependent jars include this target's R.java file via
          # generated_R_dirs and include its resources via
          # dependencies_res_files.
          'generated_R_dirs': ['<(R_dir)'],
          'additional_input_paths': ['<(R_stamp)',
                                     '<(res_v14_compatibility_stamp)',],
          'dependencies_res_files': ['<@(resource_input_paths)'],

          'dependencies_res_input_dirs': ['<@(res_input_dirs)'],

          # Dependent APKs include this target's resources via
          # additional_res_dirs, additional_res_packages, and
          # additional_R_text_files.
          'additional_res_dirs': ['<(res_crunched_dir)',
                                  '<(res_v14_compatibility_dir)',
                                  '<@(res_input_dirs)'],
          'additional_res_packages': ['<(R_package)'],
          'additional_R_text_files': ['<(R_text_file)'],
        },
      },
      'conditions': [
        ['java_strings_grd != ""', {
          'variables': {
            'res_grit_dir': '<(intermediate_dir)/res_grit',
            'res_input_dirs': ['<(res_grit_dir)'],
            'grit_grd_file': '<(java_in_dir)/strings/<(java_strings_grd)',
            'resource_input_paths': ['<!@pymod_do_main(grit_info <@(grit_defines) --outputs "<(res_grit_dir)" <(grit_grd_file))'],
          },
          'actions': [
            {
              'action_name': 'generate_localized_strings_xml',
              'variables': {
                'grit_additional_defines': ['-E', 'ANDROID_JAVA_TAGGED_ONLY=false'],
                'grit_out_dir': '<(res_grit_dir)',
                # resource_ids is unneeded since we don't generate .h headers.
                'grit_resource_ids': '',
              },
              'includes': ['../build/grit_action.gypi'],
            },
          ],
        }],
      ],
      'actions': [
        # Generate R.java and crunch image resources.
        {
          'action_name': 'process_resources',
          'message': 'processing resources for <(_target_name)',
          'variables': {
            'android_manifest': '<(DEPTH)/build/android/AndroidManifest.xml',
            # Include the dependencies' res dirs so that references to
            # resources in dependencies can be resolved.
            'all_res_dirs': ['<@(res_input_dirs)',
                             '>@(dependencies_res_input_dirs)',],
            # Write the inputs list to a file, so that the action command
            # line won't exceed the OS limits when calculating the checksum
            # of the list.
            'inputs_list_file': '>|(inputs_list.<(_target_name).gypcmd >@(_inputs))'
          },
          'inputs': [
            '<(DEPTH)/build/android/gyp/util/build_utils.py',
            '<(DEPTH)/build/android/gyp/process_resources.py',
            '>@(resource_input_paths)',
            '>@(dependencies_res_files)',
          ],
          'outputs': [
            '<(R_stamp)',
          ],
          'action': [
            'python', '<(DEPTH)/build/android/gyp/process_resources.py',
            '--android-sdk', '<(android_sdk)',
            '--android-sdk-tools', '<(android_sdk_tools)',
            '--R-dir', '<(R_dir)',
            '--res-dirs', '>(all_res_dirs)',
            '--crunch-input-dir', '>(res_dir)',
            '--crunch-output-dir', '<(res_crunched_dir)',
            '--android-manifest', '<(android_manifest)',
            '--non-constant-id',
            '--custom-package', '<(R_package)',
            '--stamp', '<(R_stamp)',

            # Add hash of inputs to the command line, so if inputs change
            # (e.g. if a resource if removed), the command will be re-run.
            # TODO(newt): remove this once crbug.com/177552 is fixed in ninja.
            '--ignore=>!(md5sum >(inputs_list_file))',
          ],
        },
        # Generate API 14 resources.
        {
          'action_name': 'generate_api_14_resources_<(_target_name)',
          'message': 'Generating Android API 14 resources <(_target_name)',
          'variables' : {
            'res_v14_additional_options': [],
          },
          'conditions': [
            ['res_v14_verify_only == 1', {
              'variables': {
                'res_v14_additional_options': ['--verify-only']
              },
            }],
          ],
          'inputs': [
            '<(DEPTH)/build/android/gyp/util/build_utils.py',
            '<(DEPTH)/build/android/gyp/generate_v14_compatible_resources.py',
            '>@(resource_input_paths)',
          ],
          'outputs': [
            '<(res_v14_compatibility_stamp)',
          ],
          'action': [
            'python', '<(DEPTH)/build/android/gyp/generate_v14_compatible_resources.py',
            '--res-dir=<(res_dir)',
            '--res-v14-compatibility-dir=<(res_v14_compatibility_dir)',
            '--stamp', '<(res_v14_compatibility_stamp)',
            '<@(res_v14_additional_options)',
          ]
        },
      ],
    }],
    ['proguard_preprocess == 1', {
      'actions': [
        {
          'action_name': 'proguard_<(_target_name)',
          'message': 'Proguard preprocessing <(_target_name) jar',
          'inputs': [
            '<(android_sdk_root)/tools/proguard/bin/proguard.sh',
            '<(DEPTH)/build/android/gyp/util/build_utils.py',
            '<(DEPTH)/build/android/gyp/proguard.py',
            '<(javac_jar_path)',
            '<(proguard_config)',
          ],
          'outputs': [
            '<(jar_path)',
          ],
          'action': [
            'python', '<(DEPTH)/build/android/gyp/proguard.py',
            '--proguard-path=<(android_sdk_root)/tools/proguard/bin/proguard.sh',
            '--input-path=<(javac_jar_path)',
            '--output-path=<(jar_path)',
            '--proguard-config=<(proguard_config)',
            '--classpath=<(android_sdk_jar) >(input_jars_paths)',
          ]
        },
      ],
    }],
  ],
  'actions': [
    # Pipeline: javac -> lint -> jar -> instrument -> jar.TOC -> dex.
    {
      'action_name': 'javac_<(_target_name)',
      'message': 'Compiling <(_target_name) java sources',
      'variables': {
        'all_src_dirs': [
          '>(java_in_dir)/src',
          '>@(additional_src_dirs)',
          '>@(generated_src_dirs)',
        ],
      },
      'inputs': [
        '<(DEPTH)/build/android/gyp/util/build_utils.py',
        '<(DEPTH)/build/android/gyp/javac.py',
        '>!@(find >(java_in_dir)/src >(additional_src_dirs) -name "*.java")',
        '>@(input_jars_paths)',
        '>@(additional_input_paths)',
      ],
      'outputs': [
        '<(compile_stamp)',
      ],
      'action': [
        'python', '<(DEPTH)/build/android/gyp/javac.py',
        '--output-dir=<(classes_dir)',
        '--classpath=>(input_jars_paths)',
        '--src-dirs=>(all_src_dirs)',
        '--javac-includes=<(javac_includes)',
        '--chromium-code=<(chromium_code)',
        '--stamp=<(compile_stamp)',

        # TODO(newt): remove this once http://crbug.com/177552 is fixed in ninja.
        '--ignore=>!(echo \'>(_inputs)\' | md5sum)',
      ]
    },
    {
      'variables': {
        'src_dirs': [
          '<(java_in_dir)/src',
          '>@(additional_src_dirs)',
        ],
        'stamp_path': '<(lint_stamp)',
        'result_path': '<(lint_result)',
        'config_path': '<(lint_config)',
      },
      'inputs': [
        '<(compile_stamp)',
      ],
      'outputs': [
        '<(lint_stamp)',
      ],
      'includes': [ 'android/lint_action.gypi' ],
    },
    {
      'action_name': 'jar_<(_target_name)',
      'message': 'Creating <(_target_name) jar',
      'inputs': [
        '<(DEPTH)/build/android/gyp/util/build_utils.py',
        '<(DEPTH)/build/android/gyp/util/md5_check.py',
        '<(DEPTH)/build/android/gyp/jar.py',
        '<(compile_stamp)',
      ],
      'outputs': [
        '<(javac_jar_path)',
      ],
      'action': [
        'python', '<(DEPTH)/build/android/gyp/jar.py',
        '--classes-dir=<(classes_dir)',
        '--jar-path=<(javac_jar_path)',
        '--excluded-classes=<(jar_excluded_classes)',
      ]
    },
    {
      'action_name': 'instr_jar_<(_target_name)',
      'message': 'Instrumenting <(_target_name) jar',
      'variables': {
        'input_path': '<(jar_path)',
        'output_path': '<(jar_final_path)',
        'stamp_path': '<(instr_stamp)',
        'instr_type': 'jar',
      },
      'outputs': [
        '<(jar_final_path)',
      ],
      'inputs': [
        '<(jar_path)',
      ],
      'includes': [ 'android/instr_action.gypi' ],
    },
    {
      'action_name': 'jar_toc_<(_target_name)',
      'message': 'Creating <(_target_name) jar.TOC',
      'inputs': [
        '<(DEPTH)/build/android/gyp/util/build_utils.py',
        '<(DEPTH)/build/android/gyp/util/md5_check.py',
        '<(DEPTH)/build/android/gyp/jar_toc.py',
        '<(jar_final_path)',
      ],
      'outputs': [
        '<(jar_final_path).TOC',
      ],
      'action': [
        'python', '<(DEPTH)/build/android/gyp/jar_toc.py',
        '--jar-path=<(jar_final_path)',
        '--toc-path=<(jar_final_path).TOC',
      ]
    },
    {
      'action_name': 'dex_<(_target_name)',
      'variables': {
        'conditions': [
          ['emma_instrument != 0', {
            'dex_no_locals': 1,
          }],
        ],
        'dex_input_paths': [ '<(jar_final_path)' ],
        'output_path': '<(dex_path)',
      },
      'includes': [ 'android/dex_action.gypi' ],
    },
  ],
}
|
||||
@@ -1,114 +0,0 @@
|
||||
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
|
||||
import functools
|
||||
import logging
|
||||
import os
|
||||
import shlex
|
||||
import sys
|
||||
|
||||
|
||||
def memoize(default=None):
|
||||
"""This decorator caches the return value of a parameterless pure function"""
|
||||
def memoizer(func):
|
||||
val = []
|
||||
@functools.wraps(func)
|
||||
def inner():
|
||||
if not val:
|
||||
ret = func()
|
||||
val.append(ret if ret is not None else default)
|
||||
if logging.getLogger().isEnabledFor(logging.INFO):
|
||||
print '%s -> %r' % (func.__name__, val[0])
|
||||
return val[0]
|
||||
return inner
|
||||
return memoizer
|
||||
|
||||
|
||||
@memoize()
|
||||
def IsWindows():
|
||||
return sys.platform in ['win32', 'cygwin']
|
||||
|
||||
|
||||
@memoize()
|
||||
def IsLinux():
|
||||
return sys.platform.startswith(('linux', 'freebsd'))
|
||||
|
||||
|
||||
@memoize()
|
||||
def IsMac():
|
||||
return sys.platform == 'darwin'
|
||||
|
||||
|
||||
@memoize()
|
||||
def gyp_defines():
|
||||
"""Parses and returns GYP_DEFINES env var as a dictionary."""
|
||||
return dict(arg.split('=', 1)
|
||||
for arg in shlex.split(os.environ.get('GYP_DEFINES', '')))
|
||||
|
||||
@memoize()
|
||||
def gyp_msvs_version():
|
||||
return os.environ.get('GYP_MSVS_VERSION', '')
|
||||
|
||||
@memoize()
|
||||
def distributor():
|
||||
"""
|
||||
Returns a string which is the distributed build engine in use (if any).
|
||||
Possible values: 'goma', 'ib', ''
|
||||
"""
|
||||
if 'goma' in gyp_defines():
|
||||
return 'goma'
|
||||
elif IsWindows():
|
||||
if 'CHROME_HEADLESS' in os.environ:
|
||||
return 'ib' # use (win and !goma and headless) as approximation of ib
|
||||
|
||||
|
||||
@memoize()
|
||||
def platform():
|
||||
"""
|
||||
Returns a string representing the platform this build is targetted for.
|
||||
Possible values: 'win', 'mac', 'linux', 'ios', 'android'
|
||||
"""
|
||||
if 'OS' in gyp_defines():
|
||||
if 'android' in gyp_defines()['OS']:
|
||||
return 'android'
|
||||
else:
|
||||
return gyp_defines()['OS']
|
||||
elif IsWindows():
|
||||
return 'win'
|
||||
elif IsLinux():
|
||||
return 'linux'
|
||||
else:
|
||||
return 'mac'
|
||||
|
||||
|
||||
@memoize()
|
||||
def builder():
|
||||
"""
|
||||
Returns a string representing the build engine (not compiler) to use.
|
||||
Possible values: 'make', 'ninja', 'xcode', 'msvs', 'scons'
|
||||
"""
|
||||
if 'GYP_GENERATORS' in os.environ:
|
||||
# for simplicity, only support the first explicit generator
|
||||
generator = os.environ['GYP_GENERATORS'].split(',')[0]
|
||||
if generator.endswith('-android'):
|
||||
return generator.split('-')[0]
|
||||
elif generator.endswith('-ninja'):
|
||||
return 'ninja'
|
||||
else:
|
||||
return generator
|
||||
else:
|
||||
if platform() == 'android':
|
||||
# Good enough for now? Do any android bots use make?
|
||||
return 'ninja'
|
||||
elif platform() == 'ios':
|
||||
return 'xcode'
|
||||
elif IsWindows():
|
||||
return 'ninja'
|
||||
elif IsLinux():
|
||||
return 'ninja'
|
||||
elif IsMac():
|
||||
return 'ninja'
|
||||
else:
|
||||
assert False, 'Don\'t know what builder we\'re using!'
|
||||
@@ -1,132 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""
|
||||
This script runs every build as a hook. If it detects that the build should
|
||||
be clobbered, it will touch the file <build_dir>/.landmine_triggered. The
|
||||
various build scripts will then check for the presence of this file and clobber
|
||||
accordingly. The script will also emit the reasons for the clobber to stdout.
|
||||
|
||||
A landmine is tripped when a builder checks out a different revision, and the
|
||||
diff between the new landmines and the old ones is non-null. At this point, the
|
||||
build is clobbered.
|
||||
"""
|
||||
|
||||
import difflib
|
||||
import logging
|
||||
import optparse
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import time
|
||||
|
||||
import landmine_utils
|
||||
|
||||
|
||||
SRC_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
|
||||
|
||||
|
||||
def get_target_build_dir(build_tool, target, is_iphone=False):
|
||||
"""
|
||||
Returns output directory absolute path dependent on build and targets.
|
||||
Examples:
|
||||
r'c:\b\build\slave\win\build\src\out\Release'
|
||||
'/mnt/data/b/build/slave/linux/build/src/out/Debug'
|
||||
'/b/build/slave/ios_rel_device/build/src/xcodebuild/Release-iphoneos'
|
||||
|
||||
Keep this function in sync with tools/build/scripts/slave/compile.py
|
||||
"""
|
||||
ret = None
|
||||
if build_tool == 'xcode':
|
||||
ret = os.path.join(SRC_DIR, 'xcodebuild',
|
||||
target + ('-iphoneos' if is_iphone else ''))
|
||||
elif build_tool in ['make', 'ninja', 'ninja-ios']: # TODO: Remove ninja-ios.
|
||||
ret = os.path.join(SRC_DIR, 'out', target)
|
||||
elif build_tool in ['msvs', 'vs', 'ib']:
|
||||
ret = os.path.join(SRC_DIR, 'build', target)
|
||||
else:
|
||||
raise NotImplementedError('Unexpected GYP_GENERATORS (%s)' % build_tool)
|
||||
return os.path.abspath(ret)
|
||||
|
||||
|
||||
def set_up_landmines(target, new_landmines):
|
||||
"""Does the work of setting, planting, and triggering landmines."""
|
||||
out_dir = get_target_build_dir(landmine_utils.builder(), target,
|
||||
landmine_utils.platform() == 'ios')
|
||||
|
||||
landmines_path = os.path.join(out_dir, '.landmines')
|
||||
if not os.path.exists(out_dir):
|
||||
os.makedirs(out_dir)
|
||||
|
||||
if not os.path.exists(landmines_path):
|
||||
with open(landmines_path, 'w') as f:
|
||||
f.writelines(new_landmines)
|
||||
else:
|
||||
triggered = os.path.join(out_dir, '.landmines_triggered')
|
||||
with open(landmines_path, 'r') as f:
|
||||
old_landmines = f.readlines()
|
||||
if old_landmines != new_landmines:
|
||||
old_date = time.ctime(os.stat(landmines_path).st_ctime)
|
||||
diff = difflib.unified_diff(old_landmines, new_landmines,
|
||||
fromfile='old_landmines', tofile='new_landmines',
|
||||
fromfiledate=old_date, tofiledate=time.ctime(), n=0)
|
||||
|
||||
with open(triggered, 'w') as f:
|
||||
f.writelines(diff)
|
||||
elif os.path.exists(triggered):
|
||||
# Remove false triggered landmines.
|
||||
os.remove(triggered)
|
||||
|
||||
|
||||
def process_options():
|
||||
"""Returns a list of landmine emitting scripts."""
|
||||
parser = optparse.OptionParser()
|
||||
parser.add_option(
|
||||
'-s', '--landmine-scripts', action='append',
|
||||
default=[os.path.join(SRC_DIR, 'build', 'get_landmines.py')],
|
||||
help='Path to the script which emits landmines to stdout. The target '
|
||||
'is passed to this script via option -t. Note that an extra '
|
||||
'script can be specified via an env var EXTRA_LANDMINES_SCRIPT.')
|
||||
parser.add_option('-v', '--verbose', action='store_true',
|
||||
default=('LANDMINES_VERBOSE' in os.environ),
|
||||
help=('Emit some extra debugging information (default off). This option '
|
||||
'is also enabled by the presence of a LANDMINES_VERBOSE environment '
|
||||
'variable.'))
|
||||
|
||||
options, args = parser.parse_args()
|
||||
|
||||
if args:
|
||||
parser.error('Unknown arguments %s' % args)
|
||||
|
||||
logging.basicConfig(
|
||||
level=logging.DEBUG if options.verbose else logging.ERROR)
|
||||
|
||||
extra_script = os.environ.get('EXTRA_LANDMINES_SCRIPT')
|
||||
if extra_script:
|
||||
return options.landmine_scripts + [extra_script]
|
||||
else:
|
||||
return options.landmine_scripts
|
||||
|
||||
|
||||
def main():
|
||||
landmine_scripts = process_options()
|
||||
|
||||
if landmine_utils.builder() == 'dump_dependency_json':
|
||||
return 0
|
||||
|
||||
for target in ('Debug', 'Release', 'Debug_x64', 'Release_x64'):
|
||||
landmines = []
|
||||
for s in landmine_scripts:
|
||||
proc = subprocess.Popen([sys.executable, s, '-t', target],
|
||||
stdout=subprocess.PIPE)
|
||||
output, _ = proc.communicate()
|
||||
landmines.extend([('%s\n' % l.strip()) for l in output.splitlines()])
|
||||
set_up_landmines(target, landmines)
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
@@ -1,55 +0,0 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Copyright 2013 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# Author: morlovich@google.com (Maksim Orlovich)
|
||||
#
|
||||
# Determine last git revision containing an actual change on a given branch
|
||||
# Usage: lastchange.sh gitpath [-d default_file] [-o out_file]
|
||||
set -e
|
||||
set -u
|
||||
|
||||
SVN_PATH=$1
|
||||
shift 1
|
||||
DEFAULT_FILE=
|
||||
OUT_FILE=/dev/stdout
|
||||
|
||||
while [ $# -ge 2 ]; do
|
||||
case $1 in
|
||||
-d)
|
||||
# -d has no effect if file doesn't exist.
|
||||
if [ -f $2 ]; then
|
||||
DEFAULT_FILE=$2
|
||||
fi
|
||||
shift 2
|
||||
;;
|
||||
-o)
|
||||
OUT_FILE=$2
|
||||
shift 2
|
||||
;;
|
||||
*)
|
||||
echo "Usage: lastchange.sh gitpath [-d default_file] [-o out_file]"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ -z $DEFAULT_FILE ]; then
|
||||
KEY='Last Changed Rev: '
|
||||
REVISION=$(git rev-list --all --count)
|
||||
echo LASTCHANGE=$REVISION > $OUT_FILE
|
||||
else
|
||||
cat $DEFAULT_FILE > $OUT_FILE
|
||||
fi
|
||||
@@ -1,25 +0,0 @@
|
||||
# Copyright 2013 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# Author: morlovich@google.com (Maksim Orlovich)
|
||||
#
|
||||
# This simply forwards to the Chromium's lastchange.py script, but runs it
|
||||
# from the mod_pagespeed repo so it gets the mod_pagespeed revision and not
|
||||
# the chromium one.
|
||||
import sys
|
||||
sys.path.append('util')
|
||||
from lastchange import main
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
@@ -1,170 +0,0 @@
|
||||
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
{
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'libwebp_dec',
|
||||
'type': 'static_library',
|
||||
'dependencies' : [
|
||||
'libwebp_dsp',
|
||||
'libwebp_dsp_neon',
|
||||
'libwebp_mux',
|
||||
'libwebp_utils',
|
||||
],
|
||||
'sources': [
|
||||
'<(DEPTH)/third_party/libwebp/src/dec/alpha.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dec/buffer.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dec/frame.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dec/idec.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dec/io.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dec/quant.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dec/tree.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dec/vp8.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dec/vp8l.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dec/webp.c',
|
||||
],
|
||||
'direct_dependent_settings': {
|
||||
'include_dirs': [
|
||||
'<(DEPTH)/third_party/libwebp/src/webp'
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
'target_name': 'libwebp_dsp',
|
||||
'type': 'static_library',
|
||||
'sources': [
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/alpha_processing.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/alpha_processing_mips_dsp_r2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/alpha_processing_sse2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/alpha_processing_sse41.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/argb.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/argb_mips_dsp_r2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/argb_sse2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/cost.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/cost_mips32.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/cost_mips_dsp_r2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/cost_sse2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/cpu.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/dec.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/dec_clip_tables.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/dec_mips32.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/dec_mips_dsp_r2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/dec_sse2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/dec_sse41.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/enc.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/enc_avx2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/enc_mips32.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/enc_mips_dsp_r2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/enc_sse2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/enc_sse41.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/filters.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/filters_mips_dsp_r2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/filters_sse2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/lossless.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/lossless_enc.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/lossless_enc_mips32.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/lossless_enc_mips_dsp_r2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/lossless_enc_sse2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/lossless_enc_sse41.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/lossless_mips_dsp_r2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/lossless_sse2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/rescaler.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/rescaler_mips32.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/rescaler_mips_dsp_r2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/rescaler_sse2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/upsampling.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/upsampling_mips_dsp_r2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/upsampling_sse2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/yuv.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/yuv_mips32.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/yuv_mips_dsp_r2.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/yuv_sse2.c',
|
||||
],
|
||||
},
|
||||
{
|
||||
'target_name': 'libwebp_dsp_neon',
|
||||
'type': 'static_library',
|
||||
'sources': [
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/dec_neon.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/enc_neon.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/lossless_enc_neon.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/lossless_neon.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/rescaler_neon.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/dsp/upsampling_neon.c',
|
||||
],
|
||||
},
|
||||
{
|
||||
'target_name': 'libwebp_enc',
|
||||
'type': 'static_library',
|
||||
# note these dependencies are shared with libwebp_dec, if they are merged
|
||||
# into each lib causing duplicate symbol issues when both are used then
|
||||
# the deps could be split to enc/dec parts or a combined libwebp target
|
||||
# could be added similar to chrome.
|
||||
'dependencies' : [
|
||||
'libwebp_dsp',
|
||||
'libwebp_dsp_neon',
|
||||
'libwebp_mux',
|
||||
'libwebp_utils',
|
||||
],
|
||||
'sources': [
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/alpha.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/analysis.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/backward_references.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/config.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/cost.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/delta_palettization.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/filter.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/frame.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/histogram.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/iterator.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/near_lossless.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/picture.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/picture_csp.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/picture_psnr.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/picture_rescale.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/picture_tools.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/quant.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/syntax.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/token.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/tree.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/vp8l.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/enc/webpenc.c',
|
||||
],
|
||||
'direct_dependent_settings': {
|
||||
'include_dirs': [
|
||||
'<(DEPTH)/third_party/libwebp/src/webp'
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
'target_name': 'libwebp_utils',
|
||||
'type': 'static_library',
|
||||
'sources': [
|
||||
'<(DEPTH)/third_party/libwebp/src/utils/bit_reader.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/utils/bit_writer.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/utils/color_cache.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/utils/filters.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/utils/huffman.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/utils/huffman_encode.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/utils/quant_levels.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/utils/quant_levels_dec.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/utils/random.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/utils/rescaler.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/utils/thread.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/utils/utils.c',
|
||||
],
|
||||
},
|
||||
{
|
||||
'target_name': 'libwebp_mux',
|
||||
'type': 'static_library',
|
||||
'sources': [
|
||||
'<(DEPTH)/third_party/libwebp/src/mux/anim_encode.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/mux/muxedit.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/mux/muxinternal.c',
|
||||
'<(DEPTH)/third_party/libwebp/src/mux/muxread.c',
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
@@ -1,35 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright 2014 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Outputs host CPU architecture in format recognized by gyp."""
|
||||
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
|
||||
|
||||
def main():
|
||||
host_arch = platform.machine()
|
||||
|
||||
# Convert machine type to format recognized by gyp.
|
||||
if re.match(r'i.86', host_arch) or host_arch == 'i86pc':
|
||||
host_arch = 'ia32'
|
||||
elif host_arch in ['x86_64', 'amd64']:
|
||||
host_arch = 'x64'
|
||||
elif host_arch.startswith('arm'):
|
||||
host_arch = 'arm'
|
||||
|
||||
# platform.machine is based on running kernel. It's possible to use 64-bit
|
||||
# kernel with 32-bit userland, e.g. to give linker slightly more memory.
|
||||
# Distinguish between different userland bitness by querying
|
||||
# the python binary.
|
||||
if host_arch == 'x64' and platform.architecture()[0] == '32bit':
|
||||
host_arch = 'ia32'
|
||||
|
||||
print host_arch
|
||||
return 0
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
@@ -1,33 +0,0 @@
|
||||
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
{
|
||||
'type': 'none',
|
||||
'dependencies': [
|
||||
'all.gyp:All',
|
||||
'linux_installer_configs',
|
||||
],
|
||||
'actions': [
|
||||
{
|
||||
'action_name': 'linux_package_deb_<(channel)_action',
|
||||
'process_outputs_as_sources': 1,
|
||||
'inputs': [
|
||||
'<(deb_build)',
|
||||
'<@(packaging_files_binaries)',
|
||||
'<@(packaging_files_common)',
|
||||
'<@(packaging_files_deb)',
|
||||
],
|
||||
'outputs': [
|
||||
'<(PRODUCT_DIR)/mod-pagespeed-<(channel)-<(version)-r<(revision)_<(deb_arch).deb',
|
||||
],
|
||||
'action': [ '<@(deb_cmd)', '-c', '<(channel)', ],
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
# Local Variables:
|
||||
# tab-width:2
|
||||
# indent-tabs-mode:nil
|
||||
# End:
|
||||
# vim: set expandtab tabstop=2 shiftwidth=2:
|
||||
@@ -1,34 +0,0 @@
|
||||
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
{
|
||||
'type': 'none',
|
||||
'dependencies': [
|
||||
'all.gyp:All',
|
||||
'linux_installer_configs',
|
||||
],
|
||||
'actions': [
|
||||
{
|
||||
'action_name': 'linux_package_deb_<(channel)_action',
|
||||
'process_outputs_as_sources': 1,
|
||||
'inputs': [
|
||||
'<(rpm_build)',
|
||||
'<(PRODUCT_DIR)/install/rpm/mod-pagespeed.spec.template',
|
||||
'<@(packaging_files_binaries)',
|
||||
'<@(packaging_files_common)',
|
||||
'<@(packaging_files_rpm)',
|
||||
],
|
||||
'outputs': [
|
||||
'<(PRODUCT_DIR)/mod-pagespeed-<(channel)-<(version)-r<(revision).<(rpm_arch).rpm',
|
||||
],
|
||||
'action': [ '<@(rpm_cmd)', '-c', '<(channel)', ],
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
# Local Variables:
|
||||
# tab-width:2
|
||||
# indent-tabs-mode:nil
|
||||
# End:
|
||||
# vim: set expandtab tabstop=2 shiftwidth=2:
|
||||
@@ -1,5 +0,0 @@
|
||||
{
|
||||
/* Make sure we don't export anything unneeded */
|
||||
global: pagespeed_module;
|
||||
local: *;
|
||||
};
|
||||
@@ -1,17 +0,0 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<VisualStudioToolFile
|
||||
Name="Output DLL copy"
|
||||
Version="8.00"
|
||||
>
|
||||
<Rules>
|
||||
<CustomBuildRule
|
||||
Name="Output DLL copy"
|
||||
CommandLine="xcopy /R /C /Y $(InputPath) $(OutDir)"
|
||||
Outputs="$(OutDir)\$(InputFileName)"
|
||||
FileExtensions="*.dll"
|
||||
>
|
||||
<Properties>
|
||||
</Properties>
|
||||
</CustomBuildRule>
|
||||
</Rules>
|
||||
</VisualStudioToolFile>
|
||||
@@ -1,139 +0,0 @@
|
||||
# Copyright 2013 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# PageSpeed overrides for Chromium's common.gypi.
|
||||
{
|
||||
'variables': {
|
||||
# Putting a variables dict inside another variables dict looks
|
||||
# kind of weird. This is done so that some variables are defined
|
||||
# as variables within the outer variables dict here. This is
|
||||
# necessary to get these variables defined for the conditions
|
||||
# within this variables dict that operate on these variables.
|
||||
'variables': {
|
||||
# Whether or not we are building for native client.
|
||||
'build_nacl%': 0,
|
||||
},
|
||||
|
||||
# Copy conditionally-set variables out one scope.
|
||||
'build_nacl%': '<(build_nacl)',
|
||||
|
||||
# Conditions that operate on our variables defined above.
|
||||
'conditions': [
|
||||
['build_nacl==1', {
|
||||
# Disable position-independent code when building under Native
|
||||
# Client.
|
||||
'linux_fpic': 0,
|
||||
}],
|
||||
],
|
||||
|
||||
|
||||
# Override a few Chromium variables:
|
||||
|
||||
# Chromium uses system shared libraries on Linux by default
|
||||
# (Chromium already has transitive dependencies on these libraries
|
||||
# via gtk). We want to link these libraries into our binaries so
|
||||
# we change the default behavior.
|
||||
'use_system_libjpeg': 0,
|
||||
'use_system_libpng': 0,
|
||||
'use_system_zlib': 0,
|
||||
|
||||
# We don't use google API keys in the PageSpeed build, so disable them.
|
||||
'use_official_google_api_keys': 0,
|
||||
|
||||
# Disable the chromium linting plugins since our code doesn't
|
||||
# (yet) meet their requirements.
|
||||
'clang_use_chrome_plugins': 0,
|
||||
|
||||
# Disable use of special ld gold flags, since it isn't installed
|
||||
# by default.
|
||||
'linux_use_gold_binary': 0,
|
||||
'linux_use_gold_flags': 0,
|
||||
},
|
||||
'target_defaults': {
|
||||
# Make sure our shadow view of chromium source is available to
|
||||
# targets that don't explicitly declare their dependencies and
|
||||
# assume chromium source headers are available from the root
|
||||
# (third_party/modp_b64 is one such target).
|
||||
'include_dirs': [
|
||||
'<(DEPTH)/third_party/chromium/src',
|
||||
],
|
||||
|
||||
# ABI-incompatible changes are trouble when you have a library, so turn off
|
||||
# _GLIBCXX_DEBUG --- it makes various STL objects have different types and
|
||||
# sizes.
|
||||
'defines!': [
|
||||
'_GLIBCXX_DEBUG=1'
|
||||
],
|
||||
},
|
||||
'conditions': [
|
||||
['build_nacl==1', {
|
||||
'target_defaults': {
|
||||
'defines': [
|
||||
# NaCL newlib's libpthread.a provides the
|
||||
# GetRunningOnValgrind symbol already, so we should not
|
||||
# provide it.
|
||||
'DYNAMIC_ANNOTATIONS_PROVIDE_RUNNING_ON_VALGRIND=0',
|
||||
],
|
||||
'include_dirs': [
|
||||
'<(DEPTH)/build/nacl_header_stubs',
|
||||
],
|
||||
},
|
||||
}],
|
||||
['os_posix==1 and OS!="mac"', {
|
||||
'target_defaults': {
|
||||
'ldflags': [
|
||||
# Fail to link if there are any undefined symbols.
|
||||
'-Wl,-z,defs',
|
||||
],
|
||||
}
|
||||
}],
|
||||
['OS=="mac"', {
|
||||
'target_defaults': {
|
||||
'xcode_settings': {
|
||||
'conditions': [
|
||||
['clang==1', {
|
||||
# Chromium's common.gypi does not currently scope the
|
||||
# clang binary paths relative to DEPTH, so we must
|
||||
# override the paths here.
|
||||
'CC': '$(SOURCE_ROOT)/<(DEPTH)/third_party/llvm-build/Release+Asserts/bin/clang',
|
||||
'LDPLUSPLUS': '$(SOURCE_ROOT)/<(DEPTH)/third_party/llvm-build/Release+Asserts/bin/clang++',
|
||||
}],
|
||||
]
|
||||
},
|
||||
},
|
||||
}],
|
||||
['OS=="win"', {
|
||||
'target_defaults': {
|
||||
# Remove the following defines, which are normally defined by
|
||||
# Chromium's common.gypi.
|
||||
'defines!': [
|
||||
# Chromium's common.gypi disables tr1. We need it for tr1
|
||||
# regex so remove their define to disable it.
|
||||
'_HAS_TR1=0',
|
||||
|
||||
# Chromium disables exceptions in some environments, but our
|
||||
# use of tr1 regex requires exception support, so we have to
|
||||
# re-enable it here.
|
||||
'_HAS_EXCEPTIONS=0',
|
||||
],
|
||||
'msvs_settings': {
|
||||
'VCCLCompilerTool': {
|
||||
'ExceptionHandling': '1', # /EHsc
|
||||
},
|
||||
},
|
||||
},
|
||||
}]
|
||||
],
|
||||
}
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
{
|
||||
'conditions': [
|
||||
# Handle build types.
|
||||
['buildtype=="Dev"', {
|
||||
'includes': ['internal/release_impl.gypi'],
|
||||
}],
|
||||
['buildtype=="Official"', {
|
||||
'includes': ['internal/release_impl_official.gypi'],
|
||||
}],
|
||||
# TODO(bradnelson): may also need:
|
||||
# checksenabled
|
||||
# coverage
|
||||
# dom_stats
|
||||
# pgo_instrument
|
||||
# pgo_optimize
|
||||
],
|
||||
}
|
||||
@@ -1,201 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright 2014 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
# (See http://src.chromium.org/viewvc/chrome/trunk/src/LICENSE)
|
||||
# This file itself is from
|
||||
# http://src.chromium.org/viewvc/chrome/trunk/src/build/util/ as of
|
||||
# revision r252481
|
||||
|
||||
"""
|
||||
version.py -- Chromium version string substitution utility.
|
||||
"""
|
||||
|
||||
import getopt
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
class Usage(Exception):
|
||||
def __init__(self, msg):
|
||||
self.msg = msg
|
||||
|
||||
|
||||
def fetch_values_from_file(values_dict, file_name):
|
||||
"""
|
||||
Fetches KEYWORD=VALUE settings from the specified file.
|
||||
|
||||
Everything to the left of the first '=' is the keyword,
|
||||
everything to the right is the value. No stripping of
|
||||
white space, so beware.
|
||||
|
||||
The file must exist, otherwise you get the Python exception from open().
|
||||
"""
|
||||
for line in open(file_name, 'r').readlines():
|
||||
key, val = line.rstrip('\r\n').split('=', 1)
|
||||
values_dict[key] = val
|
||||
|
||||
|
||||
def fetch_values(file_list):
|
||||
"""
|
||||
Returns a dictionary of values to be used for substitution, populating
|
||||
the dictionary with KEYWORD=VALUE settings from the files in 'file_list'.
|
||||
|
||||
Explicitly adds the following value from internal calculations:
|
||||
|
||||
OFFICIAL_BUILD
|
||||
"""
|
||||
CHROME_BUILD_TYPE = os.environ.get('CHROME_BUILD_TYPE')
|
||||
if CHROME_BUILD_TYPE == '_official':
|
||||
official_build = '1'
|
||||
else:
|
||||
official_build = '0'
|
||||
|
||||
values = dict(
|
||||
OFFICIAL_BUILD = official_build,
|
||||
)
|
||||
|
||||
for file_name in file_list:
|
||||
fetch_values_from_file(values, file_name)
|
||||
|
||||
return values
|
||||
|
||||
|
||||
def subst_template(contents, values):
|
||||
"""
|
||||
Returns the template with substituted values from the specified dictionary.
|
||||
|
||||
Keywords to be substituted are surrounded by '@': @KEYWORD@.
|
||||
|
||||
No attempt is made to avoid recursive substitution. The order
|
||||
of evaluation is random based on the order of the keywords returned
|
||||
by the Python dictionary. So do NOT substitute a value that
|
||||
contains any @KEYWORD@ strings expecting them to be recursively
|
||||
substituted, okay?
|
||||
"""
|
||||
for key, val in values.iteritems():
|
||||
try:
|
||||
contents = contents.replace('@' + key + '@', val)
|
||||
except TypeError:
|
||||
print repr(key), repr(val)
|
||||
return contents
|
||||
|
||||
|
||||
def subst_file(file_name, values):
|
||||
"""
|
||||
Returns the contents of the specified file_name with substited
|
||||
values from the specified dictionary.
|
||||
|
||||
This is like subst_template, except it operates on a file.
|
||||
"""
|
||||
template = open(file_name, 'r').read()
|
||||
return subst_template(template, values);
|
||||
|
||||
|
||||
def write_if_changed(file_name, contents):
|
||||
"""
|
||||
Writes the specified contents to the specified file_name
|
||||
iff the contents are different than the current contents.
|
||||
"""
|
||||
try:
|
||||
old_contents = open(file_name, 'r').read()
|
||||
except EnvironmentError:
|
||||
pass
|
||||
else:
|
||||
if contents == old_contents:
|
||||
return
|
||||
os.unlink(file_name)
|
||||
open(file_name, 'w').write(contents)
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
if argv is None:
|
||||
argv = sys.argv
|
||||
|
||||
short_options = 'e:f:i:o:t:h'
|
||||
long_options = ['eval=', 'file=', 'help']
|
||||
|
||||
helpstr = """\
|
||||
Usage: version.py [-h] [-f FILE] ([[-i] FILE] | -t TEMPLATE) [[-o] FILE]
|
||||
|
||||
-f FILE, --file=FILE Read variables from FILE.
|
||||
-i FILE, --input=FILE Read strings to substitute from FILE.
|
||||
-o FILE, --output=FILE Write substituted strings to FILE.
|
||||
-t TEMPLATE, --template=TEMPLATE Use TEMPLATE as the strings to substitute.
|
||||
-e VAR=VAL, --eval=VAR=VAL Evaluate VAL after reading variables. Can
|
||||
be used to synthesize variables. e.g.
|
||||
-e 'PATCH_HI=int(PATCH)/256'.
|
||||
-h, --help Print this help and exit.
|
||||
"""
|
||||
|
||||
evals = {}
|
||||
variable_files = []
|
||||
in_file = None
|
||||
out_file = None
|
||||
template = None
|
||||
|
||||
try:
|
||||
try:
|
||||
opts, args = getopt.getopt(argv[1:], short_options, long_options)
|
||||
except getopt.error, msg:
|
||||
raise Usage(msg)
|
||||
for o, a in opts:
|
||||
if o in ('-e', '--eval'):
|
||||
try:
|
||||
evals.update(dict([a.split('=',1)]))
|
||||
except ValueError:
|
||||
raise Usage("-e requires VAR=VAL")
|
||||
elif o in ('-f', '--file'):
|
||||
variable_files.append(a)
|
||||
elif o in ('-i', '--input'):
|
||||
in_file = a
|
||||
elif o in ('-o', '--output'):
|
||||
out_file = a
|
||||
elif o in ('-t', '--template'):
|
||||
template = a
|
||||
elif o in ('-h', '--help'):
|
||||
print helpstr
|
||||
return 0
|
||||
while len(args) and (in_file is None or out_file is None or
|
||||
template is None):
|
||||
if in_file is None:
|
||||
in_file = args.pop(0)
|
||||
elif out_file is None:
|
||||
out_file = args.pop(0)
|
||||
if args:
|
||||
msg = 'Unexpected arguments: %r' % args
|
||||
raise Usage(msg)
|
||||
except Usage, err:
|
||||
sys.stderr.write(err.msg)
|
||||
sys.stderr.write('; Use -h to get help.\n')
|
||||
return 2
|
||||
|
||||
values = fetch_values(variable_files)
|
||||
for key, val in evals.iteritems():
|
||||
values[key] = str(eval(val, globals(), values))
|
||||
|
||||
if template is not None:
|
||||
contents = subst_template(template, values)
|
||||
elif in_file:
|
||||
contents = subst_file(in_file, values)
|
||||
else:
|
||||
# Generate a default set of version information.
|
||||
contents = """MAJOR=%(MAJOR)s
|
||||
MINOR=%(MINOR)s
|
||||
BUILD=%(BUILD)s
|
||||
PATCH=%(PATCH)s
|
||||
LASTCHANGE=%(LASTCHANGE)s
|
||||
OFFICIAL_BUILD=%(OFFICIAL_BUILD)s
|
||||
""" % values
|
||||
|
||||
|
||||
if out_file:
|
||||
write_if_changed(out_file, contents)
|
||||
else:
|
||||
print contents
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
@@ -0,0 +1,216 @@
|
||||
# Copyright 2012 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# Environment Variables (Optional):
|
||||
# MOD_PAGESPEED_DIR: absolute path to the mod_pagespeed/src directory
|
||||
# PSOL_BINARY: absolute path to pagespeed_automatic.a
|
||||
|
||||
mod_pagespeed_dir="${MOD_PAGESPEED_DIR:-unset}"
|
||||
if [ "$mod_pagespeed_dir" = "unset" ] ; then
|
||||
mod_pagespeed_dir="$ngx_addon_dir/psol/include"
|
||||
build_from_source=false
|
||||
|
||||
if [ ! -e "$mod_pagespeed_dir" ] ; then
|
||||
echo "ngx_pagespeed: pagespeed optimization library not found:"
|
||||
echo ""
|
||||
echo " You need to separately download the pagespeed library:"
|
||||
echo ""
|
||||
echo " $ cd /path/to/ngx_pagespeed"
|
||||
echo " $ wget https://dl.google.com/dl/page-speed/psol/1.7.30.3.tar.gz"
|
||||
echo " $ tar -xzvf 1.7.30.3.tar.gz # expands to psol/"
|
||||
echo ""
|
||||
echo " Or see the installation instructions:"
|
||||
echo " https://github.com/pagespeed/ngx_pagespeed#how-to-build"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
else
|
||||
build_from_source=true
|
||||
fi
|
||||
|
||||
echo "mod_pagespeed_dir=$mod_pagespeed_dir"
|
||||
echo "build_from_source=$build_from_source"
|
||||
|
||||
ngx_feature="psol"
|
||||
ngx_feature_name=""
|
||||
ngx_feature_run=no
|
||||
ngx_feature_incs="
|
||||
#include \"net/instaweb/htmlparse/public/html_parse.h\"
|
||||
#include \"net/instaweb/htmlparse/public/html_writer_filter.h\"
|
||||
#include \"net/instaweb/util/public/string.h\"
|
||||
#include \"net/instaweb/util/public/string_writer.h\"
|
||||
#include \"net/instaweb/util/public/null_message_handler.h\"
|
||||
"
|
||||
|
||||
os_name='unknown_os'
|
||||
arch_name='unknown_arch'
|
||||
uname_os=`uname`
|
||||
uname_arch=`uname -m`
|
||||
|
||||
if [ $uname_os = 'Linux' ]; then
|
||||
os_name='linux'
|
||||
elif [ $uname_os = 'Darwin' ]; then
|
||||
os_name='mac'
|
||||
else
|
||||
echo "OS not supported: $uname_os"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ $uname_arch = 'x86_64' -o $uname_arch = 'amd64' ]; then
|
||||
arch_name='x64'
|
||||
elif [ $uname_arch = 'x86_32' -o $uname_arch = 'i686' \
|
||||
-o $uname_arch = 'i386' ]; then
|
||||
arch_name='ia32'
|
||||
else
|
||||
echo "Architecture not supported: $uname_arch"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$NGX_DEBUG" = "YES" ]; then
|
||||
buildtype=Debug
|
||||
else
|
||||
buildtype=Release
|
||||
fi
|
||||
|
||||
# The compiler needs to know that __sync_add_and_fetch_4 is ok,
|
||||
# and this requires an instruction that didn't exist on i586 or i386.
|
||||
if [ "$uname_arch" = "i686" ]; then
|
||||
FLAG_MARCH='-march=i686'
|
||||
fi
|
||||
|
||||
# Building with HTTPS fetching enabled pulls in a version of OpenSSL that causes
|
||||
# linker errors, so disable it here.
|
||||
CFLAGS="$CFLAGS -DSERF_HTTPS_FETCHING=0 $FLAG_MARCH"
|
||||
|
||||
case "$NGX_GCC_VER" in
|
||||
4.8*)
|
||||
# On GCC 4.8 and above, -Wall enables -Wunused-local-typedefs. This breaks
|
||||
# on VerifySizesAreEqual in bit_cast in chromium/src/base/basictypes.h which
|
||||
# has a typedef that is intentionally unused.
|
||||
CFLAGS="$CFLAGS -Wno-unused-local-typedefs"
|
||||
|
||||
# On GCC 4.8 and above, we get the following compiler warning:
|
||||
# chromium/src/base/memory/scoped_ptr.h:133:7: warning: declaration of ‘class scoped_ptr<C>’ [enabled by default]
|
||||
# Based on discussion at http://gcc.gnu.org/bugzilla/show_bug.cgi?id=54055,
|
||||
# this is invalid code, but hasn't been fixed yet in chromium.
|
||||
# Unfortunately, there also does not appear to be a flag for just disabling
|
||||
# that warning, so we add Wno-error to override nginx's default -Werror
|
||||
# option.
|
||||
CFLAGS="$CFLAGS -Wno-error"
|
||||
;;
|
||||
esac
|
||||
|
||||
pagespeed_include="\
|
||||
$mod_pagespeed_dir \
|
||||
$mod_pagespeed_dir/third_party/chromium/src \
|
||||
$mod_pagespeed_dir/third_party/google-sparsehash/src \
|
||||
$mod_pagespeed_dir/third_party/google-sparsehash/gen/arch/$os_name/$arch_name/include \
|
||||
$mod_pagespeed_dir/third_party/protobuf/src \
|
||||
$mod_pagespeed_dir/third_party/re2/src \
|
||||
$mod_pagespeed_dir/out/$buildtype/obj/gen \
|
||||
$mod_pagespeed_dir/out/$buildtype/obj/gen/protoc_out/instaweb \
|
||||
$mod_pagespeed_dir/third_party/apr/src/include \
|
||||
$mod_pagespeed_dir/third_party/aprutil/src/include \
|
||||
$mod_pagespeed_dir/third_party/apr/gen/arch/$os_name/$arch_name/include \
|
||||
$mod_pagespeed_dir/third_party/aprutil/gen/arch/$os_name/$arch_name/include"
|
||||
ngx_feature_path="$pagespeed_include"
|
||||
|
||||
if $build_from_source ; then
|
||||
psol_library_binaries="\
|
||||
$mod_pagespeed_dir/net/instaweb/automatic/pagespeed_automatic.a \
|
||||
$mod_pagespeed_dir/out/$buildtype/obj.target/third_party/serf/libserf.a \
|
||||
$mod_pagespeed_dir/out/$buildtype/obj.target/third_party/aprutil/libaprutil.a \
|
||||
$mod_pagespeed_dir/out/$buildtype/obj.target/third_party/apr/libapr.a"
|
||||
else
|
||||
psol_library_dir="$ngx_addon_dir/psol/lib/$buildtype/$os_name/$arch_name"
|
||||
psol_library_binaries="\
|
||||
$psol_library_dir/pagespeed_automatic.a \
|
||||
$psol_library_dir/libserf.a \
|
||||
$psol_library_dir/libaprutil.a \
|
||||
$psol_library_dir/libapr.a"
|
||||
fi
|
||||
|
||||
pagespeed_libs="-lstdc++ $psol_library_binaries -lrt -pthread -lm"
|
||||
ngx_feature_libs="$pagespeed_libs"
|
||||
ngx_feature_test="
|
||||
GoogleString output_buffer;
|
||||
net_instaweb::StringWriter write_to_string(&output_buffer);
|
||||
|
||||
net_instaweb::NullMessageHandler handler;
|
||||
net_instaweb::HtmlParse html_parse(&handler);
|
||||
net_instaweb::HtmlWriterFilter html_writer_filter(&html_parse);
|
||||
|
||||
html_writer_filter.set_writer(&write_to_string);
|
||||
html_parse.AddFilter(&html_writer_filter);
|
||||
|
||||
html_parse.StartParse(\"http:example.com\");
|
||||
html_parse.ParseText(
|
||||
\"<html ><body ><h1 >Test</h1 ><p>Test Text</p></body></html>\n\");
|
||||
html_parse.FinishParse();
|
||||
|
||||
printf(\"parsed as: %s\", output_buffer.c_str())"
|
||||
|
||||
# Test whether we have pagespeed and can compile and link against it.
|
||||
. "$ngx_addon_dir/cpp_feature"
|
||||
|
||||
if [ $ngx_found = yes ]; then
|
||||
ps_src="$ngx_addon_dir/src"
|
||||
ngx_addon_name=ngx_pagespeed
|
||||
NGX_ADDON_DEPS="$NGX_ADDON_DEPS \
|
||||
$ps_src/log_message_handler.h \
|
||||
$ps_src/ngx_base_fetch.h \
|
||||
$ps_src/ngx_caching_headers.h \
|
||||
$ps_src/ngx_fetch.h \
|
||||
$ps_src/ngx_list_iterator.h \
|
||||
$ps_src/ngx_message_handler.h \
|
||||
$ps_src/ngx_pagespeed.h \
|
||||
$ps_src/ngx_rewrite_driver_factory.h \
|
||||
$ps_src/ngx_rewrite_options.h \
|
||||
$ps_src/ngx_server_context.h \
|
||||
$ps_src/ngx_url_async_fetcher.h"
|
||||
NGX_ADDON_SRCS="$NGX_ADDON_SRCS \
|
||||
$ps_src/log_message_handler.cc \
|
||||
$ps_src/ngx_base_fetch.cc \
|
||||
$ps_src/ngx_caching_headers.cc \
|
||||
$ps_src/ngx_fetch.cc \
|
||||
$ps_src/ngx_list_iterator.cc \
|
||||
$ps_src/ngx_message_handler.cc \
|
||||
$ps_src/ngx_pagespeed.cc \
|
||||
$ps_src/ngx_rewrite_driver_factory.cc \
|
||||
$ps_src/ngx_rewrite_options.cc \
|
||||
$ps_src/ngx_server_context.cc \
|
||||
$ps_src/ngx_url_async_fetcher.cc \
|
||||
$mod_pagespeed_dir/out/$buildtype/obj/gen/data2c_out/instaweb/net/instaweb/apache/install/mod_pagespeed_example/mod_pagespeed_console_out.cc \
|
||||
$mod_pagespeed_dir/out/$buildtype/obj/gen/data2c_out/instaweb/net/instaweb/apache/install/mod_pagespeed_example/mod_pagespeed_console_css_out.cc \
|
||||
$mod_pagespeed_dir/out/$buildtype/obj/gen/data2c_out/instaweb/net/instaweb/apache/install/mod_pagespeed_example/mod_pagespeed_console_html_out.cc \
|
||||
$mod_pagespeed_dir/net/instaweb/system/add_headers_fetcher.cc \
|
||||
$mod_pagespeed_dir/net/instaweb/system/loopback_route_fetcher.cc \
|
||||
$mod_pagespeed_dir/net/instaweb/system/serf_url_async_fetcher.cc"
|
||||
|
||||
# Make pagespeed run immediately before gzip.
|
||||
HTTP_FILTER_MODULES=$(echo $HTTP_FILTER_MODULES |\
|
||||
sed "s/$HTTP_GZIP_FILTER_MODULE/$HTTP_GZIP_FILTER_MODULE $ngx_addon_name/")
|
||||
# Make the etag header filter run immediately after gzip.
|
||||
HTTP_FILTER_MODULES=$(echo $HTTP_FILTER_MODULES |\
|
||||
sed "s/$HTTP_GZIP_FILTER_MODULE/ngx_pagespeed_etag_filter $HTTP_GZIP_FILTER_MODULE/")
|
||||
CORE_LIBS="$CORE_LIBS $pagespeed_libs"
|
||||
CORE_INCS="$CORE_INCS $pagespeed_include"
|
||||
else
|
||||
cat << END
|
||||
$0: error: module ngx_pagespeed requires the pagespeed optimization library
|
||||
END
|
||||
exit 1
|
||||
fi
|
||||
|
||||
have=NGX_PAGESPEED . auto/have
|
||||
+122
@@ -0,0 +1,122 @@
|
||||
# Copyright (C) Igor Sysoev
|
||||
# Copyright (C) Nginx, Inc.
|
||||
# 2012-10-01 Modified from auto/feature by jefftk to support c++ test files.
|
||||
|
||||
echo $ngx_n "checking for $ngx_feature ...$ngx_c"
|
||||
|
||||
cat << END >> $NGX_AUTOCONF_ERR
|
||||
|
||||
----------------------------------------
|
||||
checking for $ngx_feature
|
||||
|
||||
END
|
||||
|
||||
ngx_found=no
|
||||
|
||||
if test -n "$ngx_feature_name"; then
|
||||
ngx_have_feature=`echo $ngx_feature_name \
|
||||
| tr abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ`
|
||||
fi
|
||||
|
||||
if test -n "$ngx_feature_path"; then
|
||||
for ngx_temp in $ngx_feature_path; do
|
||||
ngx_feature_inc_path="$ngx_feature_inc_path -I $ngx_temp"
|
||||
done
|
||||
fi
|
||||
|
||||
cat << END > $NGX_AUTOTEST.cc
|
||||
|
||||
#include <sys/types.h>
|
||||
$NGX_INCLUDE_UNISTD_H
|
||||
$ngx_feature_incs
|
||||
|
||||
int main() {
|
||||
$ngx_feature_test;
|
||||
return 0;
|
||||
}
|
||||
|
||||
END
|
||||
|
||||
|
||||
ngx_test="$CC $CC_TEST_FLAGS $CC_AUX_FLAGS $ngx_feature_inc_path \
|
||||
-o $NGX_AUTOTEST $NGX_AUTOTEST.cc $NGX_TEST_LD_OPT $ngx_feature_libs"
|
||||
|
||||
ngx_feature_inc_path=
|
||||
|
||||
eval "/bin/sh -c \"$ngx_test\" >> $NGX_AUTOCONF_ERR 2>&1"
|
||||
|
||||
|
||||
if [ -x $NGX_AUTOTEST ]; then
|
||||
|
||||
case "$ngx_feature_run" in
|
||||
|
||||
yes)
|
||||
# /bin/sh is used to intercept "Killed" or "Abort trap" messages
|
||||
if /bin/sh -c $NGX_AUTOTEST >> $NGX_AUTOCONF_ERR 2>&1; then
|
||||
echo " found"
|
||||
ngx_found=yes
|
||||
|
||||
if test -n "$ngx_feature_name"; then
|
||||
have=$ngx_have_feature . auto/have
|
||||
fi
|
||||
|
||||
else
|
||||
echo " found but is not working"
|
||||
fi
|
||||
;;
|
||||
|
||||
value)
|
||||
# /bin/sh is used to intercept "Killed" or "Abort trap" messages
|
||||
if /bin/sh -c $NGX_AUTOTEST >> $NGX_AUTOCONF_ERR 2>&1; then
|
||||
echo " found"
|
||||
ngx_found=yes
|
||||
|
||||
cat << END >> $NGX_AUTO_CONFIG_H
|
||||
|
||||
#ifndef $ngx_feature_name
|
||||
#define $ngx_feature_name `$NGX_AUTOTEST`
|
||||
#endif
|
||||
|
||||
END
|
||||
else
|
||||
echo " found but is not working"
|
||||
fi
|
||||
;;
|
||||
|
||||
bug)
|
||||
# /bin/sh is used to intercept "Killed" or "Abort trap" messages
|
||||
if /bin/sh -c $NGX_AUTOTEST >> $NGX_AUTOCONF_ERR 2>&1; then
|
||||
echo " not found"
|
||||
|
||||
else
|
||||
echo " found"
|
||||
ngx_found=yes
|
||||
|
||||
if test -n "$ngx_feature_name"; then
|
||||
have=$ngx_have_feature . auto/have
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
|
||||
*)
|
||||
echo " found"
|
||||
ngx_found=yes
|
||||
|
||||
if test -n "$ngx_feature_name"; then
|
||||
have=$ngx_have_feature . auto/have
|
||||
fi
|
||||
;;
|
||||
|
||||
esac
|
||||
|
||||
else
|
||||
echo " not found"
|
||||
|
||||
echo "----------" >> $NGX_AUTOCONF_ERR
|
||||
cat $NGX_AUTOTEST.cc >> $NGX_AUTOCONF_ERR
|
||||
echo "----------" >> $NGX_AUTOCONF_ERR
|
||||
echo $ngx_test >> $NGX_AUTOCONF_ERR
|
||||
echo "----------" >> $NGX_AUTOCONF_ERR
|
||||
fi
|
||||
|
||||
rm $NGX_AUTOTEST*
|
||||
-702
@@ -1,702 +0,0 @@
|
||||
# This Makefile contains targets for building, testing, and debugging
|
||||
# mod_pagespeed during develoment. Useful targets:
|
||||
#
|
||||
# apache_debug
|
||||
# apache_release
|
||||
# - Build a development (debug), or release (optimized), version of
|
||||
# mod_pagespeed.
|
||||
#
|
||||
# apache_debug_psol
|
||||
# - Build a development version of PSOL, as '.a' file suitable for
|
||||
# ngx_pagespeed to link against.
|
||||
#
|
||||
# apache_test
|
||||
# - Run all unit tests. You can also run the tests in halves with:
|
||||
# - mod_pagespeed_test
|
||||
# - Run unit tests under test.gyp:mod_pagespeed_test
|
||||
# - pagespeed_automatic_test
|
||||
# - Run unit tests under test.gyp:pagespeed_automatic_test
|
||||
# - You can run a specific unit test by setting TEST=test_name.
|
||||
# For example:
|
||||
# TEST=SerfUrlAsyncFetcherTest.FetchOneURLWithGzip
|
||||
# You can also run a subset of tests, using wildcards:
|
||||
# TEST=SerfUrlAsyncFetcherTest.* make apache_test
|
||||
# What you give to TEST is passed to googletest with --gtest_filter, so for
|
||||
# more details see the doc there:
|
||||
# https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md#running-a-subset-of-the-tests
|
||||
#
|
||||
# apache_release_test
|
||||
# - Run all unit test with a release (non-debug) build.
|
||||
#
|
||||
# apache_root_test
|
||||
# - Currently broken test that builds install tarball, installs it as the root
|
||||
# apache on port 80 (requires sudo password) and runs most of our system
|
||||
# tests to make sure filters are working and rewritten resources are served.
|
||||
#
|
||||
# (This file also pulls in the contents of Makefile.tests, so some useful
|
||||
# targets are defined there instead. To run system tests, for example, you can
|
||||
# run apache_debug_smoke_test.)
|
||||
#
|
||||
# apache_install_conf
|
||||
# - Copy install/debug.conf.template to your development apache directory,
|
||||
# making substitutions for @@VARS@@ and, depending on how OPTIONS is set,
|
||||
# uncommenting some #TESTVAR lines.
|
||||
# - When running tests this target is run automatically, but you may need to
|
||||
# run it manually during debugging.
|
||||
#
|
||||
# apache_debug_install
|
||||
# apache_release_install
|
||||
# - Install the debug/release version of mod_pagespeed into the build of apache
|
||||
# that build_development_apache.sh created.
|
||||
# - This is another command that you probably only need when debugging
|
||||
# manually.
|
||||
#
|
||||
# apache_debug_start
|
||||
# apache_debug_stop
|
||||
# apache_debug_restart
|
||||
# - Start, stop, and restart the development apache instance. This also
|
||||
# includes a few other potentially useful maintenance tasks, like cleaning up
|
||||
# temp files and rotating logs.
|
||||
# - Unless you specify FAST_RESTART=1, apache_debug_restart runs
|
||||
# apache_debug_install to make sure you're running the module you built and
|
||||
# not an earlier version.
|
||||
#
|
||||
# apache_debug_slurp
|
||||
# - mod_pagespeed can be run in "slurp" mode, where it is configured as a
|
||||
# forward proxy that saves a copy of everything that runs through it. Later
|
||||
# on the slurp can be served in read-only mode, to serve the cached copy of
|
||||
# whatever was visited. This is intended for testing and debugging, and is
|
||||
# what the load test uses.
|
||||
#
|
||||
# apache_debug_leak_test
|
||||
# - Run the unit tests and system tests under valgrind, to check for memory
|
||||
# errors and leaks.
|
||||
#
|
||||
# update_gyp_manifest
|
||||
# - If you add or remove a .gyp or .gypi file, run this target so we know to
|
||||
# watch it for changes.
|
||||
#
|
||||
# submodule_update
|
||||
# - The mod_pagespeed git repo includes its dependencies as git submodules.
|
||||
# These dependencies are pinned to a specific git revision. If someone has
|
||||
# upgraded one, you need to run submodule_update to apply those changes.
|
||||
#
|
||||
# doxygen
|
||||
# - Builds html documentation for PSOL. It includes everything defined as
|
||||
# public in the .h files. You might ask: how do I know what things are safe
|
||||
# to depend on as consumer of PSOL, and which might change? This is a good
|
||||
# question with a bad answer: you can depend on anything, and anything might
|
||||
# change. We don't offer ABI or even API compatibility across versions, even
|
||||
# across point releases.
|
||||
#
|
||||
# This Makefile also includes various test targets that really should be in
|
||||
# Makefile.tests:
|
||||
# - apache_debug_proxy_test
|
||||
# - apache_debug_slurp_test
|
||||
# TODO(jefftk): move these into Makefile.tests
|
||||
#
|
||||
# It also includes other targets that are not indended to be run directly, and
|
||||
# are either here to support devel/ scripts or to support other targets.
|
||||
# somewhat redundant with the .gyp files, but helps bridge the gap and automate
|
||||
# the transition.
|
||||
#
|
||||
# You can use the CONF= parameter to change build settings or enable coverage
|
||||
# testing for debug builds:
|
||||
#
|
||||
# CONF=Debug
|
||||
# - default, shouldn't be needed
|
||||
# CONF=Coverage
|
||||
# - run the test coverage analysis scripts after tests complete
|
||||
# CONF=Release
|
||||
# - shouldn't be needed; use targets with "release" in their name instead.
|
||||
# CONF=OptDebug
|
||||
# - build optimized binaries with debug info (-O2 -g)
|
||||
|
||||
# We need gcc-mozilla/bin to pick up our special gcc 4.8 for old versions of
|
||||
# ubuntu. It's ok to have it in the path even if we're on a machine where that
|
||||
# directory doesn't exist; bash will just move on.
|
||||
# TODO(jefftk): source ubuntu/make_vars.mk here, then move this there.
|
||||
# TODO(jefftk): get this Makefile working on other platforms (centos, ubuntu16)
|
||||
PATH := /usr/lib/gcc-mozilla/bin:/usr/local/bin:/usr/bin:$(PATH)
|
||||
export PATH
|
||||
|
||||
GIT_SRC = $(realpath ${PWD}/..)
|
||||
DEVEL_DIR = $(GIT_SRC)/devel
|
||||
INSTALL_DATA_DIR = $(GIT_SRC)/install
|
||||
|
||||
# Where binaries go.
|
||||
GIT_RELEASE_BIN = $(GIT_SRC)/out/Release
|
||||
GIT_DEBUG_BIN = $(GIT_SRC)/out/Debug
|
||||
GIT_BIN = $(GIT_SRC)/out/$(BUILDTYPE)
|
||||
|
||||
# GYP_MANIFEST holds a cached copy of the gyp files in this client. When we
|
||||
# first create a new checkout we run update_gyp_manifest, and from then on we
|
||||
# use the cached copy when we need to know whether to rerun hooks.
|
||||
GYP_MANIFEST = $(GIT_SRC)/gyp_manifest.d
|
||||
.PHONY : update_gyp_manifest
|
||||
update_gyp_manifest :
|
||||
find $(GIT_SRC) -name '*.gyp' -o -name '*.gypi' | \
|
||||
sort > $(GYP_MANIFEST)
|
||||
|
||||
# It's ok for this variable to be empty, and it will be empty the first time we
|
||||
# run in a new checkout. When it's empty we won't know whether to run gyp
|
||||
# but on an initial checkout we run them regardless because we won't yet
|
||||
# have the hook stamp.
|
||||
GYP_FILES = $(shell [ -f $(GYP_MANIFEST) ] && cat $(GYP_MANIFEST) || echo "")
|
||||
|
||||
HOOKS_STAMP = $(GIT_SRC)/hooks.timestamp
|
||||
|
||||
ifeq ($(OBJDIR),)
|
||||
OBJDIR := $(shell mktemp -td instaweb.XXXXXX)
|
||||
endif
|
||||
export OBJDIR
|
||||
MAKE := $(MAKE) --no-print-directory OBJDIR=$(OBJDIR)
|
||||
|
||||
# Unset environment variables that can interfere with the make process.
|
||||
# TODO(vchudnov): Not all of these may be problematic. Narrow down
|
||||
# this list if possible.
|
||||
unexport CC CXX GYP_DEFINES GYP_GENERATORS http_proxy
|
||||
|
||||
# gcc-mozilla contains /usr/lib/gcc-mozilla/bin/gcc but not
|
||||
# /usr/lib/gcc-mozilla/bin/cc so if we want to use gcc-mozilla, which we do on
|
||||
# Ubuntu 12 LTS, then we need to explicitly set CC=gcc.
|
||||
# TODO(jefftk): do this in a way that lets people explicitly choose to build
|
||||
# with clang instead of forcing gcc on everyone.
|
||||
export CC=gcc
|
||||
|
||||
# By default, precompiled JS files are used. When developing, though, we want to
|
||||
# enable local compilation of JS, so export the flag that enables use of the
|
||||
# closure compiler. Note that this var is checked when gyp is run, not at
|
||||
# compile time.
|
||||
export BUILD_JS=1
|
||||
|
||||
# Run the proxy tests from a host configured via
|
||||
# cd install; ./ubuntu.sh setup_test_machine
|
||||
# We use 'export' here so that apache/system_test.sh can see it.
|
||||
|
||||
PAGESPEED_TEST_HOST ?= selfsigned.modpagespeed.com
|
||||
export PAGESPEED_TEST_HOST
|
||||
|
||||
# We don't want our targets to be run parallelized, as we want
|
||||
# various directory maintenance steps to be run in-order
|
||||
# This prevents that from happening, but still passes -j to actual
|
||||
# compilation
|
||||
.NOTPARALLEL :
|
||||
|
||||
.PHONY : prepare_objdir
|
||||
prepare_objdir : $(OBJDIR)
|
||||
CHECKIN_PREP_TARGETS = test_no_var_growth prepare_objdir exes
|
||||
|
||||
.PHONY : checkin_prep echo_checkin_prep
|
||||
checkin_prep : $(CHECKIN_PREP_TARGETS)
|
||||
echo_checkin_prep :
|
||||
@echo $(CHECKIN_PREP_TARGETS)
|
||||
|
||||
# Determine gyp BUILDTYPE and hooks we should run based on the passed in CONF
|
||||
# TOOD(jefftk): The difference between setting CONF, BUILDTYPE, and running a
|
||||
# target that sets BUILDTYPE makes it pretty hard to know what BUILDTYPE(s) a
|
||||
# section of this Makefile might run under. Clean this up.
|
||||
CONF ?= Debug
|
||||
HOOKTYPE = NoHooks
|
||||
BUILDTYPE = $(CONF)
|
||||
|
||||
ifeq ($(CONF),Coverage)
|
||||
override BUILDTYPE=Debug_Coverage
|
||||
override HOOKTYPE=Coverage
|
||||
else ifeq ($(CONF),OptDebug)
|
||||
# For -O2 -g type builds, we have to build as BUILDTYPE=Release CXXLAGS=-g;
|
||||
# with BUILDTYPE=Debug CXXFLAGS="-O2" we would get a -O2 -g -O0 out of gyp,
|
||||
# with the -O0 cancelling the -O2.
|
||||
override BUILDTYPE=Release
|
||||
# We need to make sure to add -g only once (since otherwise recursive
|
||||
# invocation will end up with something like -g -g -g and force a rebuild)
|
||||
ifeq (,$(findstring -g,$(CFLAGS)))
|
||||
override CFLAGS+= -g
|
||||
override CXXFLAGS+= -g
|
||||
endif
|
||||
export CFLAGS
|
||||
endif
|
||||
|
||||
# We'd like to put this in gyp file, but that makes it hard to compile
|
||||
# on old versions of g++ that do not support this flag. However we
|
||||
# can add the flag during development. Don't add this in if it's
|
||||
# already present; that causes cxxflags to mismatch adding extra compiles.
|
||||
ifeq (,$(findstring -Wtype-limits,$(CXXFLAGS)))
|
||||
override CXXFLAGS+= -Wtype-limits
|
||||
endif
|
||||
export CXXFLAGS
|
||||
export BUILDTYPE
|
||||
|
||||
# test to make sure that CXXFLAGS doesn't grow on each recursion
|
||||
.PHONY : test_no_var_growth
|
||||
test_no_var_growth :
|
||||
[ "$(CXXFLAGS)" = \
|
||||
"`$(MAKE) echo_var VAR=CXXFLAGS `" ] || \
|
||||
$(DEVEL_DIR)/expectfail echo CXXFLAGS changed on Makefile recursion
|
||||
|
||||
APACHE_RELEASE_MODULES_DIR=/usr/lib/apache2/modules
|
||||
|
||||
APACHE_DEBUG_ROOT ?= $(HOME)/apache2
|
||||
|
||||
# Note: these must be distinct from the ports used for apache_root_test.
|
||||
APACHE_PORT = 8080
|
||||
APACHE_SLURP_ORIGIN_PORT = 8081
|
||||
APACHE_SLURP_PORT = 8082
|
||||
APACHE_SECONDARY_PORT = 8083
|
||||
APACHE_TERTIARY_PORT = 8085
|
||||
APACHE_HTTPS_PORT = 8443
|
||||
CONTROLLER_PORT = 8086
|
||||
RCPORT=9091
|
||||
# If you add additional ports here, make sure they don't overlap with the test
|
||||
# ports ngx_pagespeed uses. Check ngx_pagespeed/test/run_tests.sh for the list.
|
||||
|
||||
OPTIONS = $(EXTRA_OPTIONS) HTTPS_TEST=1
|
||||
APACHE_DEBUG_MODULES = $(APACHE_DEBUG_ROOT)/modules
|
||||
APACHE_DEBUG_BIN = $(APACHE_DEBUG_ROOT)/bin
|
||||
MOD_PAGESPEED_CACHE = $(APACHE_DEBUG_ROOT)/pagespeed_cache
|
||||
MOD_PAGESPEED_LOG = $(APACHE_DEBUG_ROOT)/pagespeed_log
|
||||
TMP_SLURP_DIR = /tmp/instaweb/$(USER)/slurp
|
||||
# Not used in this Makefile, but needed for some submake/script.
|
||||
APACHE_LOG = $(APACHE_DEBUG_ROOT)/logs/error_log
|
||||
APACHE_DEBUG_PAGESPEED_CONF = $(APACHE_DEBUG_ROOT)/conf/pagespeed.conf
|
||||
APACHE_DOC_ROOT = $(APACHE_DEBUG_ROOT)/htdocs
|
||||
|
||||
APACHE_SERVER = http://localhost:$(APACHE_PORT)
|
||||
|
||||
# We want to track borgingssl branch chromium-stable at head, but submodules
|
||||
# tracks a specific commit ID.
|
||||
# If we need more deps like this, we should figure out a more generic solution.
|
||||
UPDATE_BORINGSSL = git checkout chromium-stable && git pull
|
||||
|
||||
# These hooks are invoked before/after apache or tests are run
|
||||
.PHONY : pre_start_NoHooks post_run_NoHooks pre_start_Coverage post_run_Coverage
|
||||
.PHONY : pre_start post_run
|
||||
pre_start_NoHooks :
|
||||
post_run_NoHooks :
|
||||
|
||||
pre_start_Coverage :
|
||||
$(DEVEL_DIR)/gcov-all.sh --prepare $(GIT_SRC)
|
||||
|
||||
post_run_Coverage :
|
||||
$(DEVEL_DIR)/gcov-all.sh --summarize $(GIT_SRC)
|
||||
|
||||
pre_start : pre_start_$(HOOKTYPE)
|
||||
|
||||
post_run : post_run_$(HOOKTYPE)
|
||||
|
||||
# We check closure-compiled javascript into git to enable people to use
|
||||
# closure-compiled output without needing to have a closure compiler dependency
|
||||
# in the build process.
|
||||
GENFILES = $(GIT_SRC)/net/instaweb/genfiles
|
||||
|
||||
.PHONY : apache_debug apache_debug_psol
|
||||
|
||||
# Sets up a development tree, and builds the modules
|
||||
apache_debug : $(HOOKS_STAMP)
|
||||
@echo "building Apache module $@ ..."
|
||||
cd $(GIT_SRC) && $(MAKE)
|
||||
@echo Built mod_pagespeed successfully:
|
||||
ls -l $(GIT_DEBUG_BIN)/libmod_pagespeed.so
|
||||
@echo To install, type
|
||||
@echo " " cp $(GIT_DEBUG_BIN)/libmod_pagespeed.so \
|
||||
$(APACHE_DEBUG_ROOT)/modules/mod_pagespeed.so
|
||||
|
||||
apache_debug_psol : apache_debug
|
||||
cd $(GIT_SRC)/pagespeed/automatic && $(MAKE) \
|
||||
MOD_PAGESPEED_ROOT=$(GIT_SRC) \
|
||||
OUTPUT_DIR=$(GIT_DEBUG_BIN) \
|
||||
BUILDTYPE=Debug \
|
||||
CXXFLAGS=$(CXXFLAGS) \
|
||||
all
|
||||
@echo Built PSOL successfully under in $(GIT_DEBUG_BIN)
|
||||
|
||||
.PHONY : submodule_update
|
||||
submodule_update:
|
||||
@echo "Updating local checkouts from open source. If these generate any"
|
||||
@echo "merge conflicts you'll need to resolve them manually."
|
||||
cd $(GIT_SRC) && git pull --ff-only && git submodule update --recursive
|
||||
|
||||
.PHONY : exes apache_debug_install apache_debug_stop apache_debug_start
|
||||
exes : apache_debug_install
|
||||
apache_debug_install : apache_debug
|
||||
install -c $(GIT_BIN)/libmod_pagespeed.so \
|
||||
$(APACHE_DEBUG_MODULES)/mod_pagespeed.so
|
||||
install -c $(GIT_BIN)/libmod_pagespeed_ap24.so \
|
||||
$(APACHE_DEBUG_MODULES)/mod_pagespeed_ap24.so
|
||||
|
||||
apache_debug_stop :
|
||||
$(MAKE) stop
|
||||
$(MAKE) post_run
|
||||
|
||||
apache_debug_start :
|
||||
$(MAKE) pre_start
|
||||
$(MAKE) stop
|
||||
$(DEVEL_DIR)/apache_cleanup.sh $(USER)
|
||||
-$(DEVEL_DIR)/apache_rotate_logs.sh $(APACHE_DEBUG_ROOT)/logs
|
||||
$(MAKE) start
|
||||
|
||||
.PHONY : apache_debug_restart
|
||||
ifeq ($(FAST_RESTART),1)
|
||||
apache_debug_restart :
|
||||
$(MAKE) apache_debug_stop
|
||||
$(MAKE) apache_debug_start
|
||||
else
|
||||
# Restarts with newest module
|
||||
apache_debug_restart :
|
||||
$(MAKE) apache_debug_stop
|
||||
$(MAKE) apache_debug_install
|
||||
$(MAKE) apache_debug_start
|
||||
endif
|
||||
|
||||
# Installs a slurping pagespeed.conf on localhost:8082 and runs a debug
# binary.
.PHONY : apache_debug_slurp
apache_debug_slurp : slurp_test_prepare apache_install_conf
	$(MAKE) apache_debug_restart
	@echo Set your browser proxy to `hostname`:$(APACHE_SLURP_PORT)

# Stop and start Apache as reliably as possible.
.PHONY : stop start
stop :
	$(INSTALL_DATA_DIR)/stop_apache.sh $(APACHE_DEBUG_BIN)/apachectl \
	    $(APACHE_DEBUG_ROOT)/logs/httpd.pid \
	    $(APACHE_DEBUG_BIN)/httpd \
	    graceful-stop \
	    $(APACHE_PORT)

# apachectl returns before the server is actually up; poll for the pid
# file so dependent targets don't race against a half-started server.
start :
	$(APACHE_DEBUG_BIN)/apachectl start
	@if [ ! -f $(APACHE_DEBUG_ROOT)/logs/httpd.pid ]; then \
	  /bin/echo -n "Waiting for httpd to start"; \
	  while [ ! -f $(APACHE_DEBUG_ROOT)/logs/httpd.pid ]; do \
	    /bin/echo -n "."; \
	    sleep 1; \
	  done; \
	  /bin/echo; \
	fi

.PHONY : apache_debug_leak_test
apache_debug_leak_test :
	rm -rf $(MOD_PAGESPEED_CACHE)/*  # Start with an empty cache
	$(DEVEL_DIR)/check_for_leaks \
	    $(APACHE_DEBUG_ROOT) localhost:$(APACHE_PORT)

include $(INSTALL_DATA_DIR)/Makefile.tests

# Installs the apache debug server and runs a stress test against it.
# This will blow away your existing cache and pagespeed.conf.
#
# Note: this test is obsolete; you probably want mps_load_test.sh or one of the
# siege tests.
#
# TODO(sligocki): Lock Apache.
apache_debug_stress_test : stress_test_prepare apache_install_conf
	$(MAKE) apache_debug_restart
	$(INSTALL_DATA_DIR)/stress_test.sh localhost:$(APACHE_PORT)
	$(MAKE) apache_debug_stop

stress_test_prepare :
	# stop old apaches, w/o generating any coverage reports,
	# (as we're about to do some new test)
	$(MAKE) stop
	$(eval OPTIONS+=STRESS_TEST=1)
	-rm -rf $(MOD_PAGESPEED_CACHE)/*
||||
|
||||
# This test checks that when ProxyPass is set on a host with mod_pagespeed
# enabled, URLs are rewritten correctly. (See install/debug.conf.template)
# See: http://github.com/pagespeed/mod_pagespeed/issues/74
#
# TODO(sligocki): Lock Apache.
.PHONY : apache_debug_proxy_test proxy_test_prepare
apache_debug_proxy_test : proxy_test_prepare apache_install_conf
	$(MAKE) apache_debug_restart
	$(WGET_NO_PROXY) -q -O /dev/null \
	    $(APACHE_SERVER)/proxy_pass.html?PageSpeedFilters=extend_cache
	sleep 1
	$(WGET_NO_PROXY) -q -O - \
	    $(APACHE_SERVER)/proxy_pass.html?PageSpeedFilters=extend_cache\
	    | grep "localhost:8080/proxy_pass.css.pagespeed.ce"
	# Before this was fixed, it would be rewritten as localhost:8081/...

proxy_test_prepare :
	$(eval OPTIONS+=PROXY_TEST=1)

# This test checks that we can serve slurped pages, both from requests
# like those sent from a browser-proxy.  We'd also like to test
# requests sent from a fake DNS, like webpagetest, but I haven't
# figured out how to do that yet.  For now we'll just test read-only
# proxying.
#
# In our test flow, we first warm up mod_pagespeed's cache with a
# fetch and a sleep.  Then we test to make sure that we rewrite a
# resource properly, with mps on, and that we don't rewrite it with
# mps off.
#
# TODO(sligocki): Lock Apache.
.PHONY : apache_debug_slurp_test slurp_test_prepare
apache_debug_slurp_test : slurp_test_prepare apache_install_conf
	$(MAKE) apache_debug_restart
	$(DEVEL_DIR)/slurp_test.sh $(APACHE_SERVER) \
	    $(APACHE_SLURP_ORIGIN_PORT) $(APACHE_SLURP_PORT) $(WGET) \
	    $(TMP_SLURP_DIR) $(PAGESPEED_TEST_HOST)

slurp_test_prepare :
	$(eval OPTIONS+=SLURP_TEST=1)
	rm -rf $(MOD_PAGESPEED_CACHE)/*
|
||||
|
||||
# This is for trace-based stress tests, that operate on recorded URLs
# and slurp databases.
# Note: you can pass PAR=, EXP= and RUNS= to stress test targets that
# execute the tests.
.PHONY : check_dump_dir apache_trace_stress_test_prepare
check_dump_dir :
	$(if $(DUMP_DIR),,$(error "Need data directory set with DUMP_DIR="))
	$(if $(wildcard $(DUMP_DIR)/slurp),,\
	    $(error "No slurp/ under $(DUMP_DIR)"))

# mod_pagespeed load tests can be started with scripts/mps_load_test.sh.  If
# given an arg 'memcached', then that script will export MEMCACHED=1 and that
# will be passed to target 'apache_install_conf' as indicated below.  Same
# with cache_invalidation and inline_unauthorized_resources (IUR).
apache_trace_stress_test_prepare : check_dump_dir
	$(MAKE) stress_test_prepare
	echo MEMCACHED=$(MEMCACHED)
	echo REDIS=$(REDIS)
	echo PURGING=$(PURGING)
	echo IUR=$(IUR)
	echo IPRO_PRESERVE=$(IPRO_PRESERVE)
	$(MAKE) apache_install_conf SLURP_DIR=$(DUMP_DIR)/slurp SLURP_WRITE=0 \
	    OPTIONS="$(OPTIONS) \
	        LOADTEST_TEST=1 \
	        STRESS_TEST=1 \
	        HTTPS_TEST=0 \
	        IPRO_PRESERVE_LOADTEST_TEST=$(IPRO_PRESERVE) \
	        MEMCACHED_LOADTEST_TEST=$(MEMCACHED) \
	        REDIS_LOADTEST_TEST=$(REDIS) \
	        PURGING_LOADTEST_TEST=$(PURGING) \
	        IUR_LOADTEST_TEST=$(IUR)"

# Bring up a fully configured server for trace-based load testing.
apache_trace_stress_test_server :
	$(MAKE) apache_trace_stress_test_prepare
	$(MAKE) apache_debug_install
	$(MAKE) apache_debug_start
|
||||
|
||||
# Pass TEST=<gtest filter> to run only matching unit tests.
ifeq ($(TEST),)
TEST_ARG=
else
TEST_ARG=--gtest_filter=$(TEST)
endif

.PHONY : apache_test mod_pagespeed_test
apache_test : apache_debug
	$(MAKE) pre_start
	cd $(GIT_SRC) && \
	    install/run_program_with_ext_caches.sh \
	        $(GIT_BIN)/mod_pagespeed_test $(TEST_ARG)
	cd $(GIT_SRC) && \
	    install/run_program_with_ext_caches.sh \
	        $(GIT_BIN)/pagespeed_automatic_test $(TEST_ARG)
	$(MAKE) post_run

# TODO(jefftk): figure out if dividing the unit tests into mod_pagespeed_test
# and pagespeed_automatic_test is actually helpful.  Currently it looks like if
# there was once a principled distinction between the targets it has atrophied.
mod_pagespeed_test : apache_debug
	$(MAKE) pre_start
	cd $(GIT_SRC) && \
	    install/run_program_with_ext_caches.sh \
	        $(GIT_BIN)/mod_pagespeed_test $(TEST_ARG)
	$(MAKE) post_run

.PHONY : pagespeed_automatic_smoke_test
# TODO(jefftk): get rid of this and just use apache_debug_psol
pagespeed_automatic_smoke_test :
	@echo Building Pagespeed Automatic ...
	cd $(GIT_SRC)/pagespeed/automatic/ && \
	    $(MAKE) MOD_PAGESPEED_ROOT=$(GIT_SRC) \
	        OUTPUT_DIR=$(GIT_BIN) \
	        CXXFLAGS=$(CXXFLAGS) \
	        all

.PHONY : pagespeed_automatic_test
pagespeed_automatic_test : apache_debug
	$(MAKE) pre_start
	cd $(GIT_SRC) && \
	    install/run_program_with_ext_caches.sh \
	        $(GIT_BIN)/pagespeed_automatic_test $(TEST_ARG)
	$(MAKE) post_run

.PHONY : apache_release_test
apache_release_test : apache_release internal_release_test

TMP_PREFIX = /tmp/mod_pagespeed.$(USER).install

# Builds a binary release tarball, installs it as root (requires sudo password)
# and tests that rewrites occur.
#
# TODO(jmarantz): We are still leaving a bunch of stuff behind in /tmp; organize
# the temp dirs better and clean up.
.PHONY : apache_root_test internal_release_test

# TODO(jmarantz): This target is not working yet...it fails copying over
# pagespeed_libraries.conf.  Investigate:
#   cat common/pagespeed.load.template | \
#     sed s~@@APACHE_MODULEDIR@@~/usr/lib/apache2/modules~ | \
#     sed s/@@COMMENT_OUT_DEFLATE@@// > /tmp/mod_pagespeed.install/pagespeed.load
#   cp -f /tmp/instaweb.vRV047/mod_pagespeed-test-jmarantz/install/net/instaweb/genfiles/conf/pagespeed_libraries.conf /tmp/mod_pagespeed.install/pagespeed_libraries.conf
#   cp: cannot stat '/tmp/instaweb.vRV047/mod_pagespeed-test-jmarantz/install/net/instaweb/genfiles/conf/pagespeed_libraries.conf': No such file or directory
# TODO(jefftk): this also depends on prepare_release.sh and install-glucid.sh,
# that haven't been open sourced yet.
#apache_root_test :
#	sudo /etc/init.d/apache2 stop
#	sudo rm -rf $(TMP_PREFIX) /tmp/mod_pagespeed.install
#	cd ../.. && devel/prepare_release.sh -force \
#	    mod_pagespeed-test-$(USER)
#	cd $(OBJDIR) && tar xzf \
#	    $(TMP_PREFIX)/mod_pagespeed-test-$(USER).tgz
#	cd $(OBJDIR)/mod_pagespeed-test-$(USER) && ./install-glucid.sh
#	rm -rf $(OBJDIR) /tmp/mod_pagespeed-test-$(USER).tgz
#	sudo rm -rf $(TMP_PREFIX) /tmp/mod_pagespeed.install

# Run both release-mode unit-test binaries under the external-cache wrapper.
internal_release_test :
	cd $(GIT_SRC) && \
	    BUILDTYPE=Release install/run_program_with_ext_caches.sh \
	        $(GIT_RELEASE_BIN)/mod_pagespeed_test $(TEST_ARG) && \
	    BUILDTYPE=Release install/run_program_with_ext_caches.sh \
	        $(GIT_RELEASE_BIN)/pagespeed_automatic_test $(TEST_ARG)
|
||||
|
||||
# Configuration root for Apache file-system and cache directories, to
# be written into config file.
#
# Installs a development copy of the Instaweb server into $APACHE_DEBUG_ROOT
.PHONY : apache_install_conf
apache_install_conf : setup_doc_root
	rm -f $(APACHE_DEBUG_ROOT)/conf/pagespeed.conf
	mkdir -p $(MOD_PAGESPEED_CACHE)
	mkdir -p $(MOD_PAGESPEED_LOG)
	cd $(INSTALL_DATA_DIR) && \
	    $(MAKE) $(APACHE_DEBUG_ROOT)/conf/pagespeed.conf \
	        STAGING_DIR=$(APACHE_DEBUG_ROOT)/conf \
	        APACHE_DOC_ROOT=$(APACHE_DEBUG_ROOT)/htdocs \
	        APACHE_MODULES=$(APACHE_DEBUG_MODULES) \
	        APACHE_DOMAIN=$(APACHE_SERVER) \
	        CONTROLLER_PORT=$(CONTROLLER_PORT) \
	        RCPORT=$(RCPORT) \
	        APACHE_SECONDARY_PORT=$(APACHE_SECONDARY_PORT) \
	        APACHE_TERTIARY_PORT=$(APACHE_TERTIARY_PORT) \
	        APACHE_HTTPS_DOMAIN=$(APACHE_HTTPS_SERVER) \
	        MOD_PAGESPEED_CACHE=$(MOD_PAGESPEED_CACHE) \
	        MOD_PAGESPEED_LOG=$(MOD_PAGESPEED_LOG) \
	        MOD_PAGESPEED_ROOT=$(GIT_SRC) \
	        SLURP_DIR=$(SLURP_DIR) \
	        SLURP_WRITE=$(SLURP_WRITE) \
	        PAGESPEED_TEST_HOST=$(PAGESPEED_TEST_HOST) \
	        TMP_SLURP_DIR=$(TMP_SLURP_DIR) \
	        $(OPTIONS)
	$(DEVEL_DIR)/apache_configure_https_port.sh $(APACHE_DEBUG_ROOT) \
	    $(APACHE_HTTPS_PORT)
	$(DEVEL_DIR)/apache_create_server_certificate.sh $(APACHE_DEBUG_ROOT)
	$(DEVEL_DIR)/apache_configure_php5_from_etc_php5.sh $(APACHE_DEBUG_ROOT)
	$(DEVEL_DIR)/apache_install.sh \
	    $(APACHE_DEBUG_ROOT) $(APACHE_PORT) $(GIT_SRC)
|
||||
|
||||
# Builds the release objects.  This should not be run on its own -- it
# should be run manually via 'make apache_release'.
.PHONY : internal_release_build
internal_release_build : $(GIT_SRC) $(HOOKS_STAMP)
	@echo "building Apache release module ..."
	cd $(GIT_SRC) && BUILDTYPE=Release $(MAKE)

DOXYGEN_TARBALL = $(OBJDIR)/psol_doc.tar.gz
# Generates a doxygen tarball in $(DOXYGEN_TARBALL) suitable for
# scp to modpagespeed.com.
.PHONY : doxygen
doxygen :
	$(DEVEL_DIR)/doxify_tree.sh $(DOXYGEN_TARBALL)

.PHONY : apache_release apache_release_install
apache_release : internal_release_build
	@echo Release apache module built. Install it with
	@echo " " cd $(GIT_SRC)/install
	@echo " " make staging
	@echo " " sudo make install
	@echo "or"
	@echo " " sudo cp \
	    $(GIT_RELEASE_BIN)/libmod_pagespeed.so \
	    $(APACHE_RELEASE_MODULES_DIR)/mod_pagespeed.so

apache_release_install : apache_release
	@echo "Copy the module to your Apache2 modules directory."
	@echo "Then, restart your Apache server."
	install -c $(GIT_RELEASE_BIN)/libmod_pagespeed.so \
	    $(APACHE_RELEASE_MODULES_DIR)/mod_pagespeed.so
	install -c $(GIT_RELEASE_BIN)/libmod_pagespeed_ap24.so \
	    $(APACHE_RELEASE_MODULES_DIR)/mod_pagespeed_ap24.so
	/etc/init.d/apache2 stop
	/etc/init.d/apache2 start

# Using pipefail to ensure grep -v doesn't obscure a gyp failure.
$(HOOKS_STAMP) : $(GPERF) $(GYP_FILES)
	bash -o pipefail -c \
	    "cd $(GIT_SRC) && python build/gyp_chromium --depth=. | \
	    egrep -v '^(Generating )[a-zA-Z0-9_/.-]*(Makefile|mk)$$'"
	touch $(HOOKS_STAMP)
|
||||
|
||||
$(OBJDIR) : make_obj_dirs

.PHONY : make_obj_dirs clean
# Pre-create every output directory the build writes into.
make_obj_dirs : $(GIT_SRC)
	@echo Setting up library directories...
	@mkdir -p $(GIT_INSTAWEB_LIB_PATH) \
	    $(GENFILES) \
	    $(OBJDIR)/apache/net/instaweb \
	    $(OBJDIR)/apache/pagespeed/opt/ads \
	    $(OBJDIR)/apache/pagespeed/kernel/base \
	    $(OBJDIR)/apache/pagespeed/kernel/cache \
	    $(OBJDIR)/apache/pagespeed/kernel/html \
	    $(OBJDIR)/apache/pagespeed/kernel/http \
	    $(OBJDIR)/apache/pagespeed/kernel/image \
	    $(OBJDIR)/apache/pagespeed/kernel/image/testdata/gif \
	    $(OBJDIR)/apache/pagespeed/kernel/image/testdata/jpeg \
	    $(OBJDIR)/apache/pagespeed/kernel/image/testdata/png \
	    $(OBJDIR)/apache/pagespeed/kernel/image/testdata/pngsuite \
	    $(OBJDIR)/apache/pagespeed/kernel/image/testdata/pngsuite/gif \
	    $(OBJDIR)/apache/pagespeed/kernel/image/testdata/resized \
	    $(OBJDIR)/apache/pagespeed/kernel/image/testdata/webp \
	    $(OBJDIR)/apache/pagespeed/kernel/js \
	    $(OBJDIR)/apache/pagespeed/kernel/sharedmem \
	    $(OBJDIR)/apache/pagespeed/kernel/thread \
	    $(OBJDIR)/apache/pagespeed/kernel/util \
	    $(OBJDIR)/apache/pagespeed/install \
	    $(OBJDIR)/apache/pagespeed/apache \
	    $(OBJDIR)/apache/pagespeed/automatic \
	    $(OBJDIR)/apache/pagespeed/automatic/public \
	    $(OBJDIR)/apache/pagespeed/controller \
	    $(OBJDIR)/apache/pagespeed/system \
	    $(OBJDIR)/apache/pagespeed/system/public \
	    $(OBJDIR)/css_parser
	@cd $(OBJDIR)/apache/net/instaweb; mkdir -p \
	    config htmlparse/public http/public http/testdata \
	    js/public public rewriter/public rewriter/testdata spriter \
	    spriter/public util/public

clean :
	rm -rf $(OBJDIR)
	rm -rf $(GIT_SRC)/out

# Install gperf if it doesn't exist
GPERF = /usr/bin/gperf
$(GPERF) :
	sudo apt-get install gperf

.PHONY : echo_var echo_vars
# Print one variable's value: make echo_var VAR=NAME
echo_var :
	@echo $($(VAR))

echo_vars :
	@echo OBJDIR=$(OBJDIR)
	@echo G3_ROOT=$(G3_ROOT)
	@echo SANDBOX=$(SANDBOX)
	@echo GOOGLE_PROTO_FILES=$(GOOGLE_PROTO_FILES)
	@echo GIT_SRC=$(GIT_SRC)
	@echo GYP_MANIFEST=$(GYP_MANIFEST)
|
||||
@@ -1,51 +0,0 @@
|
||||
#!/bin/bash
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# See bug 3103898 and
# http://carlosrivero.com/fix-apache
#---no-space-left-on-device-couldnt-create-accept-lock
#
# This script might help cleanup any leftover resources that prevent
# Apache from restarting.  The error message might look a little something
# like this:
#
# [Sat Oct 16 21:22:46 2010] [warn] pid file /usr/local/apache2/logs/httpd.pid
# overwritten -- Unclean shutdown of previous Apache run?
#
# [Sat Oct 16 21:22:46 2010] [emerg] (28)No space left on device: Couldn't
# create accept lock (/usr/local/apache2/logs/accept.lock.16025) (5)
#
# Usage:
#
#   devel/apache_cleanup $USER
#
# You may want to see the owners of the IPC blocks by running ipcs -s
# manually.  For example, you might need to run:
#
#   sudo devel/apache_cleanup www-data
# or
#   sudo devel/apache_cleanup root

apache_user=$1

# Guard against a missing argument: with an empty pattern, grep would match
# every line of ipcs output and we would try to remove every IPC object on
# the machine, not just the Apache user's.
if [ -z "$apache_user" ]; then
  echo "Usage: $(basename $0) apache-user" >&2
  exit 1
fi

# Remove leftover System V semaphores owned by the Apache user.
# $apache_user is quoted so an unusual username can't word-split or glob.
for ipsemId in $(ipcs -s | grep "$apache_user" | cut -f 2 -d ' '); do
  echo ipcrm -s $ipsemId
  ipcrm -s $ipsemId || true
done

# Remove leftover System V shared-memory segments owned by the Apache user.
for ipsemId in $(ipcs -m | grep "$apache_user" | cut -f 2 -d ' '); do
  echo ipcrm -m $ipsemId
  ipcrm -m $ipsemId || true
done
|
||||
@@ -1,48 +0,0 @@
|
||||
#!/bin/bash
#
# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Script to enable SSL on the given port for Apache, usually in ~/apache2.
#
# usage: apache_configure_https_port.sh apache-root-directory https-port

APACHE_ROOT=$1
HTTPS_PORT=$2

# If either argument is missing, do nothing (assume that https is disabled).
[ -z "$APACHE_ROOT" ] && exit 0
[ -z "$HTTPS_PORT" ] && exit 0

# Change the port only if we can find the file where we expect it.
conf_file=$APACHE_ROOT/conf/extra/httpd-ssl.conf
if [ -e $conf_file ]; then
  # Rewrite the Listen, <VirtualHost ...>, and ServerName lines to use the
  # requested port.  Edit into a temp file first so a failed write leaves
  # the original config untouched.
  sed -e '/^[ ]*Listen /s/^.*$/Listen '"$HTTPS_PORT"'/' \
      -e '/<VirtualHost /s/.*:[0-9]*/<VirtualHost localhost:'"$HTTPS_PORT"'/' \
      -e '/^[ ]*ServerName /s/^.*$/ServerName '"$(hostname):$HTTPS_PORT"'/' \
      ${conf_file} > ${conf_file}.$$
  if mv -f ${conf_file}.$$ ${conf_file}; then
    echo HTTPS was enabled on port $HTTPS_PORT in $conf_file
  else
    rm -f ${conf_file}.$$
    echo FAILED: mv ${conf_file}.$$ ${conf_file}
    echo Cannot enable HTTPS on port $HTTPS_PORT in $conf_file
  fi
else
  echo $conf_file does not exist.
  echo Consider updating devel/Makefile and/or devel/$(basename $0)
  exit 1
fi

exit 0
|
||||
@@ -1,167 +0,0 @@
|
||||
#!/bin/bash
#
# Copyright 2012 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Script to enable PHP5 in Apache assuming it has already been installed into
# the standard Ubuntu directory (/etc/apache, /usr/apache) rather than the
# ~/apache2 directory we use [in other words, modules have been apt install'd
# rather than built from source].
#
# PHP5 can be installed using the following commands:
#   apt-get install php5-common php5
#   apt-get install php5-cgi php5-cli libapache2-mod-fcgid  # * worker MPM only *
#
# Note that it does not fail if any of these are not installed since we don't
# want to force site admins to install them just to run some tests.
#
# usage: apache_configure_php5_from_etc_php5.sh apache-root-directory
APACHE_ROOT=$1
if [ -z "${APACHE_ROOT}" ]; then
  echo "Usage: $0 <apache-root-directory>"
  exit 1
fi

HTTPD_CONF=${APACHE_ROOT}/conf/httpd.conf
DST_PHP5_CONFIG=${APACHE_ROOT}/conf/php5.conf
DST_PHP5_MODULE=${APACHE_ROOT}/modules/libphp5.so
DST_FCGID_CONFIG=${APACHE_ROOT}/conf/fcgid.conf
DST_FCGID_MODULE=${APACHE_ROOT}/modules/mod_fcgid.so

# Note: contains an embedded TAB.
WS="[ 	]"

# Early exit if everything seems to be installed already.
grep -q "^${WS}*Include${WS}.*conf/php5.conf${WS}*$" "${HTTPD_CONF}" && \
  grep -q "^${WS}*Include${WS}.*conf/fcgid.conf${WS}*$" "${HTTPD_CONF}"
if [[ $? -eq 0 && \
      -r "${DST_PHP5_CONFIG}" && \
      -r "${DST_PHP5_MODULE}" && \
      -r "${DST_FCGID_MODULE}" ]]; then
  exit 0
fi

# Hardwire where we get things from since it's a Ubuntu standard.
SRC_PHP5_INIDIR=/etc/php5/apache2
SRC_PHP5_MODULE=/usr/lib/apache2/modules/libphp5.so

# We want our own build of fcgid since we want to test with 2.2, while the
# packages are for 2.4
SRC_FCGID_MODULE=${APACHE_ROOT}/modules/mod_fcgid-src_build.so

# Bail if PHP5 isn't installed [where we expect it].

if [[ ! -r "/usr/bin/php5-cgi" ||
      ! -r "${SRC_FCGID_MODULE}" ]]; then
  echo "*** PHP5 is not installed, or is not installed where we expect" >&2
  echo "    under /etc/php5 and /usr/lib/apache2. Please run:" >&2
  echo "    sudo apt-get install php5-common php5" >&2
  echo "    sudo apt-get install php5-cgi php5-cli libapache2-mod-fcgid">&2
  echo ""
  echo "    You may also need to rm -rf ${APACHE_ROOT}" >&2
  echo "    and re-run install/build_development_apache.sh"
  exit 1
fi

# Tricky grep'ing to find the correct Directory section in httpd.conf:
# first locate the opening <Directory .../htdocs> line (quoted or not),
# then the matching </Directory>, then look for an ExecCGI Options line
# inside that span.
PAT1="<Directory${WS}${WS}*${APACHE_ROOT}/htdocs${WS}*>"
PAT2="<Directory${WS}${WS}*\"${APACHE_ROOT}/htdocs\"${WS}*>"
HTDOCS_OPEN_LINENO=$(
  egrep -n "${PAT1}|${PAT2}" "${HTTPD_CONF}" \
    | sed -e 's/:.*//')
if [ -z "$HTDOCS_OPEN_LINENO" ]; then
  echo
  echo "*** ${HTTPD_CONF} does not have a line like:" >&2
  echo '    <Directory "'${APACHE_ROOT}/htdocs'">' >&2
  echo "    which is the expected document root for the installation" >&2
  echo "    and whose entry needs to be updated.  ABORTING." >&2
  exit 1
fi
HTDOCS_CLOSE_LINENO=$(
  tail -n +${HTDOCS_OPEN_LINENO} "${HTTPD_CONF}" \
    | grep -n "^${WS}*</${WS}*Directory${WS}*>" \
    | head -1 \
    | sed -e 's/:.*//')
OPTIONS_LINENO=$(
  tail -n +${HTDOCS_OPEN_LINENO} "${HTTPD_CONF}" \
    | head -${HTDOCS_CLOSE_LINENO:-999999} \
    | grep -i "^${WS}${WS}*Options${WS}.*[+]\?ExecCGI")
if [ -z "$OPTIONS_LINENO" ]; then
  # No ExecCGI yet: insert an Options line just before the closing
  # </Directory> (or at end of file if we never found a close tag).
  [ -n "${HTDOCS_CLOSE_LINENO}" ] && \
    HTDOCS_CLOSE_LINENO=$((HTDOCS_OPEN_LINENO + HTDOCS_CLOSE_LINENO - 1))
  sed -e "${HTDOCS_CLOSE_LINENO:-$}"'i\
# Required for mod_fcgi which is required for PHP when using worker MPM.\
Options +ExecCGI' "${HTTPD_CONF}" > "${HTTPD_CONF}".tmp
  mv "${HTTPD_CONF}".tmp "${HTTPD_CONF}"
fi

# Add the necessary lines to httpd.conf if/as necessary.
fgrep -q "LoadModule fcgid_module modules/mod_fcgid.so" "${HTTPD_CONF}"
if [ $? -ne 0 ]; then
  # Backwards compatibility: check if PHP5 for Apache 2.2 prefork is setup.
  grep -q "^${WS}*LoadModule${WS}${WS}*php5_module${WS}.*modules/libphp5.so${WS}*$" "${HTTPD_CONF}"
  if [ $? -eq 0 ]; then
    # Remove the lines that just setup PHP5 for Apache 2.2 prefork.
    sed -e "/^${WS}*LoadModule${WS}${WS}*php5_module${WS}.*/d" \
        -e "/^${WS}*Include${WS}${WS}*conf\/php5.conf${WS}*$/d" \
        "${HTTPD_CONF}" > "${HTTPD_CONF}.tmp"
    mv "${HTTPD_CONF}.tmp" "${HTTPD_CONF}"
  fi
  # Insert the all-singing all dancing lines for Apache 2.2 prefork/worker.
  # Unconditionally use mod_fcgid.
  # NOTE(review): the existence check above tests /usr/bin/php5-cgi but the
  # wrapper below invokes /usr/bin/php-cgi -- on Ubuntu these are usually the
  # same binary via alternatives, but confirm on the target system.
  cat - >> "${HTTPD_CONF}" <<EOF
LoadModule fcgid_module modules/mod_fcgid.so
Include conf/fcgid.conf
AddHandler fcgid-script .php
FCGIWrapper /usr/bin/php-cgi .php
EOF
fi

# Copy the config files over as necessary.
if [ ! -f "${DST_PHP5_CONFIG}" ]; then
  cat - > "${DST_PHP5_CONFIG}" <<EOF
<IfModule php5_module>
AddHandler php5-script .php
DirectoryIndex index.html index.php
AddType text/html .php
AddType application/x-httpd-php-source phps
PHPIniDir ${SRC_PHP5_INIDIR}
</IfModule>
EOF
fi

if [ ! -f "${DST_FCGID_CONFIG}" ]; then
  cat - > "${DST_FCGID_CONFIG}" <<EOF
<IfModule mod_fcgid.c>
AddHandler fcgid-script .fcgi
FcgidConnectTimeout 20
FcgidProcessTableFile fcgid/fcgid_shm
FcgidIPCDir fcgid/sock
</IfModule>
EOF
fi

# Link the modules as necessary.
if [[ ! -f "${DST_PHP5_MODULE}" && -f "${SRC_PHP5_MODULE}" ]]; then
  ln -s "${SRC_PHP5_MODULE}" "${DST_PHP5_MODULE}"
fi
if [ ! -f "${DST_FCGID_MODULE}" ]; then
  ln -s "${SRC_FCGID_MODULE}" "${DST_FCGID_MODULE}"
fi

# Create the mod_fcgid directories.
[ -d "${APACHE_ROOT}"/fcgid ] || mkdir "${APACHE_ROOT}"/fcgid
[ -d "${APACHE_ROOT}"/fcgid/sock ] || mkdir "${APACHE_ROOT}"/fcgid/sock

exit 0
|
||||
@@ -1,33 +0,0 @@
|
||||
#!/bin/bash
#
# Copyright 2012 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Script to create a server certificate (and key) file for Apache,
# usually in ~/apache2.
#
# usage: apache_create_server_certificate.sh apache-root-directory

APACHE_ROOT=$1

# Create a cert file iff we don't already have one.
# NOTE(review): sha1 + 1024-bit RSA are weak by modern standards; acceptable
# only for a local self-signed dev certificate -- confirm this is never used
# anywhere public before relying on it.
if [ ! -e $APACHE_ROOT/conf/server.crt ]; then
  openssl req -new -x509 -days 36500 -sha1 -newkey rsa:1024 -nodes \
    -keyout $APACHE_ROOT/conf/server.key \
    -out $APACHE_ROOT/conf/server.crt \
    -subj "/O=Company/OU=Department/CN=$(hostname)"
  echo Certificate files were created: $APACHE_ROOT/conf/server.{key,crt}
fi

exit 0
|
||||
@@ -1,85 +0,0 @@
|
||||
#!/bin/bash
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Script to install a debuggable mod_pagespeed.so into the Apache
# distribution, usually in ~/apache2.

APACHE_DEBUG_ROOT=$1
APACHE_DEBUG_PORT=$2
SRC_TREE=$3

mkdir -p $APACHE_DEBUG_ROOT/pagespeed/cache
chmod a+rwx $APACHE_DEBUG_ROOT/pagespeed/cache

# Check to see if mod_pagespeed is already loaded into httpd.conf
conf_file=$APACHE_DEBUG_ROOT/conf/httpd.conf
if [ -e $conf_file ]; then
  if grep -q "^Listen $APACHE_DEBUG_PORT\$" $conf_file; then
    echo $conf_file is set up to listen on the port $APACHE_DEBUG_PORT.
  else
    echo $conf_file is not set up to listen on port $APACHE_DEBUG_PORT
    echo please remedy
    exit 1
  fi

  if grep -q "LoadModule pagespeed_module" $conf_file; then
    echo mod_pagespeed is already loaded in config file $conf_file
  else
    echo adding mod_pagespeed into apache config file $conf_file
    cat $SRC_TREE/install/common/pagespeed.load.template | \
      sed s#@@APACHE_MODULEDIR@@#$APACHE_DEBUG_ROOT/modules# | \
      sed s#@@COMMENT_OUT_DEFLATE@@## >> $conf_file
    echo Include $APACHE_DEBUG_ROOT/conf/pagespeed.conf >> $conf_file
  fi

  # Now hack the file to also load mod_h2.
  MOD_H2=$APACHE_DEBUG_ROOT/modules/mod_http2.so
  if [ -f $MOD_H2 ]; then
    if grep -q "LoadModule http2_module" $conf_file; then
      echo http2_module is already loaded in config file $conf_file
    else
      echo adding http2_module into apache config file $conf_file
      # The continuation lines below are literal text inserted into the
      # config by sed's 'i' command; they must stay flush-left.
      cat $conf_file | sed -e '/pagespeed.conf/i\
\
# Load mod_http2 to test mod_pagespeed integration. This is done before\
# pagespeed.conf so it can detect it.\
LoadModule http2_module '$MOD_H2'\
Protocols h2 http/1.1 \
Protocols h2c http/1.1\
' > $conf_file.sp
      mv $conf_file.sp $conf_file
    fi
  else
    echo "No mod_http2 in $APACHE_DEBUG_ROOT/modules, so not loading"
  fi

  # pagespeed_libraries.conf was added later, so check for it separately.
  libraries_conf_file="$APACHE_DEBUG_ROOT/conf/pagespeed_libraries.conf"
  if grep -q "Include $libraries_conf_file" $conf_file; then
    echo pagespeed_libraries.conf is already loaded by $conf_file
  else
    echo adding pagespeed_libraries.conf include to $conf_file
    cp -f $SRC_TREE/net/instaweb/genfiles/conf/pagespeed_libraries.conf \
      $libraries_conf_file
    echo Include $libraries_conf_file >> $conf_file
  fi
else
  echo "$conf_file does not exist. Consider updating devel/Makefile and/or"
  echo "devel/apache_install.sh"
  exit 1
fi

exit 0
|
||||
@@ -1,92 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2011 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# Rotate the logs in the apache logs directory specified on the command line,
|
||||
# and gzip them, then erase old logs if disk usage is over 85%. Note that
|
||||
# apache must be stopped when we do this. Note also that we take pains not to
|
||||
# erase the newly-rotated logs, as those are the ones we are likely to care
|
||||
# deeply about. This may mean the disk stays full, but logs are pretty
|
||||
# compressible so it's unlikely.
|
||||
set -e
|
||||
now() {
|
||||
date '+%Y%m%d-%H%M'
|
||||
}
|
||||
stamp=$(now)
|
||||
if [ $# -ne 1 -o ! -d "$1" ]; then
|
||||
echo "Usage: apache_rotate_logs.sh logs_directory" >&2
|
||||
exit 1
|
||||
fi
|
||||
cd "$1"
|
||||
if [ -f "error_log.gz" ]; then
|
||||
# Clean up after partial log rotation
|
||||
echo "Cleaning up error_log.gz"
|
||||
mv error_log.gz error_log.$stamp.gz
|
||||
cleaned_up=true
|
||||
fi
|
||||
if [ -f "access_log.gz" ]; then
|
||||
# Clean up after partial log rotation
|
||||
echo "Cleaning up access_log.gz"
|
||||
mv access_log.gz access_log.$stamp.gz
|
||||
cleaned_up=true
|
||||
fi
|
||||
if [ ! -f "error_log" -a ! -f "access_log" ]; then
|
||||
# No logs to rotate.
|
||||
echo "No new logs to rotate"
|
||||
else
|
||||
# gzip can be kind of slow, so parallelize.
|
||||
# But gzip well, as this stuff eats a ton of space.
|
||||
if [ -f "error_log" ]; then
|
||||
echo "Gzipping error_log"
|
||||
gzip -9 error_log &
|
||||
fi
|
||||
if [ -f "access_log" ]; then
|
||||
echo "Gzipping access_log"
|
||||
gzip -9 access_log
|
||||
fi
|
||||
wait
|
||||
if [ ! -z "$cleaned_up" ]; then
|
||||
# If we used stamp, create a fresh one (effectively spin)
|
||||
old_stamp=stamp
|
||||
stamp=$(now)
|
||||
while [ "$stamp" == "$old_stamp" ]; do
|
||||
sleep 1
|
||||
stamp=$(now)
|
||||
done
|
||||
fi
|
||||
# Now timestamp the just-compressed logs.
|
||||
if [ -f "error_log.gz" ]; then
|
||||
echo "Timestamping error_log"
|
||||
mv error_log.gz error_log.$stamp.gz
|
||||
fi
|
||||
if [ -f "access_log.gz" ]; then
|
||||
echo "Timestamping access_log"
|
||||
mv access_log.gz access_log.$stamp.gz
|
||||
fi
|
||||
fi
|
||||
# Clean up old logs if the disk is getting full (>85%).
|
||||
df_percent() {
|
||||
df . --output=pcent | egrep -o '[0-9]+'
|
||||
}
|
||||
if [ $(df_percent) -ge 85 ]; then
|
||||
echo "Cleaning required."
|
||||
for log in $(/bin/ls -1tr *_log.[0-9]* | head -n -2); do
|
||||
echo "Cleaning $log"
|
||||
rm $log
|
||||
if [ $(df_percent) -lt 85 ]; then
|
||||
break
|
||||
fi
|
||||
done
|
||||
fi
|
||||
@@ -1,96 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Note: you might need to type your password a few times, once early, and once
|
||||
# at the end.
|
||||
#
|
||||
# This should be run on a release branch to make sure we can make a tarball and
|
||||
# at least build it on our workstations. It will also copy the tarball into
|
||||
# ~/release (where the binaries usually go).
|
||||
#
|
||||
# Like most of our dev tools this assumes Ubuntu 14 LTS. If that isn't what you
|
||||
# have, it's probably easiest to run this in a VM.
|
||||
#
|
||||
# Note that if this fails you may need to tweak the file list inside
|
||||
# devel/create_distro_tarball.sh
|
||||
|
||||
set -e # exit script if any command returns an error
|
||||
set -u # exit the script if any variable is uninitialized
|
||||
|
||||
function usage {
|
||||
echo "Usage: devel/build_release_tarball.sh <beta|stable>"
|
||||
exit 1
|
||||
}
|
||||
|
||||
if [ $# -ne 1 ]; then
|
||||
usage
|
||||
fi
|
||||
|
||||
if [ ! -d net/instaweb ]; then
|
||||
echo "This script must be run from the root of the mps checkout."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
source net/instaweb/public/VERSION
|
||||
RELEASE="$MAJOR.$MINOR.$BUILD.$PATCH"
|
||||
CHANNEL="$1"
|
||||
|
||||
deps="libpng12-dev libicu-dev libssl-dev libjpeg-dev realpath build-essential
|
||||
pkg-config gperf unzip libapr1-dev libaprutil1-dev apache2-dev"
|
||||
if dpkg-query -Wf '${Status}\n' $deps 2>&1 | \
|
||||
grep -v "install ok installed"; then
|
||||
# Only run apt-get install if one of the deps is not already installed.
|
||||
# See: http://stackoverflow.com/questions/1298066
|
||||
sudo apt-get install $deps
|
||||
fi
|
||||
|
||||
RELEASE_DIR="$HOME/release/$RELEASE"
|
||||
mkdir -p "$RELEASE_DIR"
|
||||
REVISION="$(build/lastchange.sh "$PWD" | sed 's/LASTCHANGE=//')"
|
||||
TARBALL="$RELEASE_DIR/mod-pagespeed-$CHANNEL-$RELEASE-r$REVISION.tar.bz2"
|
||||
devel/create_distro_tarball.sh "$TARBALL"
|
||||
|
||||
echo "Tarball should now be at $TARBALL"
|
||||
|
||||
# Try to build it
|
||||
BUILD_DIR="$(mktemp -d)"
|
||||
echo "Doing a test build inside $BUILD_DIR"
|
||||
cd "$BUILD_DIR"
|
||||
|
||||
if openssl version | grep "^OpenSSL 1[.]0[.][01]\|^OpenSSL 0[.]"; then
|
||||
echo "Your openssl version is too old to build the tarball; we need 1.0.2+"
|
||||
echo "Building 1.0.2 from source..."
|
||||
OPENSSL_VERSION="1.0.2j"
|
||||
wget "https://www.openssl.org/source/openssl-${OPENSSL_VERSION}.tar.gz"
|
||||
tar -xzvf "openssl-${OPENSSL_VERSION}.tar.gz"
|
||||
cd openssl-"${OPENSSL_VERSION}"
|
||||
./config --prefix="$BUILD_DIR" shared
|
||||
make
|
||||
make install
|
||||
export SSL_CERT_DIR=/etc/ssl/certs
|
||||
export PKG_CONFIG_PATH="$BUILD_DIR/lib/pkgconfig"
|
||||
export LD_LIBRARY_PATH="$BUILD_DIR/lib"
|
||||
cd "$BUILD_DIR"
|
||||
fi
|
||||
|
||||
tar xjf "$TARBALL"
|
||||
cd modpagespeed*
|
||||
./generate.sh -Dsystem_include_path_apr=/usr/include/apr-1.0/ \
|
||||
-Dsystem_include_path_httpd=/usr/include/apache2
|
||||
cd src
|
||||
make -j6
|
||||
out/Debug/mod_pagespeed_test
|
||||
# These tests fail because they are golded against a specific version of
|
||||
# compression libraries.
|
||||
# TODO(sligocki): Could we change the tests to be less fragile or test in a
|
||||
# different way in this case?
|
||||
BROKEN_TESTS=\
|
||||
ImageConverterTest.OptimizePngOrConvertToJpeg:\
|
||||
ImageConverterTest.ConvertOpaqueGifToJpeg:\
|
||||
JpegOptimizerTest.ValidJpegsLossy:\
|
||||
JpegOptimizerTest.ValidJpegLossyAndColorSampling:\
|
||||
JpegOptimizerTest.ValidJpegsProgressiveAndLossy
|
||||
out/Debug/pagespeed_automatic_test --gtest_filter=-$BROKEN_TESTS
|
||||
|
||||
echo "Cleaning up"
|
||||
rm -rf "$BUILD_DIR"
|
||||
|
||||
@@ -1,229 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -u # exit the script if any variable is uninitialized
|
||||
|
||||
this_dir=$(dirname "${BASH_SOURCE[0]}")
|
||||
cd "$this_dir/.."
|
||||
src="$PWD"
|
||||
|
||||
if [ ! -d install ]; then
|
||||
echo "Expected to see install."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
source "$src/install/shell_utils.sh" || exit 1
|
||||
|
||||
# In order to set up the pagespeed.conf file correctly for external cache
|
||||
# tests, we must have a special env variable established. The easiest way
|
||||
# to do that is to re-run this script under 'run_program_with_<cache-type>.sh',
|
||||
# which executes a command and then brings down external cache server.
|
||||
#
|
||||
# So when we run this script, if we don't already have our external cache
|
||||
# configured, we just re-run the script under run_program_with_*.sh. That will
|
||||
# establish a single external cache server for all the unit tests and system
|
||||
# tests.
|
||||
|
||||
# One weird trick for testing a variable for whether it is set, without
|
||||
# triggering an error due to "set -u" above:
|
||||
# http://stackoverflow.com/questions/3601515/how-to-check-if-a-variable-is-set-in-bash
|
||||
if [[ -z ${MEMCACHED_PORT+x} ]]; then
|
||||
exec "$src/install/run_program_with_memcached.sh" "$0" "$@"
|
||||
fi
|
||||
if [[ -z ${REDIS_PORT+x} ]]; then
|
||||
exec "$src/install/run_program_with_redis.sh" "$0" "$@"
|
||||
fi
|
||||
|
||||
valgrind="/usr/bin/valgrind"
|
||||
if [ ! -e $valgrind ]; then
|
||||
echo "***" You must install the system valgrind into $valgrind
|
||||
echo sudo apt-get install valgrind
|
||||
exit 1
|
||||
fi
|
||||
|
||||
install_log_file=/tmp/install.log.$$
|
||||
valgrind_test_out=/tmp/valgrind.test.out.$$
|
||||
valgrind_httpd_out=/tmp/valgrind.httpd.out.$$
|
||||
system_test_log=/tmp/system_test.log.$$
|
||||
apache_test_log=/tmp/apache_test.log.$$
|
||||
exit_status=0
|
||||
failures=""
|
||||
run_unit_tests=1
|
||||
OPTIONS="${OPTIONS:-""} VALGRIND_TEST=1"
|
||||
|
||||
if [ $1 == "--no_unit_tests" ]; then
|
||||
run_unit_tests="0"
|
||||
shift
|
||||
fi
|
||||
apache_debug_root=$1
|
||||
shift
|
||||
server=$1
|
||||
shift
|
||||
|
||||
function record_error() {
|
||||
exit_status=1
|
||||
failures="$failures $@"
|
||||
echo FAIL: $@
|
||||
}
|
||||
|
||||
function check_valgrind_log_for_problems() {
|
||||
# TODO(jmaessen): Consider checking 'Use of uninitialized', but
|
||||
# that will require image library exclusions.
|
||||
grep 'Invalid .* of size' $2 && \
|
||||
record_error "$1 contains invalid memory operations."
|
||||
grep 'definitely lost: [1-9][0-9,]* bytes in [1-9][0-9,]* blocks' $2 && \
|
||||
record_error "$1 Directly lost bytes"
|
||||
grep "indirectly lost: [1-9][0-9,]* bytes in [1-9][0-9,]* blocks" $2 && \
|
||||
record_error "$1 Indirectly Lost bytes"
|
||||
}
|
||||
|
||||
# Because valgrind is so slow, we want to run it under tee. But tee swallows
|
||||
# the exit status code, so we can get false positives. Instead we ignore the
|
||||
# exit status from tee and search for a pattern in the log that indicates an
|
||||
# error.
|
||||
#
|
||||
# $1 = the program to be run
|
||||
# $2 = the log file
|
||||
# $3 = the error-pattern to search for in the log.
|
||||
function run_and_grep_for_error {
|
||||
echo `date`: Running "$1 in $PWD ..."
|
||||
$1 2>&1 | tee $2
|
||||
grep "$3" $2 >/dev/null
|
||||
if [ $? -eq 0 ]; then
|
||||
record_error "$1 failed: \"$3\" found in $2"
|
||||
fi
|
||||
}
|
||||
|
||||
SUPPRESSIONS="$src/devel/valgrind_suppressions.txt"
|
||||
|
||||
function check_unit_test_for_leaks() {
|
||||
exe=$1
|
||||
shards=$2
|
||||
echo `date`: Running $exe with $valgrind, log to $valgrind_test_out.$exe
|
||||
cd $src
|
||||
# For the unit tests only we use --child-silent-after-fork so that
|
||||
# cross-process communication tests don't trigger false leak warnings
|
||||
# at exit of kids they fork.
|
||||
|
||||
# If one sets envvars GTEST_TOTAL_SHARDS as well as GTEST_SHARD_INDEX
|
||||
# with 0 <= GTEST_SHARD_INDEX < GTEST_TOTAL_SHARDS, gtest will only
|
||||
# execute a portion of tests in a given process, letting us to parallelize
|
||||
# unit test execution.
|
||||
export GTEST_TOTAL_SHARDS=$shards
|
||||
last_shard=$((GTEST_TOTAL_SHARDS - 1))
|
||||
LOGS=
|
||||
for i in $(seq 0 $last_shard); do
|
||||
export GTEST_SHARD_INDEX=$i
|
||||
LOG=$valgrind_test_out.$exe.$i
|
||||
LOGS="$LOGS $LOG"
|
||||
|
||||
echo $valgrind --leak-check=full \
|
||||
--suppressions=$SUPPRESSIONS \
|
||||
--read-var-info=yes --num-callers=20 --child-silent-after-fork=yes \
|
||||
./out/Debug/$exe "2>&1" "|" tee $LOG "&"
|
||||
$valgrind --leak-check=full --suppressions=$SUPPRESSIONS \
|
||||
--read-var-info=yes --num-callers=20 --child-silent-after-fork=yes \
|
||||
./out/Debug/$exe 2>&1 | tee $LOG &
|
||||
done
|
||||
wait
|
||||
|
||||
run_and_grep_for_error "cat $LOGS" $valgrind_test_out.$exe '^\[ FAILED \] '
|
||||
check_valgrind_log_for_problems $exe $valgrind_test_out.$exe
|
||||
}
|
||||
|
||||
# Checks the system-tests using a pagespeed.conf configuration. An
|
||||
# argument must be supplied that will be used as a suffix for log files.
|
||||
function check_system_test_for_leaks() {
|
||||
suffix=$1
|
||||
|
||||
local options="HTTPS_TEST=0 $OPTIONS"
|
||||
if [[ $suffix == "memcached" ]]; then
|
||||
options+=" MEMCACHED_TEST=1"
|
||||
elif [[ $suffix == "redis" ]]; then
|
||||
options+=" REDIS_TEST=1"
|
||||
fi
|
||||
|
||||
echo make apache_debug_install apache_install_conf \
|
||||
OPTIONS="$options" '>&' "$install_log_file"
|
||||
make apache_debug_install apache_install_conf \
|
||||
OPTIONS="$options" >& $install_log_file
|
||||
|
||||
outfile=$valgrind_httpd_out.$suffix
|
||||
echo `date`: Running httpd $valgrind with output spewed to $outfile
|
||||
ps auxww | grep httpd
|
||||
|
||||
echo $valgrind --gen-suppressions=all --leak-check=full --trace-children=yes \
|
||||
--suppressions=$SUPPRESSIONS \
|
||||
$apache_debug_root/bin/httpd --enable-pool-debug \
|
||||
">&" $outfile "&"
|
||||
$valgrind --gen-suppressions=all --leak-check=full --trace-children=yes \
|
||||
--suppressions=$SUPPRESSIONS \
|
||||
$apache_debug_root/bin/httpd --enable-pool-debug \
|
||||
>& $outfile &
|
||||
|
||||
local apache_timeout=30
|
||||
echo -n Waiting up to "$apache_timeout" seconds for valgrind/httpd \
|
||||
to start listening
|
||||
if ! wait_cmd_with_timeout "$apache_timeout" \
|
||||
wget -q --timeout=2 -O/dev/null "http://$server"
|
||||
then
|
||||
record_error apache/valgrind did not start after "$apache_timeout" seconds.
|
||||
return 1
|
||||
fi
|
||||
|
||||
run_and_grep_for_error "$src/pagespeed/apache/system_test.sh \
|
||||
$server" $apache_test_log '^\FAIL\.'
|
||||
|
||||
$apache_debug_root/bin/apachectl graceful-stop
|
||||
|
||||
echo -n Waiting for httpd to actually exit...
|
||||
while [ -f $apache_debug_root/logs/httpd.pid ]; do sleep 1; done
|
||||
echo done.
|
||||
|
||||
echo `date`: Waiting for $valgrind to finish spewing to $outfile
|
||||
wait
|
||||
check_valgrind_log_for_problems httpd $outfile
|
||||
echo `date`: Cleaning up
|
||||
|
||||
tail -12 $outfile | tee $outfile.tail
|
||||
}
|
||||
|
||||
set +u
|
||||
PAGESPEED_TEST_HOST=${PAGESPEED_TEST_HOST:-selfsigned.modpagespeed.com}
|
||||
export PAGESPEED_TEST_HOST
|
||||
set -u
|
||||
|
||||
$apache_debug_root/bin/apachectl graceful-stop
|
||||
if [ $? -ne 0 ]; then
|
||||
record_error restart failed: please see log $install_log_file
|
||||
else
|
||||
if [ $run_unit_tests = "1" ]; then
|
||||
# We use 1 shard here for convenience of memcached setup, since this
|
||||
# test is quick.
|
||||
check_unit_test_for_leaks mod_pagespeed_test 1
|
||||
|
||||
# 3 shards here in hope of using the idle CPU while not overwhelming things
|
||||
# if other things (e.g. checkin.blaze) are parallel to us.
|
||||
check_unit_test_for_leaks pagespeed_automatic_test 3
|
||||
fi
|
||||
|
||||
cd "$src/devel"
|
||||
|
||||
echo Running system tests using a file cache...
|
||||
check_system_test_for_leaks file_cache
|
||||
|
||||
echo Running system tests using memcached...
|
||||
check_system_test_for_leaks memcached
|
||||
|
||||
echo Running system tests using redis...
|
||||
check_system_test_for_leaks redis
|
||||
fi
|
||||
|
||||
if [ $exit_status -eq 0 ]; then
|
||||
echo PASS
|
||||
else
|
||||
echo FAIL: $failures
|
||||
fi
|
||||
echo NOTE: there can be multiple LEAK SUMMARY entries in Valgrid log, which
|
||||
echo can be located in different parts of log, not necessarily in the end.
|
||||
|
||||
exit $exit_status
|
||||
@@ -1,18 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Scans source directories for _test.cc files to find ones that aren't mentioned
|
||||
# in the test gyp file. On success produces no output, otherwise prints the
|
||||
# names of the unreferenced _test.cc files.
|
||||
|
||||
set -u
|
||||
set -e
|
||||
|
||||
this_dir="$(dirname "${BASH_SOURCE[0]}")"
|
||||
cd "$this_dir/.."
|
||||
|
||||
test_gyp="net/instaweb/test.gyp"
|
||||
find net pagespeed -name *_test.cc | while read test_path; do
|
||||
if ! grep -q "$(basename "$test_path")" "$test_gyp"; then
|
||||
echo "$test_path ($(basename "$test_path")) missing from $test_gyp"
|
||||
fi
|
||||
done
|
||||
-136
@@ -1,136 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Run the checkin tests for mod_pagespeed. These help ensure the build won't
|
||||
# break.
|
||||
#
|
||||
# Extra parmeters are passed to make. For example, pass V=1 to get verbose
|
||||
# gyp builds.
|
||||
#
|
||||
# We don't clean by default in checkin tests. That makes checkin tests take way
|
||||
# too long, especially when re-running after a failure. This adds risk because
|
||||
# someone might break the build by checking in a dependence on a file that
|
||||
# cannot be regenerated, but that risk seems low compared with the cost of total
|
||||
# rebuilds.
|
||||
|
||||
echo Starting tests at time `date`
|
||||
|
||||
# Don't leave processes hanging around on exit. "jobs -p" gives all background
|
||||
# processes.
|
||||
trap 'kill $(jobs -p)' SIGINT SIGTERM EXIT
|
||||
|
||||
APACHE_DEBUG_ROOT="$HOME/apache2"
|
||||
|
||||
FAIL=
|
||||
if [ ! -d "$APACHE_DEBUG_ROOT" ]; then
|
||||
echo "You must install a local Apache before running checkin tests, e.g."
|
||||
echo " install/build_development_apache.sh 2.2 prefork"
|
||||
FAIL=true
|
||||
fi
|
||||
|
||||
required_binaries="autoconf g++ gperf libtool valgrind memcached redis-server"
|
||||
missing=""
|
||||
for bin in $required_binaries; do
|
||||
which $bin >/dev/null || missing="$missing $bin"
|
||||
done
|
||||
if [ "$missing" != "" ]; then
|
||||
echo You are missing required packages $missing. Type:
|
||||
echo sudo apt-get install $missing
|
||||
FAIL=true
|
||||
fi
|
||||
|
||||
if ! locale -a | grep -q tr_TR.utf8; then
|
||||
echo "You are missing language-pack-tr-base. Type:"
|
||||
echo "sudo apt-get install language-pack-tr-base"
|
||||
FAIL=true
|
||||
fi
|
||||
|
||||
if [ ! -f /usr/bin/php5-cgi ]; then
|
||||
echo "You are missing php5. Type:"
|
||||
echo "sudo apt-get install php5-cgi"
|
||||
FAIL=true
|
||||
fi
|
||||
|
||||
if [ ! -f $APACHE_DEBUG_ROOT/modules/mod_fcgid-src_build.so ]; then
|
||||
echo "You are missing source build of fcgid. Please re-run "
|
||||
echo "build_development_apache.sh"
|
||||
FAIL=true
|
||||
fi
|
||||
|
||||
this_dir=$(dirname "${BASH_SOURCE[0]}")
|
||||
cd "$this_dir"
|
||||
|
||||
tests_missing=$(./check_tests_are_run.sh)
|
||||
if [ ! -z "$tests_missing" ]; then
|
||||
echo "$tests_missing"
|
||||
FAIL=true
|
||||
fi
|
||||
|
||||
if [ -n "$FAIL" ]; then
|
||||
echo "***" Please correct above errors and try again.
|
||||
exit 1
|
||||
fi
|
||||
|
||||
export OBJDIR=/tmp/instaweb.$$
|
||||
make_log=/tmp/checkin.make.$$
|
||||
blaze_log=/tmp/checkin.blaze.$$
|
||||
|
||||
function kill_subprocesses() {
|
||||
echo "^C caught by $0, killing jobs..."
|
||||
kill -INT $(jobs -p)
|
||||
|
||||
# Make is resilient to kills, partially due to our recursive make calls.
|
||||
# Also we need to be wary of other 'make' processes on the system for
|
||||
# different clients, but we always use OBJDIR=$OBJDIR on the command-line for
|
||||
# our recursive makes. So keep finding them and killing them until they are
|
||||
# all dead dead dead.
|
||||
|
||||
continue=1
|
||||
while [ $continue -eq 1 ];
|
||||
do
|
||||
# TODO(jmarantz): jmaessen suggests: How about
|
||||
# processes=$(ps auxw | awk "/[m]ake OBJDIR=$OBJDIR/{ print \$2 }")
|
||||
# This would be more silent. But I'm inclined to leave it noisy for
|
||||
# now until we are confident it's working well.
|
||||
ps auxw | grep "make OBJDIR=$OBJDIR" | grep -v grep
|
||||
|
||||
if [ $? -eq 0 ]; then
|
||||
processes=$(ps auxw|grep "make OBJDIR=$OBJDIR"|awk '{ print $2 }')
|
||||
kill -TERM $processes
|
||||
sleep 5
|
||||
else
|
||||
# All done. Let the 'checkin' script itself exit.
|
||||
continue=0
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
trap '{ kill_subprocesses; exit 1; }' INT
|
||||
|
||||
echo "$this_dir/checkin.make $* &> $make_log &"
|
||||
rm -f "$make_log"
|
||||
touch "$make_log"
|
||||
echo $PWD
|
||||
# TODO(jefftk): combine checkin and checkin.make
|
||||
./checkin.make "$@" &> "$make_log" &
|
||||
make_pid=$!
|
||||
|
||||
# Show make's output as it runs...
|
||||
tail -f $make_log &
|
||||
|
||||
exit_status=0
|
||||
|
||||
# Wait for make to finish.
|
||||
wait $make_pid
|
||||
MAKE_STATUS="$?"
|
||||
|
||||
if [ "$MAKE_STATUS" = "0" ]; then
|
||||
echo checkin.make Passed.
|
||||
rm "$make_log"
|
||||
else
|
||||
echo "*** checkin.make failed: check $make_log for details. Last 4 lines:"
|
||||
tail -n 4 "$make_log"
|
||||
exit_status=1
|
||||
fi
|
||||
|
||||
echo Exiting checkin at "$(date)" with status "$exit_status"
|
||||
exit $exit_status
|
||||
@@ -1,79 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# This script is intended to be run from 'checkin'. It runs a series of tests,
|
||||
# noting which ones failed, and allowing re-running only the failed ones if
|
||||
# needed.
|
||||
|
||||
this_dir=$(dirname "${BASH_SOURCE[0]}")
|
||||
cd "$this_dir"
|
||||
|
||||
# When a single system test fails, keep running until the end of the test
|
||||
# script, and then print out all failing tests. While this isn't a better flow
|
||||
# for interactive use for all users, for system tests it allows you to see the
|
||||
# full list of system tests that failed so you can iterate on them or test them
|
||||
# for flakiness.
|
||||
export CONTINUE_AFTER_FAILURE=true
|
||||
|
||||
source checkin_test_helpers.sh
|
||||
|
||||
export OBJDIR=${OBJDIR:-/tmp/instaweb.$$}
|
||||
make_args_array=($MAKE_ARGS)
|
||||
mkdir -p "$OBJDIR"
|
||||
|
||||
failed_tests=""
|
||||
prep_failures=""
|
||||
for prep in $(make echo_checkin_prep); do
|
||||
run_noisy_command_showing_log "$OBJDIR"/"$prep".log "$prep" \
|
||||
make "${make_args_array[@]}" "$prep"
|
||||
if [ "$?" -ne "0" ]; then
|
||||
prep_failures+=" $prep"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ ! -z "$prep_failures" ]; then
|
||||
echo checkin_prep failed: "$prep_failures"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$#" -eq 0 ]; then
|
||||
tests=( \
|
||||
apache_test \
|
||||
apache_release_test \
|
||||
apache_system_tests \
|
||||
pagespeed_automatic_smoke_test \
|
||||
)
|
||||
else
|
||||
tests=("$@")
|
||||
fi
|
||||
|
||||
for testname in "${tests[@]}"; do
|
||||
is_system_test=$(echo "$testname" | grep -c system_test)
|
||||
if [ "$is_system_test" = 1 ]; then
|
||||
SERVER="Apache"
|
||||
LOCKFILE="$APACHE_LOCKFILE"
|
||||
echo -n Waiting for "$SERVER" lock "$LOCKFILE" ...
|
||||
acquire_lock "$SERVER" "$LOCKFILE"
|
||||
print_elapsed_time
|
||||
echo ""
|
||||
fi
|
||||
run_noisy_command_showing_log "$OBJDIR/${testname}.log" "$testname" \
|
||||
make "${make_args_array[@]}" "$testname"
|
||||
if [ "$?" -ne "0" ]; then
|
||||
failed_tests+=" $testname"
|
||||
fi
|
||||
if [ "$is_system_test" = 1 ]; then
|
||||
run_noisy_command_showing_log "$OBJDIR/apache_install_conf.log" \
|
||||
"Returning Apache config to a consistent state." \
|
||||
make "${make_args_array[@]}" apache_install_conf
|
||||
release_lock "$SERVER" "$LOCKFILE"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -z "$failed_tests" ]; then
|
||||
echo "All 'make' tests passed."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo Failing tests: "$failed_tests"
|
||||
echo Re-run with devel/checkin.make "$failed_tests"
|
||||
exit 1
|
||||
@@ -1,144 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2011 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# Author: sligocki@google.com (Shawn Ligocki)
|
||||
#
|
||||
# Helper functions for holding locks so that checkin tests from two clients
|
||||
# can be run simultaneously, and printing status on long-running commands.
|
||||
#
|
||||
# Sourced from checkin.make.
|
||||
|
||||
readonly APACHE_LOCKFILE="/tmp/pagespeed-apache.lock"
|
||||
|
||||
function acquire_lock {
|
||||
local server=$1
|
||||
local lockfile=$2
|
||||
|
||||
local lockfile_tmp="$lockfile.$$"
|
||||
local printed_msg=0
|
||||
|
||||
echo $$ > "$lockfile_tmp"
|
||||
# ln will fail if $lockfile exists, making this an atomic test and set.
|
||||
# Note that this is a hard link (ln), not a symlink (ln -s).
|
||||
while ! ln "$lockfile_tmp" "$lockfile" 2>/dev/null; do
|
||||
local lock_pid=$(cat "$lockfile" 2>/dev/null)
|
||||
if [ "$lock_pid" = "$$" ]; then
|
||||
## We already have the lock, apparently!
|
||||
break
|
||||
fi
|
||||
|
||||
if [ -n "$lock_pid" ] && ! ps "$lock_pid" >/dev/null; then
|
||||
echo "Removing stale lock. Process PID=$lock_pid, no longer exists."
|
||||
rm "$lockfile"
|
||||
else
|
||||
if [ "$printed_msg" = 0 ]; then
|
||||
echo -n "Waiting for PID $lock_pid to give up the $server lock."
|
||||
printed_msg=1
|
||||
else
|
||||
echo -n '.'
|
||||
fi
|
||||
sleep 1
|
||||
fi
|
||||
done
|
||||
|
||||
if [ "$printed_msg" != 0 ]; then
|
||||
echo
|
||||
fi
|
||||
rm -f "$lockfile_tmp"
|
||||
}
|
||||
|
||||
function release_lock {
|
||||
SERVER=$1
|
||||
LOCKFILE=$2
|
||||
|
||||
echo "Unlocking $SERVER."
|
||||
rm "$LOCKFILE"
|
||||
}
|
||||
exit_status=0
|
||||
|
||||
# Returns the unix system time in seconds.
|
||||
function now_sec() {
|
||||
date +%s
|
||||
}
|
||||
|
||||
start_time_sec=$(now_sec)
|
||||
|
||||
# Prints the elapsed time, in seconds, since the last time print_elapsed_time
|
||||
# was called. Any arguments to this function will be passed to through as the
|
||||
# first args to echo. The intent is you can put
|
||||
# print_elapsed_time -n
|
||||
# to allow callers to print more stuff on the same line.
|
||||
function print_elapsed_time() {
|
||||
current_time_sec=$(now_sec)
|
||||
if [ "$previous_time_sec" != 0 ]; then
|
||||
echo -n : "$((current_time_sec - start_time_sec))" sec
|
||||
fi
|
||||
}
|
||||
|
||||
# Determines whether the passed-in PID is alive.
|
||||
function is_process_alive() {
|
||||
ps "$1" > /dev/null
|
||||
}
|
||||
|
||||
# Runs command, redirecting stdout+stderr to a logfile, which is specified as
|
||||
# the first argument. The second argument is a string to put in the status
|
||||
# messsage. This might be all or part of the actual command, or something
|
||||
# descriptive. The rest of the arguments are the command.
|
||||
#
|
||||
# The full command will be added as the first line of the logfile.
|
||||
#
|
||||
# This function blocks until the command finishes, but it prints out status
|
||||
# lines at increasing intervals, with the max interval being 60 seconds. Once
|
||||
# the 60-second threshold is reached, each status line is emitted with a
|
||||
# newline. This is so that two long-running commands running in parallel
|
||||
# don't completely overwrite each other's status.
|
||||
#
|
||||
# The global variable 'exit_status' is set to 0 if the command succeeds, 1 if
|
||||
# it fails.
|
||||
function run_noisy_command_showing_log() {
|
||||
logfile="$1"
|
||||
shift
|
||||
description="$1"
|
||||
shift
|
||||
|
||||
start_time_sec=$(now_sec)
|
||||
previous_time_sec=$start_time_sec
|
||||
echo "$@" "&>" "$logfile" "..."
|
||||
("$@" ; echo exit_status=$?) &> "$logfile" &
|
||||
pid=$!
|
||||
print_interval_sec=60
|
||||
while is_process_alive $pid; do
|
||||
sleep 1
|
||||
current_sec=$(now_sec)
|
||||
interval_sec=$((current_sec - previous_time_sec))
|
||||
if [ $interval_sec -ge $print_interval_sec ]; then
|
||||
previous_time_sec=$current_sec
|
||||
lines_in_logfile=$(wc -l < "$logfile")
|
||||
echo " ... $description: $lines_in_logfile lines$(print_elapsed_time)"
|
||||
fi
|
||||
done
|
||||
if [ "$(tail -n 1 "$logfile")" = "exit_status=0" ]; then
|
||||
echo -n "PASS"
|
||||
local exit_status=0
|
||||
else
|
||||
echo -n "FAIL"
|
||||
local exit_status=1
|
||||
fi
|
||||
print_elapsed_time
|
||||
# shellcheck disable=SC2145
|
||||
echo " ($@ >& $logfile)"
|
||||
return $exit_status
|
||||
}
|
||||
@@ -1,195 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2011 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# Author: morlovich@google.com (Maksim Orlovich)
|
||||
#
|
||||
# The usual mechanism used to develop mod_pagespeed and build binaries is based
|
||||
# on merging all dependencies into a single source tree. This script enables a
|
||||
# standard untar/configure/make flow that does not bundle widely available
|
||||
# external libraries. It generates the tarball including the configure (or
|
||||
# rather generate.sh) script.
|
||||
#
|
||||
# If --minimal is passed, it will cut out even more things. This was meant
|
||||
# for packaging properly Debian, which has a particularly extensive package
|
||||
# repository. At the moment this configuration requires further patching of
|
||||
# the .gyp[i] files and doesn't work out of the box. The pruning was also done
|
||||
# as of branch 33, so further tweaks might be required for this mode in
|
||||
# 34 or newer.
|
||||
#
|
||||
# This is expected to be run from build_release_tarball.sh, on the branch you
|
||||
# want a tarball for.
|
||||
|
||||
set -e # exit script if any command returns an error
|
||||
set -u # exit the script if any variable is uninitialized
|
||||
|
||||
function usage {
|
||||
echo "create_distro_tarball_debian.sh [ --minimal ] tarball"
|
||||
exit 1
|
||||
}
|
||||
|
||||
# This outputs a little wrapper around gyp that calls it with appropriate -D
|
||||
# flag
|
||||
function config {
|
||||
cat <<SCRIPT_END
|
||||
#!/bin/sh
|
||||
#
|
||||
# This script uses gyp to generate Makefiles for mod_pagespeed built against
|
||||
# the following system libraries:
|
||||
# apr, aprutil, apache httpd headers, icu, libjpeg_turbo, libpng, zlib.
|
||||
#
|
||||
# Besides the -D use_system_libs=1 below, you may need to set (via -D var=value)
|
||||
# paths for some of these libraries via these variables:
|
||||
# system_include_path_httpd, system_include_path_apr,
|
||||
# system_include_path_aprutil.
|
||||
#
|
||||
# for example, you might run
|
||||
# ./generate.sh -Dsystem_include_path_apr=/usr/include/apr-1 \\
|
||||
# -Dsystem_include_path_httpd=/usr/include/httpd
|
||||
# to specify APR and Apache include directories.
|
||||
#
|
||||
# Also, BUILDTYPE=Release can be passed to make (the default is Debug).
|
||||
echo "Generating src/Makefile"
|
||||
src/build/gyp_chromium -D use_system_libs=1 \$*
|
||||
SCRIPT_END
|
||||
}
|
||||
|
||||
if [ $# -lt 1 ]; then
|
||||
usage
|
||||
exit
|
||||
fi
|
||||
|
||||
MINIMAL=0
|
||||
if [ "$1" == "--minimal" ]; then
|
||||
MINIMAL=1
|
||||
shift 1
|
||||
fi
|
||||
|
||||
TARBALL="$1"
|
||||
if [ -z "$TARBALL" ]; then
|
||||
usage
|
||||
fi
|
||||
touch "$TARBALL"
|
||||
TARBALL="$(realpath $TARBALL)"
|
||||
MPS_CHECKOUT="$PWD"
|
||||
|
||||
git submodule update --init --recursive
|
||||
|
||||
# Pick up our version info, and wrap src inside a modpagespeed-version dir.
|
||||
source net/instaweb/public/VERSION
|
||||
VER_STRING="$MAJOR.$MINOR.$BUILD.$PATCH"
|
||||
TEMP_DIR="$(mktemp -d)"
|
||||
WRAPPER_DIR="modpagespeed-$VER_STRING"
|
||||
mkdir "$TEMP_DIR/$WRAPPER_DIR"
|
||||
DIR="$WRAPPER_DIR/src"
|
||||
ln -s "$MPS_CHECKOUT" "$TEMP_DIR/$DIR"
|
||||
|
||||
# Also create a little helper script that shows how to run gyp
|
||||
config > "$TEMP_DIR/$WRAPPER_DIR/generate.sh"
|
||||
chmod +x "$TEMP_DIR/$WRAPPER_DIR/generate.sh"
|
||||
|
||||
# Normally, the build system runs build/lastchange.sh to figure out what
|
||||
# to put into the last portion of the version number. We are, however, going to
|
||||
# be getting rid of the .git dirs, so that will not work (nor would it without
|
||||
# network access). Luckily, we can provide the number via LASTCHANGE.in,
|
||||
# so we just compute it now, and save it there.
|
||||
./build/lastchange.sh "$MPS_CHECKOUT" > LASTCHANGE.in
|
||||
|
||||
# Things that depends on minimal or not.
|
||||
if [ $MINIMAL -eq 0 ]; then
|
||||
GTEST=$DIR/testing
|
||||
GFLAGS=$DIR/third_party/gflags
|
||||
GIFLIB=$DIR/third_party/giflib
|
||||
ICU="$DIR/third_party/icu/icu.gyp \
|
||||
$DIR/third_party/icu/source/common/unicode/"
|
||||
JSONCPP=$DIR/third_party/jsoncpp
|
||||
LIBWEBP=$DIR/third_party/libwebp
|
||||
PROTOBUF=$DIR/third_party/protobuf
|
||||
RE2=$DIR/third_party/re2
|
||||
else
|
||||
GTEST="$DIR/testing \
|
||||
--exclude $DIR/testing/gmock \
|
||||
--exclude $DIR/testing/gtest"
|
||||
GFLAGS=$DIR/third_party/gflags/gflags.gyp
|
||||
GIFLIB=$DIR/third_party/giflib/giflib.gyp
|
||||
ICU=$DIR/third_party/icu/icu.gyp
|
||||
JSONCPP=$DIR/third_party/jsoncpp/jsoncpp.gyp
|
||||
LIBWEBP="$DIR/third_party/libwebp/COPYING \
|
||||
$DIR/third_party/libwebp/examples/gif2webp_util.*"
|
||||
PROTOBUF="$DIR/third_party/protobuf/*.gyp \
|
||||
$DIR/third_party/protobuf/*.gypi"
|
||||
RE2=$DIR/third_party/re2/re2.gyp
|
||||
fi
|
||||
|
||||
# It's tarball time!
|
||||
# Note that this is highly-version specific, and requires tweaks for every
|
||||
# release as its dependencies change. Always run the version of this
|
||||
# script that's on the branch you're making a tarball for.
|
||||
cd "$TEMP_DIR"
|
||||
tar cj --dereference --exclude='.git' --exclude='.svn' --exclude='.hg' -f $TARBALL \
|
||||
--exclude='*.mk' --exclude='*.pyc' \
|
||||
--exclude=$DIR/net/instaweb/genfiles/*/*.cc \
|
||||
$WRAPPER_DIR/generate.sh \
|
||||
$DIR/LASTCHANGE.in \
|
||||
$DIR/base \
|
||||
$DIR/build \
|
||||
--exclude $DIR/build/android/arm-linux-androideabi-gold \
|
||||
$DIR/install \
|
||||
$DIR/net/instaweb \
|
||||
$DIR/pagespeed \
|
||||
$DIR/strings \
|
||||
$GTEST \
|
||||
$DIR/third_party/apr/apr.gyp \
|
||||
$DIR/third_party/aprutil/aprutil.gyp \
|
||||
$DIR/third_party/aprutil/apr_memcache2.h \
|
||||
$DIR/third_party/aprutil/apr_memcache2.c \
|
||||
$DIR/third_party/httpd/httpd.gyp \
|
||||
$DIR/third_party/httpd24/httpd24.gyp \
|
||||
$DIR/third_party/base64 \
|
||||
$DIR/third_party/brotli \
|
||||
$DIR/third_party/chromium/src/base \
|
||||
--exclude src/third_party/chromium/src/base/third_party/xdg_mime \
|
||||
--exclude src/third_party/chromium/src/base/third_party/xdg_user_dirs \
|
||||
$DIR/third_party/chromium/src/build \
|
||||
--exclude $DIR/third_party/chromium/src/build/android \
|
||||
$DIR/third_party/chromium/src/googleurl \
|
||||
$DIR/third_party/chromium/src/net/tools \
|
||||
$DIR/third_party/closure/ \
|
||||
$DIR/third_party/closure_library/ \
|
||||
$DIR/third_party/css_parser \
|
||||
$DIR/third_party/domain_registry_provider \
|
||||
$GFLAGS \
|
||||
$GIFLIB \
|
||||
$DIR/third_party/google-sparsehash \
|
||||
$DIR/third_party/grpc \
|
||||
$DIR/third_party/hiredis \
|
||||
$ICU \
|
||||
$JSONCPP \
|
||||
$DIR/third_party/libjpeg_turbo/libjpeg_turbo.gyp \
|
||||
$DIR/third_party/libpng/libpng.gyp \
|
||||
$LIBWEBP \
|
||||
$DIR/third_party/modp_b64 \
|
||||
$DIR/third_party/optipng \
|
||||
$PROTOBUF \
|
||||
$DIR/third_party/rdestl \
|
||||
$RE2 \
|
||||
$DIR/third_party/redis-crc \
|
||||
$DIR/third_party/serf \
|
||||
$DIR/third_party/zlib/zlib.gyp \
|
||||
$DIR/tools/gyp \
|
||||
$DIR/url
|
||||
|
||||
cd "$MPS_CHECKOUT"
|
||||
rm -r "$TEMP_DIR"
|
||||
@@ -1,60 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2003 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# Runs a file in $1 through a giant sed script, transforming normal
|
||||
# C++ comments to Doxygen comments. The resultant file is placed in
|
||||
# $2/$1, so $2 must be a subdirectory.
|
||||
#
|
||||
# Usage: scripts/doxify.sh filename destination_directory
|
||||
|
||||
set -u # exit the script if any variable is uninitialized
|
||||
set -e # exit script if any command returns an error
|
||||
|
||||
filename=$1
|
||||
destination_directory=$2
|
||||
outfile=$destination_directory/$filename
|
||||
|
||||
mkdir -p "$(dirname $outfile)"
|
||||
|
||||
sed -r 's~/\*([^\*])~/\*\*\1~; # /* -> /** \
|
||||
s~///*~///~; # // -> /// \
|
||||
s~;[ ]*/\*\*([^<*]*)~; /\*\*<\1~; # /** -> /**< on right after code \
|
||||
s~;[ ]*///*([^<])~; ///<\1~; # /// -> ///< on right after code \
|
||||
s~,([ ]*)///*([^<])~,\1///<\2~; # /// -> ///< on right after enum \
|
||||
s~([[:alnum:]][ ]*)///*([^<])~\1///<\2~; # /// -> ///< on right after enum \
|
||||
s~DISALLOW_COPY_AND_ASSIGN\(.*\)\;~~; # /// -> ///< on right after code \
|
||||
s~(///) *---+([^-].+[^-]) *---+~\1\2~; # /// ---- Bla ---- -> /// Bla
|
||||
s~(///) *===+([^=].+[^=]) *===+~\1\2~; # /// ==== Bla ==== -> /// Bla
|
||||
s~(///) *\*\*\*+([^\*].+[^\*]) *\*\*\*+~\1\2~; # /// **** Bla **** -> /// Bla
|
||||
s~(///) *----*( *)~\1\2~; # /// -------- -> ///
|
||||
s~(///) *====*( *)~\1\2~; # /// ======== -> ///
|
||||
s~(///) *\*\*\*\**( *)~\1\2~; # /// ******** -> ///
|
||||
s~(///) *\* \* \*( \*)* *~\1~; # /// * * * * * -> ///
|
||||
s~(([^A-Z_])((TODO|FIXME)[^A-Z_].*))~\2 @todo \3~; # TODO* -> @todo TODO* \
|
||||
s~(([^A-Z_])((BUG)[^A-Z_].*))~\2 @bug \3~; # BUG* -> @bug BUG* \
|
||||
s~([ \t]*)ABSTRACT([ \t]*\;)~\1\=0\2~; # void f() ABSTRACT; -> void f() =0; \
|
||||
s~DECLARE_string(.*)~DECLARE_STRING\1~; # /// -> ///< on right after code \
|
||||
s~DECLARE_bool(.*)~DECLARE_BOOL\1~; # /// -> ///< on right after code \
|
||||
s~DECLARE_int32(.*)~DECLARE_INT32\1~; # /// -> ///< on right after code \
|
||||
s~DECLARE_uint32(.*)~DECLARE_UINT32\1~; # /// -> ///< on right after code \
|
||||
s~DECLARE_int64(.*)~DECLARE_INT64\1~; # /// -> ///< on right after code \
|
||||
s~DECLARE_uint64(.*)~DECLARE_UINT64\1~; # /// -> ///< on right after code \
|
||||
s~/// *(Copyright(.*))~// \1~; # clutter \
|
||||
s~/// *(All [rR]ights [rR]eserved(.*))~// \1~; # clutter \
|
||||
s~/// *(Date: (.*))~/// @file~; # clutter \
|
||||
s~/// *Author:(.*)~/// @file~; # /// Author -> /// @file \
|
||||
s~/// *Author(.*)~/// @file~; # /// Author -> /// @file ' \
|
||||
< $filename > $outfile
|
||||
@@ -1,85 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2010 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# Processes the open-source header files using Doxygen. Each header
|
||||
# must be preprocessed using doxify.sh to convert normal C++ comments
|
||||
# into Doxygen Usage.
|
||||
#
|
||||
# comments: devel/doxify_tree.sh output_tarball
|
||||
|
||||
set -e # exit script if any command returns an error
|
||||
set -u # exit the script if any variable is uninitialized
|
||||
|
||||
this_dir=$(dirname "${BASH_SOURCE[0]}")
|
||||
cd "$this_dir/.."
|
||||
src="$PWD"
|
||||
cfg="$src/devel/doxygen.cfg"
|
||||
|
||||
if [ $# != 1 ]; then
|
||||
echo Usage: $0 output_tarball
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! which doxygen > /dev/null; then
|
||||
echo "doxygen is not installed; run"
|
||||
echo " sudo apt-get install doxygen"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# This generates a documentation tarball, suitable for copying to
|
||||
# modpagespeed.com. This should be in $1
|
||||
tarball="$(readlink -f $1)"
|
||||
|
||||
source "$src/net/instaweb/public/VERSION"
|
||||
PSOL_VERSION="$MAJOR.$MINOR.$BUILD.$PATCH"
|
||||
|
||||
WORKDIR=$(mktemp -d)
|
||||
trap "rm -r $WORKDIR" EXIT
|
||||
|
||||
OUTPUT_DIRECTORY="$WORKDIR/doxygen_out"
|
||||
mkdir "$OUTPUT_DIRECTORY"
|
||||
|
||||
hacked_copies="$WORKDIR/hacked_copies"
|
||||
mkdir "$hacked_copies"
|
||||
|
||||
echo Preprocessing header files to turn normal C++ comments into Doxygen-style
|
||||
echo comments....
|
||||
find net/ pagespeed/ -name "*.h" -exec "$src/devel/doxify.sh" {} \
|
||||
"$hacked_copies" \;
|
||||
|
||||
# These variables are referenced in doxygen.cfg, so export them before running
|
||||
# doxygen.
|
||||
export PSOL_VERSION
|
||||
export OUTPUT_DIRECTORY
|
||||
|
||||
log_file=$OUTPUT_DIRECTORY/doxygen.log
|
||||
cd $hacked_copies
|
||||
doxygen $cfg 2> $log_file
|
||||
|
||||
# Doxygen produces a large number of warnings about undocumented classes. At
|
||||
# some point we should fix all these but this is going to take a while as there
|
||||
# are 12431 as of 2016-11-18.
|
||||
#
|
||||
# These will reference files that we have hacked in this script, and using Emacs
|
||||
# to navigate to these errors will get you to files you should never edit.
|
||||
# Strip off the prefix so we'll print files with their source of truth.
|
||||
grep hacked_copies $log_file | sed -e s@$hacked_copies/@@g
|
||||
|
||||
# TODO(jmarantz): walk through files in $OUTPUT_DIRECTORY/html and see whether
|
||||
# there are changes to the corresponding files in the documentation.
|
||||
cd $OUTPUT_DIRECTORY
|
||||
tar czf $tarball .
|
||||
ls -l $tarball
|
||||
-1552
File diff suppressed because it is too large
Load Diff
@@ -1,8 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
"$@"
|
||||
if [ $? -eq 0 ]; then
|
||||
echo expected $1 to fail
|
||||
exit 1
|
||||
fi
|
||||
exit 0
|
||||
@@ -1,168 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
#
|
||||
# Copyright 2010 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Fetches a set of URLs via a proxy, keeping statistics.
|
||||
|
||||
This script attempts to fetch all URLs in the list given on
|
||||
the command-line via a specified proxy. It differs from the
|
||||
widely available tools in that:
|
||||
- The proxy connection is kept-alive to try to maximize throughput.
|
||||
- Statuses and completion times for each URL are output to stdout to
|
||||
help analyze the results.
|
||||
|
||||
With the --js option the output is a JavaScript object literal with fields named
|
||||
for URLs with http:// replaced with whatever is passed as test_cat,
|
||||
followed by a dash.
|
||||
|
||||
"""
|
||||
|
||||
__author__ = "morlovich@google.com (Maksim Orlovich)"
|
||||
|
||||
import getopt
|
||||
import httplib
|
||||
import socket
|
||||
import sys
|
||||
import time
|
||||
import urlparse
|
||||
|
||||
|
||||
def OpenProxy(config):
|
||||
if config.ssl_mode:
|
||||
new_proxy = httplib.HTTPSConnection(config.proxy_host, config.proxy_port)
|
||||
else:
|
||||
new_proxy = httplib.HTTPConnection(config.proxy_host, config.proxy_port)
|
||||
new_proxy.connect()
|
||||
return new_proxy
|
||||
|
||||
|
||||
def ReopenProxy(config, old_proxy):
|
||||
old_proxy.close()
|
||||
return OpenProxy(config)
|
||||
|
||||
|
||||
def TestName(config, test_url):
|
||||
return test_url.replace("http://", config.test_cat + "-")
|
||||
|
||||
|
||||
def FormatResult(config, time_str, status, test_url):
|
||||
if config.js_mode:
|
||||
return '"%s": %s,' % (TestName(config, test_url), time_str)
|
||||
else:
|
||||
return "%s %s %s" % (time_str, status, test_url)
|
||||
|
||||
|
||||
class Configuration(object):
|
||||
"""packages up execution settings."""
|
||||
|
||||
def __init__(self):
|
||||
"""Initializes settings from command-line arguments."""
|
||||
try:
|
||||
opts, _ = getopt.getopt(sys.argv[1:], "",
|
||||
["ssl", "js=", "proxy_host=", "proxy_port=",
|
||||
"urls_file=", "user_agent="])
|
||||
except getopt.GetoptError as err:
|
||||
print str(err)
|
||||
print ("Usage: devel/fetch_all.py [--ssl] [--js test_cat] "
|
||||
"[--proxy_host host] [--proxy_port port] [--urls_file file] "
|
||||
"[--user_agent user_agent]")
|
||||
sys.exit(2)
|
||||
|
||||
self.ssl_mode = False
|
||||
self.js_mode = False
|
||||
self.has_user_agent = False
|
||||
|
||||
for name, value in opts:
|
||||
if name == "--ssl":
|
||||
self.ssl_mode = True
|
||||
elif name == "--js":
|
||||
self.js_mode = True
|
||||
self.test_cat = value
|
||||
elif name == "--proxy_host":
|
||||
self.proxy_host = value
|
||||
elif name == "--proxy_port":
|
||||
self.proxy_port = int(value)
|
||||
elif name == "--urls_file":
|
||||
self.urls_file = value
|
||||
elif name == "--user_agent":
|
||||
self.has_user_agent = True
|
||||
self.user_agent = value
|
||||
|
||||
|
||||
def main():
|
||||
conf = Configuration()
|
||||
|
||||
# Open a persistent connection to the proxy
|
||||
proxy = OpenProxy(conf)
|
||||
|
||||
if conf.js_mode:
|
||||
print "{"
|
||||
|
||||
f = open(conf.urls_file, "rt")
|
||||
for url in f:
|
||||
try:
|
||||
# Fetch url
|
||||
status = 301
|
||||
followed = 0
|
||||
while followed < 5:
|
||||
url = url.strip()
|
||||
if conf.ssl_mode:
|
||||
url = url.replace("http://", "https://", 1)
|
||||
start = time.time()
|
||||
|
||||
headers = {"Accept-Encoding": "gzip"}
|
||||
if conf.has_user_agent:
|
||||
headers["User-Agent"] = conf.user_agent
|
||||
if "Chrome/" in conf.user_agent:
|
||||
headers["Accept"] = "image/webp"
|
||||
proxy.request("GET", url, None, headers)
|
||||
|
||||
response = proxy.getresponse()
|
||||
response.read()
|
||||
stop = time.time()
|
||||
status = response.status
|
||||
|
||||
# Honor server's close request
|
||||
connect_ctl = response.getheader("connection", default="")
|
||||
if connect_ctl.lower().find("close") != -1:
|
||||
proxy = ReopenProxy(conf, proxy)
|
||||
|
||||
# Report.
|
||||
print FormatResult(conf, str((stop - start)*1000),
|
||||
str(status), url)
|
||||
|
||||
# Handle redirections
|
||||
if 301 <= status <= 303 or status == 307:
|
||||
url = urlparse.urljoin(url,
|
||||
response.getheader("Location", default=""))
|
||||
followed += 1
|
||||
else:
|
||||
break
|
||||
except httplib.BadStatusLine:
|
||||
print FormatResult(conf, "0", "BadStatusLine", url)
|
||||
proxy = ReopenProxy(conf, proxy)
|
||||
except httplib.IncompleteRead:
|
||||
print FormatResult(conf, "0", "IncompleteRead", url)
|
||||
proxy = ReopenProxy(conf, proxy)
|
||||
except socket.error:
|
||||
print FormatResult(conf, "0", "SocketError", url)
|
||||
proxy = ReopenProxy(conf, proxy)
|
||||
|
||||
if conf.js_mode:
|
||||
print "}"
|
||||
|
||||
f.close()
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -1,266 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2011 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# A helper for running gcov on all of the project sources.
|
||||
# Usage:
|
||||
# gcov-all.sh (--prepare | --summarize) path
|
||||
#
|
||||
# where path is the same location where one runs make
|
||||
#
|
||||
# There are two modes:
|
||||
# --prepare cleans up all the .gcda files. This should be done
|
||||
# before running the test, as we need an accurate set of these
|
||||
# to know which object files to include in the executable.
|
||||
# It also effectively zeroes all the measurements, preventing
|
||||
# different runs from getting added together.
|
||||
#
|
||||
# --summarize goes through the produced files, runs gcov on
|
||||
# them, producing the gcov-summary.html and the gcov/ directory
|
||||
# with individual dumps
|
||||
#
|
||||
# Glossary:
|
||||
# .gcno file: produced by gcc during compilation, along with the
|
||||
# corresponding .o file
|
||||
# .gcda file: produced when an instrumented application is run
|
||||
# (or .so is loaded), and then at its exit. Contains
|
||||
# the actual measurements.
|
||||
#
|
||||
# To invoke gcov, we need to pass it in a list of all the source
|
||||
# files we want coverage information for, as well as the directory
|
||||
# to look into for the corresponding .gcno/.gcda files. The
|
||||
# summarize mode collects these based on the .gcda files that exist.
|
||||
#
|
||||
# TODO(morlovich): evaluate lcov as an option? Its output looks nice.
|
||||
|
||||
function summarize {
|
||||
WORKDIR=`mktemp -d`
|
||||
SRCDIR=`pwd`
|
||||
OUTNAME=gcov-summary.html
|
||||
|
||||
echo "Collecting all object and profile data into:" $WORKDIR
|
||||
|
||||
# Here, we look for the .gcda files, and the .o and .gcno that go with them.
|
||||
# This is because they get generated for any object files that gets linked in,
|
||||
# as soon as the executable/module are initialized, giving us an accurate
|
||||
# picture of what should be checked
|
||||
|
||||
GCDAS=`find ./out/Debug_Coverage -name '*.gcda'`
|
||||
DATA=
|
||||
for F in $GCDAS
|
||||
do
|
||||
BASE=${F%.gcda}
|
||||
GCNO=$BASE.gcno
|
||||
O=$BASE.o
|
||||
if [ ! -f $GCNO ]; then
|
||||
echo "WARNING: can't find " $GCNO
|
||||
continue
|
||||
fi
|
||||
|
||||
if [ ! -f $O ]; then
|
||||
echo "WARNING: can't find " $O
|
||||
continue
|
||||
fi
|
||||
|
||||
DATA+=" $F $GCNO $O"
|
||||
done
|
||||
|
||||
cp $DATA $WORKDIR/
|
||||
|
||||
echo "Generating gcov summary into file://"$PWD/$OUTNAME
|
||||
|
||||
# Collect relevant sources. For each one, we check if we have the
|
||||
# gcda (which means we have gcno, too). We want this for two reasons:
|
||||
#
|
||||
# 1) We only want coverage for a file if the gcda is there
|
||||
# 2) gcov has a bug that screws up output if some files' .gcno
|
||||
# does not exist (see http://gcc.gnu.org/bugzilla/show_bug.cgi?id=35568)
|
||||
#
|
||||
# TODO(morlovich): worry about duplicate names!
|
||||
SOURCES=`find -L $SRCDIR/net $SRCDIR/pagespeed -name '*.cc' -or -name '*.c'`
|
||||
|
||||
FILTERED_SOURCES=
|
||||
for F in $SOURCES
|
||||
do
|
||||
GCDA=`basename $F .c`
|
||||
GCDA=$WORKDIR/`basename $GCDA .cc`.gcda
|
||||
if [ -f $GCDA ]; then
|
||||
if [ $F == ${F/.svn/marker/} ]; then
|
||||
FILTERED_SOURCES="$FILTERED_SOURCES $GCDA"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
htmlDriver > $OUTNAME
|
||||
echo "<pre id='data' style='display:none'>" >> $OUTNAME
|
||||
gcov -o $WORKDIR $FILTERED_SOURCES >> $OUTNAME
|
||||
echo "</pre>" >> $OUTNAME
|
||||
|
||||
echo "Moving all the .gcov files to gcov subdir (after wiping it)"
|
||||
rm -rf $SRCDIR/gcov
|
||||
mkdir $SRCDIR/gcov
|
||||
mv *.gcov $SRCDIR/gcov
|
||||
|
||||
echo "Cleaning up..."
|
||||
rm -r $WORKDIR
|
||||
}
|
||||
|
||||
# This outputs the html driver that visualizes the results
|
||||
function htmlDriver {
|
||||
cat <<TEMPLATE_END
|
||||
<!DOCTYPE html>
|
||||
<head>
|
||||
<script>
|
||||
// Computes a color for given goodness percentage. (Using CSS3 hsl syntax)
|
||||
function percentColor(percent) {
|
||||
var hue = (percent / 100 * 120).toFixed(0);
|
||||
return 'hsl(' + hue + ', 100%, 50%)';
|
||||
}
|
||||
|
||||
// Adds a row with given DOM for the file info and given coverage
|
||||
// percentage to the provided table section, giving it the appropriate color
|
||||
function addResultRow(tsection, fileInfo, percent) {
|
||||
var row = tsection.insertRow(-1);
|
||||
row.style.backgroundColor = percentColor(percent);
|
||||
|
||||
var fileNameCell = row.insertCell(-1);
|
||||
fileNameCell.appendChild(fileInfo);
|
||||
|
||||
var percentCell = row.insertCell(-1);
|
||||
percentCell.align = 'right';
|
||||
percentCell.appendChild(document.createTextNode(percent.toFixed(2) + '%'));
|
||||
}
|
||||
|
||||
// Adds a result for given filename and coverage percentage to the first body
|
||||
// of the table with id 'outTable'
|
||||
function addFileResultRow(fileName, percent) {
|
||||
var table = document.getElementById('outTable');
|
||||
var tbody = table.tBodies[0];
|
||||
|
||||
// We want a link to the .gcov file here
|
||||
var a = document.createElement('a');
|
||||
a.appendChild(document.createTextNode(fileName));
|
||||
|
||||
var fragments = fileName.split('/');
|
||||
a.setAttribute('href', 'gcov/' + fragments[fragments.length - 1] + '.gcov');
|
||||
|
||||
addResultRow(tbody, a, percent);
|
||||
}
|
||||
|
||||
function addSummaryResultRow(summary, percent) {
|
||||
var table = document.getElementById('outTable');
|
||||
var tfoot = table.tFoot;
|
||||
|
||||
addResultRow(tfoot, document.createTextNode(summary), percent);
|
||||
}
|
||||
|
||||
function prettifySummary() {
|
||||
// Get the raw data from the <pre id='data'>
|
||||
var preNode = document.getElementById('data');
|
||||
var txt = (preNode.textContent ? preNode.textContent : preNode.innerText);
|
||||
var allLines = txt.split('\n');
|
||||
|
||||
var currentFile;
|
||||
|
||||
// Collect file names, percentages, and lines
|
||||
var allFiles = []; // array of name, coverage %, lines pairs
|
||||
for (var i = 0; i < allLines.length; ++i) {
|
||||
var line = allLines[i];
|
||||
var fileInfo = /File '(.*)'/.exec(line);
|
||||
if (fileInfo) {
|
||||
currentFile = fileInfo[1];
|
||||
// get rid of ./ if needed.
|
||||
if (currentFile.substring(0, 2) == './') {
|
||||
currentFile = currentFile.substring(2);
|
||||
}
|
||||
}
|
||||
|
||||
var linesInfo = /Lines executed:(.*)% of (\d+)/.exec(line);
|
||||
if (linesInfo) {
|
||||
allFiles.push([currentFile, Number(linesInfo[1]), Number(linesInfo[2])]);
|
||||
}
|
||||
|
||||
if (/No executable lines/.exec(line)) {
|
||||
allFiles.push([currentFile, 0, 0]);
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by filename
|
||||
allFiles.sort(function(a, b) {
|
||||
if (a[0] < b[0]) {
|
||||
return -1;
|
||||
} else if (a[0] == b[0]) {
|
||||
return 0;
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
});
|
||||
|
||||
// Append all results we want to table, coloring by coverage; and also compute
|
||||
// an overall number (which may include a few things we don't care about)
|
||||
var totalLines = 0;
|
||||
var totalCovered = 0;
|
||||
for (var i = 0; i < allFiles.length; ++i) {
|
||||
var fileName = allFiles[i][0];
|
||||
var percent = allFiles[i][1];
|
||||
var lines = allFiles[i][2];
|
||||
|
||||
// Skip paths -- we don't need coverage information for system headers
|
||||
if (fileName.charAt(0) == '/') {
|
||||
continue;
|
||||
}
|
||||
|
||||
totalLines += lines;
|
||||
totalCovered += Math.round(lines * percent / 100);
|
||||
|
||||
addFileResultRow(fileName, percent);
|
||||
}
|
||||
|
||||
addSummaryResultRow('Total (' + totalCovered + '/' + totalLines +')',
|
||||
totalCovered / totalLines * 100);
|
||||
}
|
||||
</script>
|
||||
</head>
|
||||
<body onload="prettifySummary()">
|
||||
<table id="outTable">
|
||||
<thead>
|
||||
<tr><th>File name</th><th>Coverage percentage</th></tr>
|
||||
</thead>
|
||||
<tbody></tbody>
|
||||
<tfoot style="font-weight:bold; "></tfoot>
|
||||
</table>
|
||||
TEMPLATE_END
|
||||
}
|
||||
|
||||
function usage {
|
||||
echo "Usage:" $0 "(--prepare | --summarize) path"
|
||||
}
|
||||
|
||||
if [ -z $2 ]; then
|
||||
usage
|
||||
exit
|
||||
fi
|
||||
|
||||
cd $2
|
||||
|
||||
case $1 in
|
||||
--prepare)
|
||||
echo "Removing old .gcda files"
|
||||
find $2/out/Debug_Coverage -name '*.gcda' -delete;;
|
||||
--summarize)
|
||||
summarize;;
|
||||
*)
|
||||
usage;;
|
||||
esac
|
||||
|
||||
@@ -1,33 +0,0 @@
|
||||
ModPagespeedFileCachePath "#HOME/apache2/pagespeed_cache/"
|
||||
AddOutputFilterByType MOD_PAGESPEED_OUTPUT_FILTER text/html
|
||||
|
||||
<VirtualHost localhost:8080>
|
||||
ModPagespeed on
|
||||
|
||||
<Location /pagespeed_admin>
|
||||
Order allow,deny
|
||||
Allow from localhost
|
||||
Allow from 127.0.0.1
|
||||
SetHandler pagespeed_admin
|
||||
</Location>
|
||||
<Location /pagespeed_global_admin>
|
||||
Order allow,deny
|
||||
Allow from localhost
|
||||
Allow from 127.0.0.1
|
||||
SetHandler pagespeed_global_admin
|
||||
</Location>
|
||||
|
||||
KeepAlive On
|
||||
KeepAliveTimeout 60
|
||||
|
||||
<Directory "#HOME/apache2/htdocs/" >
|
||||
# This is enabled to make sure we don't crash mod_negotiation.
|
||||
Options +MultiViews
|
||||
</Directory>
|
||||
|
||||
ModPagespeedRewriteLevel AllFilters
|
||||
ModPagespeedSlurpDirectory #SLURP_DIR
|
||||
ModPagespeedSlurpReadOnly off
|
||||
ModPagespeedRewriteDeadlinePerFlushMs -1
|
||||
CustomLog "#LOG_PATH" %r
|
||||
</VirtualHost>
|
||||
@@ -1,67 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2017 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# This script collects slurps and URLs (post-optimization, if possible), of
|
||||
# some websites with the help of phantomjs.
|
||||
|
||||
function usage {
|
||||
echo "Usage: loadtest_collect/loadtest_collect_corpus.sh pages.txt out.tar.bz2"
|
||||
echo "Where pages.txt has a URL (including http://) per line"
|
||||
}
|
||||
|
||||
set -u # exit the script if any variable is uninitialized
|
||||
set -e
|
||||
|
||||
if [ $# -ne 2 ]; then
|
||||
usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -d devel ]; then
|
||||
cd devel
|
||||
fi
|
||||
if [ ! -d loadtest_collect ]; then
|
||||
echo Run this script from the top or devel/ directories
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! $(which phantomjs) ]; then
|
||||
echo "phantomjs not found, trying to install it with apt-get"
|
||||
sudo apt-get install phantomjs
|
||||
fi
|
||||
|
||||
SLURP_TOP_DIR=$(mktemp -d)
|
||||
SLURP_DIR=$SLURP_TOP_DIR/slurp
|
||||
mkdir $SLURP_DIR
|
||||
LOG_PATH=$SLURP_TOP_DIR/log.txt
|
||||
URLS_PATH=$SLURP_TOP_DIR/corpus_all_urls.txt
|
||||
|
||||
make clean_slate_for_tests
|
||||
make apache_debug_stop
|
||||
|
||||
sed -e "s^#HOME^$HOME^" -e "s^#SLURP_DIR^$SLURP_DIR^" \
|
||||
-e "s^#LOG_PATH^$LOG_PATH^" \
|
||||
< loadtest_collect/loadtest_collect.conf > ~/apache2/conf/pagespeed.conf
|
||||
make -j8 apache_debug_restart
|
||||
|
||||
for site in $(cat $1); do
|
||||
echo $site
|
||||
phantomjs --proxy=127.0.0.1:8080 loadtest_collect/script.js $site
|
||||
done
|
||||
cat $LOG_PATH | grep ^GET | cut -d ' ' -f 2 > $URLS_PATH
|
||||
cd $SLURP_TOP_DIR
|
||||
tar cvjf $2 .
|
||||
|
||||
@@ -1,12 +0,0 @@
|
||||
// This is basically just the stripped down http://phantomjs.org/quick-start.html
|
||||
// example.
|
||||
var page = require('webpage').create();
|
||||
var system = require('system');
|
||||
if (system.args.length === 1) {
|
||||
console.log('Usage: script.js <some URL>');
|
||||
phantom.exit();
|
||||
}
|
||||
|
||||
page.open(system.args[1], function(status) {
|
||||
phantom.exit();
|
||||
});
|
||||
@@ -1,78 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2012 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# Author: morlovich@google.com (Maksim Orlovich)
|
||||
#
|
||||
# Helpers for doing experiments with lots of vhosts.
|
||||
#
|
||||
# usage:
|
||||
# scripts/lots_of_vhosts.sh --config | --traffic
|
||||
#
|
||||
# You can also set envvar NUM_VHOSTS to configure how many hosts to use.
|
||||
|
||||
set -e # exit script if any command returns an error
|
||||
set -u # exit the script if any variable is uninitialized
|
||||
|
||||
NUM_VHOSTS=${NUM_VHOSTS:-10000}
|
||||
|
||||
function usage {
|
||||
cat <<EOF >&2
|
||||
Usage:
|
||||
scripts/lots_of_vhosts.sh --config | --traffic
|
||||
|
||||
--config generates a suffix for pagespeed.conf
|
||||
--traffic generates a list of URLs for trace_stress_test.sh
|
||||
You can also set environment variable NUM_VHOSTS to control the number of
|
||||
virtual hosts produced.
|
||||
|
||||
See also https://github.com/pagespeed/mod_pagespeed/wiki/Memory-Profiling
|
||||
EOF
|
||||
}
|
||||
|
||||
function config {
|
||||
echo "NameVirtualHost *:8080"
|
||||
for i in $(seq 0 $NUM_VHOSTS); do
|
||||
echo "<VirtualHost *:8080>"
|
||||
echo " DocumentRoot $HOME/apache2/htdocs/"
|
||||
echo " ServerName vhost"$i".example.com"
|
||||
echo " ModPagespeed on"
|
||||
echo " ModPagespeedFileCachePath \"/tmp/vhost\""
|
||||
echo " ModPagespeedBlockingRewriteKey \"foo"$i"\""
|
||||
echo "</VirtualHost>"
|
||||
done
|
||||
}
|
||||
|
||||
function traffic {
|
||||
for i in $(seq 0 $NUM_VHOSTS); do
|
||||
echo "http://vhost"$i".example.com/mod_pagespeed_example/"
|
||||
done
|
||||
}
|
||||
|
||||
if [ $# -ne 1 ]; then
|
||||
usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
case $1 in
|
||||
--config)
|
||||
config;;
|
||||
--traffic)
|
||||
traffic;;
|
||||
*)
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
@@ -1,64 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2012 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# This script is intended to be run from devel/mps_load_test.sh, although it can
|
||||
# be run directly as well.
|
||||
#
|
||||
# Usage: devel/mps_generate_load.sh \
|
||||
# [--ipro_preserve] [--ssl] [--user_agent user_agent_string]
|
||||
|
||||
set -e # exit script if any command returns an error
|
||||
set -u # exit the script if any variable is uninitialized
|
||||
|
||||
devel_directory="$(dirname $0)"
|
||||
|
||||
corpus_suffix=
|
||||
IPRO_PRESERVE=0
|
||||
if [[ $# -ge 1 && "$1" = "--ipro_preserve" ]]; then
|
||||
shift
|
||||
corpus_suffix=.ipro_preserve
|
||||
IPRO_PRESERVE=1
|
||||
fi
|
||||
|
||||
extra_flags=
|
||||
if [[ $# -ge 1 && "$1" = "--ssl" ]]; then
|
||||
shift
|
||||
extra_flags=$1
|
||||
fi
|
||||
|
||||
user_agent=
|
||||
if [[ $# -ge 1 && "$1" = "--user_agent" ]]; then
|
||||
user_agent=$2
|
||||
shift 2
|
||||
fi
|
||||
|
||||
corpus_file=/tmp/corpus_all_urls.txt.$USER$corpus_suffix
|
||||
|
||||
# Grab the file from the server host if needed.
|
||||
if [ ! -e $corpus_file ]; then
|
||||
work_file=$(mktemp)
|
||||
src="$HOME/pagespeed-loadtest-corpus/corpus_all_urls.txt"
|
||||
cp $src $work_file
|
||||
if [ $IPRO_PRESERVE = 1 ]; then
|
||||
cat $work_file | fgrep -v .pagespeed. > $corpus_file
|
||||
rm $work_file
|
||||
else
|
||||
mv $work_file $corpus_file
|
||||
fi
|
||||
fi
|
||||
|
||||
PROXY_HOST=127.0.0.1 FLAGS=$extra_flags USER_AGENT=$user_agent PAR=50 RUNS=3 \
|
||||
$devel_directory/trace_stress_test.sh $corpus_file
|
||||
@@ -1,282 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2012 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# Note: this script is not yet usable outside Google, because it depends on a
|
||||
# corpus database that we can't open source. It should be possible to create a
|
||||
# db with a combination of mod_pagespeed's slurping and a headless browser, but
|
||||
# we don't currently have a script or instructions on how to do this.
|
||||
# TODO(jefftk): resolve this
|
||||
#
|
||||
# This script runs a mod_pagespeed load-test. The typical
|
||||
# configuration is to run this on your development workstation and
|
||||
# mps_generate_load.sh will be run (via ssh) on a different machine
|
||||
# (localhost by default, for single-machine runs).
|
||||
#
|
||||
# Usage: scripts/mps_load_test.sh
|
||||
# [-start_apache_then_exit]
|
||||
# [-custom_so mod_pagespeed.so]
|
||||
# [-custom_so24 mod_pagespeed_ap24.so]
|
||||
# [-user_agent user_agent_string]
|
||||
# [-chrome]
|
||||
# [-memcached|-redis]
|
||||
# [-ipro_preserve]
|
||||
# [-purging]
|
||||
# [-inline_unauthorized_resources]
|
||||
# [-ssl]
|
||||
# [-debug]
|
||||
# [corpus_file.tar.bz2]
|
||||
#
|
||||
# Note: Order of supplied command line parameters matters for correct working.
|
||||
#
|
||||
# corpus_file.tar.bz2 is mandatory on the first run and ignored on later runs.
|
||||
# The extracted version is stored in the directory specified by $corpus (see
|
||||
# below) between runs.
|
||||
#
|
||||
# Example of user_agent_string: Chrome/23.0.1271.17
|
||||
# Saying '-chrome' is equivalent to saying 'give me a recent version of Chrome,
|
||||
# which needs to be updated by editing this script.
|
||||
#
|
||||
# If the 'machine name' argument is "localhost" then:
|
||||
# - you will not be prompted for your ssh password
|
||||
# - your machine will be unusable for a little while
|
||||
# - your results may be more consistent
|
||||
#
|
||||
# This scripts prompts you for your su password if it needs to set your
|
||||
# /proc/sys/net/ipv4/tcp_tw_recycle file to contain a "1".
|
||||
|
||||
set -e # exit script if any command returns an error
|
||||
set -u # exit the script if any variable is uninitialized
|
||||
|
||||
this_dir=$(dirname "${BASH_SOURCE[0]}")
|
||||
cd "$this_dir/.."
|
||||
src="$PWD"
|
||||
|
||||
gen_load="$src/devel/mps_generate_load.sh"
|
||||
corpus="$HOME/pagespeed-loadtest-corpus"
|
||||
|
||||
start_apache_then_exit=0
|
||||
|
||||
# Check if we are asked for an external cache (e.g memcached or redis) but
|
||||
# don't have a port configured first, as we need to re-launch ourselves using
|
||||
# run_program_with_EXTCACHE.sh, so we don't want to mess up $@.
|
||||
for argument in "$@"; do
|
||||
if [ "$argument" = "-memcached" -a -z "${MEMCACHED_PORT+x}" ]; then
|
||||
exec "$src/install/run_program_with_memcached.sh" \
|
||||
"$src/devel/mps_load_test.sh" "$@"
|
||||
elif [ "$argument" = "-redis" -a -z "${REDIS_PORT+x}" ]; then
|
||||
exec "$src/install/run_program_with_redis.sh" \
|
||||
"$src/devel/mps_load_test.sh" "$@"
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ $# -ge 1 && "$1" = "-start_apache_then_exit" ]]; then
|
||||
start_apache_then_exit=1
|
||||
shift
|
||||
fi
|
||||
|
||||
custom_so=
|
||||
if [[ $# -ge 2 && "$1" = "-custom_so" ]]; then
|
||||
custom_so=$2
|
||||
shift 2
|
||||
fi
|
||||
|
||||
custom_so24=
|
||||
if [[ $# -ge 2 && "$1" = "-custom_so24" ]]; then
|
||||
custom_so24=$2
|
||||
shift 2
|
||||
fi
|
||||
|
||||
if [[ $# -ge 1 && "$1" = "-chrome" ]]; then
|
||||
export USER_AGENT_FLAG="--user_agent Chrome/47.0.2526.80"
|
||||
shift
|
||||
elif [[ $# -ge 2 && "$1" = "-user_agent" ]]; then
|
||||
export USER_AGENT_FLAG="--user_agent $2"
|
||||
shift 2
|
||||
else
|
||||
export USER_AGENT_FLAG=
|
||||
fi
|
||||
|
||||
if [[ $# -ge 1 && "$1" = "-memcached" ]]; then
|
||||
echo Using memcached on port $MEMCACHED_PORT
|
||||
shift
|
||||
export MEMCACHED=1
|
||||
export REDIS=0
|
||||
cache_stat_prefix="memcache"
|
||||
elif [[ $# -ge 1 && "$1" = "-redis" ]]; then
|
||||
echo Using redis on port "$REDIS_PORT"
|
||||
shift
|
||||
export MEMCACHED=0
|
||||
export REDIS=1
|
||||
cache_stat_prefix="redis"
|
||||
else
|
||||
export MEMCACHED=0
|
||||
export REDIS=0
|
||||
cache_stat_prefix="file_cache_"
|
||||
fi
|
||||
|
||||
if [[ $# -ge 1 && "$1" = "-ipro_preserve" ]]; then
|
||||
shift
|
||||
export IPRO_PRESERVE=1
|
||||
export EXTRA_URL_FLAGS=--ipro_preserve
|
||||
else
|
||||
export IPRO_PRESERVE=0
|
||||
export EXTRA_URL_FLAGS=
|
||||
fi
|
||||
|
||||
if [[ $# -ge 1 && "$1" = "-purging" ]]; then
|
||||
shift
|
||||
export PURGING=1
|
||||
else
|
||||
export PURGING=0
|
||||
fi
|
||||
|
||||
if [[ $# -ge 1 && "$1" = "-inline_unauthorized_resources" ]]; then
|
||||
shift
|
||||
export IUR=1
|
||||
else
|
||||
export IUR=0
|
||||
fi
|
||||
|
||||
if [[ $# -ge 1 && "$1" = "-ssl" ]]; then
|
||||
shift
|
||||
export EXTRA_FETCH_FLAGS=--ssl
|
||||
else
|
||||
export EXTRA_FETCH_FLAGS=
|
||||
fi
|
||||
|
||||
if [[ $# -ge 1 && "$1" = "-debug" ]]; then
|
||||
shift
|
||||
compile_mode="Debug"
|
||||
else
|
||||
compile_mode="OptDebug"
|
||||
fi
|
||||
|
||||
if [ $# -ge 2 ]; then
|
||||
echo "Unknown arguments: $@"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -d "$corpus" ] && [ $# -ne 1 ]; then
|
||||
echo "Invalid arguments: corpus required"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -d "$corpus" ] && [ $# -ne 0 ]; then
|
||||
echo "Warning: using already extracted corpus instead of $1"
|
||||
fi
|
||||
|
||||
# If an 'su' password is required, then get it before going off and compiling
|
||||
# stuff.
|
||||
"$src/devel/turn_on_timewait_recyling.sh"
|
||||
|
||||
if [ -d /var/run/pagespeed/ ]; then
|
||||
rm -rf /var/run/pagespeed/*
|
||||
else
|
||||
sudo mkdir -p /var/run/pagespeed
|
||||
sudo chown $USER /var/run/pagespeed
|
||||
fi
|
||||
|
||||
# Only ssh (and warn user that they will need a password) if using a separate
|
||||
# host for load generation.
|
||||
cmd="$gen_load $EXTRA_URL_FLAGS $EXTRA_FETCH_FLAGS $USER_AGENT_FLAG"
|
||||
|
||||
APACHE_DEBUG_ROOT=${APACHE_DEBUG_ROOT:-$HOME/apache2}
|
||||
|
||||
echo Checking whether we have the corpus available.
|
||||
if [ ! -d "$corpus" ]; then
|
||||
corpus_src="$1"
|
||||
echo "Copying corpus files from $corpus_src"
|
||||
mkdir -p "$corpus"
|
||||
cd "$corpus"
|
||||
tar xjf "$corpus_src"
|
||||
fi
|
||||
|
||||
cd "$src/devel"
|
||||
|
||||
# Build a version of mod_pagespeed with all optimizations enabled, but with
|
||||
# a build that includes DCHECKs.
|
||||
make -j8 CONF=$compile_mode apache_trace_stress_test_server \
|
||||
DUMP_DIR="$corpus" \
|
||||
APACHE_DEBUG_ROOT=${APACHE_DEBUG_ROOT} \
|
||||
MOD_PAGESPEED_CACHE=/var/run/pagespeed/cache
|
||||
|
||||
# If a custom .so got specified, install it.
|
||||
if [[ -n "$custom_so" ]]; then
|
||||
install -c $custom_so /usr/local/apache2/modules/mod_pagespeed.so
|
||||
fi
|
||||
|
||||
if [[ -n "$custom_so24" ]]; then
|
||||
install -c $custom_so24 /usr/local/apache2/modules/mod_pagespeed_ap24.so
|
||||
fi
|
||||
|
||||
# Restart apache for any hand-specified .so or alternative binary
|
||||
if [[ -n "$custom_so" || -n "$custom_so24" ]]; then
|
||||
make apache_debug_stop
|
||||
make apache_debug_start
|
||||
fi
|
||||
|
||||
if [[ "$start_apache_then_exit" = 1 ]]; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
stop_crash_scraper="/tmp/stop_crash_scraper"
|
||||
error_log="$APACHE_DEBUG_ROOT/logs/error_log"
|
||||
rm -f "$stop_crash_scraper"
|
||||
|
||||
echo starting test ...
|
||||
"$src/devel/scrape_error_log_for_crashes.sh" \
|
||||
"$error_log" "$stop_crash_scraper" &
|
||||
echo $cmd ...
|
||||
$cmd
|
||||
touch "$stop_crash_scraper"
|
||||
|
||||
# Print some interesting statistics from the server
|
||||
statsfile=/tmp/mps_load_test_stats.$$
|
||||
wget -q -O $statsfile http://localhost:8080/mod_pagespeed_global_statistics
|
||||
grep "$cache_stat_prefix" $statsfile | grep -v onchange=
|
||||
grep shm $statsfile
|
||||
grep dropped $statsfile
|
||||
grep cache_batcher $statsfile
|
||||
grep rewrite_cached_output_missed_deadline $statsfile
|
||||
grep bytes_saved $statsfile
|
||||
grep serf $statsfile
|
||||
grep queued-fetch-count $statsfile
|
||||
grep page_load_count $statsfile
|
||||
grep 404_count $statsfile
|
||||
grep file_cache_bytes_freed_in_cleanup $statsfile
|
||||
grep file_cache_cleanups $statsfile
|
||||
grep file_cache_write_errors $statsfile
|
||||
grep image_webp_rewrites $statsfile
|
||||
egrep "num_css|num_js" $statsfile
|
||||
rm -f $statsfile
|
||||
|
||||
set +e
|
||||
echo 'egrep "exit signal|CRASH" $error_log'
|
||||
egrep "exit signal|CRASH" $error_log
|
||||
if [ $? = 0 ]; then
|
||||
echo "*** $error_log has dangerous looking errors. Please investigate."
|
||||
exit 1
|
||||
else
|
||||
echo "No deaths reported in $error_log -- ship it."
|
||||
fi
|
||||
|
||||
if [ "$MEMCACHED" = "1" -o "$REDIS" = "1" ]; then
|
||||
date
|
||||
echo -n Sleeping 5 seconds before killing external cache server to let
|
||||
echo -n outstanding writes quiesce...
|
||||
sleep 5
|
||||
echo done
|
||||
fi
|
||||
@@ -1,29 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2012 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
if [ $# -lt 2 ]; then
|
||||
echo Usage: $0 error_log_filename stop_filename
|
||||
exit 1
|
||||
fi
|
||||
|
||||
error_log="$1"
|
||||
stop_file="$2"
|
||||
|
||||
(tail -f $error_log | egrep "exit signal|CRASH") & background_pid=$!
|
||||
while [ ! -e "$stop_file" ]; do sleep 10; done
|
||||
kill $background_pid
|
||||
|
||||
rm -f "$stop_file"
|
||||
@@ -1,50 +0,0 @@
|
||||
# Global configuration.
|
||||
#
|
||||
# The ModPagespeedFileCachePath directory must exist and be writable
|
||||
# by the apache user (as specified by the User directive).
|
||||
ModPagespeedFileCachePath "#HOME/apache2/pagespeed_cache/"
|
||||
|
||||
# Direct Apache to send all HTML output to the mod_pagespeed
|
||||
# output handler.
|
||||
AddOutputFilterByType MOD_PAGESPEED_OUTPUT_FILTER text/html
|
||||
|
||||
<VirtualHost localhost:8080>
|
||||
# Turn on mod_pagespeed. To completely disable mod_pagespeed, you
|
||||
# can set this to "off".
|
||||
ModPagespeed on
|
||||
|
||||
<Location /pagespeed_admin>
|
||||
Order allow,deny
|
||||
Allow from localhost
|
||||
Allow from 127.0.0.1
|
||||
SetHandler pagespeed_admin
|
||||
</Location>
|
||||
<Location /pagespeed_global_admin>
|
||||
Order allow,deny
|
||||
Allow from localhost
|
||||
Allow from 127.0.0.1
|
||||
SetHandler pagespeed_global_admin
|
||||
</Location>
|
||||
|
||||
# By default we siege-test without image beaconing.
|
||||
ModPagespeedCriticalImagesBeaconEnabled false
|
||||
|
||||
# Turn on "KeepAlive" so the sieges go fast.
|
||||
KeepAlive On
|
||||
KeepAliveTimeout 60
|
||||
LogFormat "%v %X %P %h %l %u %t \"%r\" %>s %b" common
|
||||
LogLevel warn
|
||||
|
||||
# This configuration is required for siege_instant_ipro.
|
||||
<Directory "#HOME/apache2/htdocs/mod_pagespeed_test/ipro/instant/wait/" >
|
||||
ModPagespeedInPlaceWaitForOptimized on
|
||||
ModPagespeedInPlaceRewriteDeadlineMs 5000
|
||||
</Directory>
|
||||
|
||||
<Directory "#HOME/apache2/htdocs/" >
|
||||
# This is enabled to make sure we don't crash mod_negotiation.
|
||||
Options +MultiViews
|
||||
</Directory>
|
||||
|
||||
# Test-specific configuration: #CUSTOM_CONFIG
|
||||
</VirtualHost>
|
||||
@@ -1,39 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
this_dir=$(dirname "${BASH_SOURCE[0]}")
|
||||
|
||||
sieges=(extended_css extended_js html_high_entropy html instant_ipro \
|
||||
ipro_image ipro_image_memcached rewritten_css rewritten_js)
|
||||
|
||||
echo "$(date): Starting ${#sieges[*]} sieges ..."
|
||||
|
||||
# So we can make the transactions/second line up, figure out the padding
|
||||
# we'll need for smaller siege-names.
|
||||
max_len=0
|
||||
for siege in "${sieges[@]}"; do
|
||||
len=${#siege}
|
||||
if [ "$len" -gt "$max_len" ]; then
|
||||
max_len=$len
|
||||
fi
|
||||
done
|
||||
|
||||
status=0
|
||||
for siege in ${sieges[*]}; do
|
||||
out="/tmp/siege_$siege.out"
|
||||
|
||||
# Make the columns line up by padding with spaces before the >& and "."
|
||||
# before the transactions-per-second.
|
||||
dots=$(eval printf "%0.s." {0..$((max_len - ${#siege}))})
|
||||
spaces=$(echo "$dots" | sed -e 's/./ /g')
|
||||
echo -n "$this_dir/siege_$siege.sh$spaces >& $out$dots.."
|
||||
"$this_dir/siege_$siege.sh" &> "$out"
|
||||
if [ $? -eq 0 ]; then
|
||||
grep "Transaction rate:" "$out" |cut -f2 -d: | \
|
||||
sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//'
|
||||
else
|
||||
echo FAILED
|
||||
status=1
|
||||
fi
|
||||
done
|
||||
echo "$(date): Finished sieges, exit status $status"
|
||||
exit $status
|
||||
@@ -1,25 +0,0 @@
|
||||
#!/bin/sh
|
||||
# Runs 'siege' on a single cache-extended URL cache-extended CSS file
|
||||
# scraped from rewrite_css.html.
|
||||
#
|
||||
# Usage:
|
||||
# devel/siege/siege_extended_css.sh
|
||||
|
||||
this_dir=$(dirname "${BASH_SOURCE[0]}")
|
||||
source "$this_dir/siege_helper.sh" || exit 1
|
||||
|
||||
# Fetch the rewrite_css example in cache-extend mode so we can get a small
|
||||
# cache-extended CSS file.
|
||||
EXAMPLE="http://localhost:8080/mod_pagespeed_example"
|
||||
|
||||
# The format of the 'link' HTML line we get is this:
|
||||
# <link rel="stylesheet" type="text/css"
|
||||
# href="styles/yellow.css.pagespeed.ce.lzJ8VcVi1l.css">
|
||||
# The line-break before 'href' is added here to avoid exceeding 80 cols
|
||||
# in this script but is not in the HTML.
|
||||
#
|
||||
# Splitting this by quotes seems a little fragile but it gets us the
|
||||
# URL in the 6th token.
|
||||
extract_pagespeed_url $EXAMPLE/rewrite_css.html 'link rel=' 6 extend_cache
|
||||
|
||||
run_siege "$EXAMPLE/$url"
|
||||
@@ -1,15 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
this_dir=$(dirname "${BASH_SOURCE[0]}")
|
||||
source "$this_dir/siege_helper.sh" || exit 1
|
||||
|
||||
# Fetch the rewrite_css example in cache-extend mode so we can get a small
|
||||
# cache-extended CSS file.
|
||||
EXAMPLE="http://localhost:8080/mod_pagespeed_example"
|
||||
|
||||
# The format of the 'script' HTML line we want is this:
|
||||
# <script src="rewrite_javascript.js" ...
|
||||
extract_pagespeed_url $EXAMPLE/rewrite_javascript.html 'script src=' \
|
||||
2 extend_cache
|
||||
|
||||
run_siege "$EXAMPLE/$url"
|
||||
@@ -1,120 +0,0 @@
|
||||
# This file expects to be sourced from another file in the same directory in
|
||||
# order to set us up for siege testing.
|
||||
|
||||
set -u # exit the script if any variable is uninitialized
|
||||
set -e
|
||||
|
||||
if [ -d devel ]; then
|
||||
cd devel
|
||||
fi
|
||||
if [ ! -d siege ]; then
|
||||
echo Run this script from the top or devel/ directories
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! hash siege 2>/dev/null; then
|
||||
echo "'siege' command is not found, please install it. "
|
||||
echo "siege_instant_ipro needs 3.0.8 or newer, other tests work with 3.0.5 "
|
||||
echo "as well."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# If an 'su' password is required, then get it before going off and compiling
|
||||
# stuff.
|
||||
./turn_on_timewait_recyling.sh
|
||||
|
||||
# Build optimized mod_pagespeed.so if necessary, and restart it.
|
||||
callgrind=0
|
||||
if [ $# -eq 1 ]; then
|
||||
if [ $1 == "-callgrind" ]; then
|
||||
shift
|
||||
callgrind=1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Stop callgrind if it was running previously.
|
||||
callgrind_control -k
|
||||
|
||||
# Clear all caches to make sure we start from a known state.
|
||||
make clean_slate_for_tests
|
||||
|
||||
# This variable contains config that we want to inject into siege.conf when
|
||||
# constructing pagespeed.conf, by sed-replacing "CUSTOM_CONFIG". This works
|
||||
# with multiple config lines.
|
||||
custom_config=""
|
||||
function add_config_line() {
|
||||
custom_config+="\n $1"
|
||||
}
|
||||
|
||||
# If $MEMCACHED_PORT is set (i.e. we were run from
|
||||
# run_program_with_memcached.sh) then configure it in the apache conf.
|
||||
set +u
|
||||
if [ ! -z "$MEMCACHED_PORT" ]; then
|
||||
add_config_line "ModPagespeedMemcachedServers localhost:$MEMCACHED_PORT"
|
||||
fi
|
||||
set -u
|
||||
|
||||
make apache_debug_stop
|
||||
sed -e "s/#CUSTOM_CONFIG/$custom_config/" -e "s^#HOME^$HOME^" < siege/siege.conf \
|
||||
> ~/apache2/conf/pagespeed.conf
|
||||
|
||||
if [ $callgrind -eq 1 ]; then
|
||||
echo running with callgrind...
|
||||
make -j8 apache_debug_install CONF=OptDebug
|
||||
valgrind --tool=callgrind --collect-systime=yes ~/apache2/bin/httpd -X &
|
||||
sleep 5
|
||||
callgrind=1
|
||||
else
|
||||
echo running without calgrind -- use -callgrind to get a profile.
|
||||
make -j8 apache_debug_restart BUILDTYPE=Release
|
||||
fi
|
||||
|
||||
# This function returns its value in shell variable 'url'. Note that it
|
||||
# will return whatever is in the HTML value, which is usually a relative
|
||||
# url.
|
||||
function extract_pagespeed_url() {
|
||||
url=""
|
||||
html=$1
|
||||
grep_pattern="$2"
|
||||
url_token_index="$3"
|
||||
filters="$4"
|
||||
OPTIONS="?PageSpeedFilters=$filters"
|
||||
|
||||
echo -n Finding pagespeed url in $html${OPTIONS}, pattern=\"${grep_pattern}\"
|
||||
echo ' #' $url_token_index
|
||||
while true; do
|
||||
LINE=$(wget -q -O - $html$OPTIONS | grep "$grep_pattern")
|
||||
if [ "$LINE" != '' ]; then
|
||||
url=$(echo $LINE | cut -d\" -f$url_token_index)
|
||||
echo $url
|
||||
break
|
||||
else
|
||||
sleep .1
|
||||
echo -n '.'
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
this_file=$(basename "$0")
|
||||
this_name=$(basename "$this_file" .sh)
|
||||
common_options=("--log=/tmp/$this_name.log" --rc=/dev/null)
|
||||
|
||||
# Runs siege, passing on any provided arguments.
|
||||
function run_siege_with_options() {
|
||||
(set -x; siege "${common_options[@]}" "$@")
|
||||
|
||||
if [ $callgrind -eq 1 ]; then
|
||||
sleep 2
|
||||
callgrind_control -d
|
||||
ls -ltR callgrind.*
|
||||
echo Type \'callgrind_control -k\' to close down valgrind.
|
||||
fi
|
||||
|
||||
(set -x; ./expectfail egrep "exit signal|CRASH" \
|
||||
~/apache2/logs/error_log)
|
||||
}
|
||||
|
||||
# Run siege on a set of arguments, with reasonable defaults.
|
||||
function run_siege() {
|
||||
run_siege_with_options --benchmark --time=60s --concurrent=50 "$@"
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
#!/bin/sh
|
||||
# Runs 'siege' on a HTML file.
|
||||
#
|
||||
# Usage:
|
||||
# devel/siege/siege_html.sh
|
||||
|
||||
this_dir=$(dirname "${BASH_SOURCE[0]}")
|
||||
source "$this_dir/siege_helper.sh" || exit 1
|
||||
|
||||
# TODO(jmarantz): There appears to be no better way to turn all
|
||||
# filters off via query-param. Though you might think that
|
||||
# PageSpeedRewriteLevel=PassThrough should work, it does not. There
|
||||
# is special handling for PageSpeedFilters=core but not for
|
||||
# PassThrough.
|
||||
URL="http://localhost:8080/mod_pagespeed_example/collapse_whitespace.html?PageSpeedFilters=rewrite_domains"
|
||||
run_siege "$URL"
|
||||
@@ -1,32 +0,0 @@
|
||||
#!/bin/sh
|
||||
# Runs 'siege' on a HTML file, but with 400k unique query-params. We
|
||||
# use 400k because a typical siege covers >300k transactions and we
|
||||
# want to avoid repeats.
|
||||
#
|
||||
# Usage:
|
||||
# devel/siege/siege_html_high_entropy.sh
|
||||
|
||||
this_dir=$(dirname "${BASH_SOURCE[0]}")
|
||||
source "$this_dir/siege_helper.sh" || exit 1
|
||||
|
||||
# Generate a list of unique URLs, each of which resolving to the same trival
|
||||
# HTML file. I don't see an easy way of specifying zero rewriters (default is
|
||||
# CoreFilters) but by specifying a single rewriter "rewrite_domains" as a
|
||||
# query-param, we can emulate that. Note that "rewrite_domains" doesn't do
|
||||
# anything if there are no domain-mappings set up.
|
||||
#
|
||||
# TODO(jmarantz): There appears to be no better way to turn all
|
||||
# filters off via query-param. Though you might think that
|
||||
# PageSpeedRewriteLevel=PassThrough should work, it does not. There
|
||||
# is special handling for PageSpeedFilters=core but not for
|
||||
# PassThrough.
|
||||
echo "Generating URLs..."
|
||||
urls="/tmp/high_entropy_urls.list.$$"
|
||||
> "$urls"
|
||||
trap "rm -f $urls" EXIT
|
||||
base_url="http://localhost:8080/mod_pagespeed_example/collapse_whitespace.html?PageSpeedFilters=rewrite_domains&q"
|
||||
for i in {1..400000}; do
|
||||
echo "$base_url=$i" >> "$urls"
|
||||
done
|
||||
|
||||
run_siege --file="$urls"
|
||||
@@ -1,61 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
if ! hash siege 2>/dev/null; then
|
||||
echo "'siege' command is not found. Please install siege >=3.0.8."
|
||||
echo "Note that repository may contain older version of Siege."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check that siege is at least 3.0.8.
|
||||
siege_version=$(siege --version 2>&1 | head -n 1 | awk '{print $2}')
|
||||
major=$(echo "$siege_version" | awk -F. '{print $1}')
|
||||
minor=$(echo "$siege_version" | awk -F. '{print $2}')
|
||||
point=$(echo "$siege_version" | awk -F. '{print $3}')
|
||||
|
||||
recent_siege=false
|
||||
if [ "$major" -gt 3 ]; then
|
||||
recent_siege=true
|
||||
elif [ "$major" -eq 3 ]; then
|
||||
if [ "$minor" -gt 0 ]; then
|
||||
recent_siege=true
|
||||
elif [ "$point" -ge 8 ]; then
|
||||
recent_siege=true
|
||||
fi
|
||||
fi
|
||||
|
||||
if ! "$recent_siege"; then
|
||||
# Versions before 3.0.8 didn't include the port number in the host header.
|
||||
echo "$0: siege is version $siege_version but we need at least 3.0.8"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
this_dir=$(dirname "${BASH_SOURCE[0]}")
|
||||
source "$this_dir/siege_helper.sh" || exit 1
|
||||
|
||||
# Make a list of urls large enough that we can run through them for at least
|
||||
# 20s.
|
||||
urls_file=$(mktemp /tmp/siege.urls.XXXXXX)
|
||||
function remove_urls_file() {
|
||||
rm "$urls_file" 2> /dev/null
|
||||
}
|
||||
trap remove_urls_file EXIT
|
||||
url_base="http://localhost:8080/mod_pagespeed_test/"
|
||||
url_base+="ipro/instant/wait/purple.css?$RANDOM="
|
||||
echo "Building url file..."
|
||||
> "$urls_file"
|
||||
N=100000
|
||||
for i in $(seq 1 "$N"); do
|
||||
echo "$url_base$i" >> "$urls_file"
|
||||
done
|
||||
|
||||
# The siege documentation suggests that --reps means how many times to run
|
||||
# through the file of urls, but it's actually implemented as meaning the number
|
||||
# of urls each of the concurrent processes should run through. So if we have N
|
||||
# urls to test and C processes, then each process should get N/C urls.
|
||||
C=10
|
||||
R="$(($N/$C))"
|
||||
run_siege_with_options \
|
||||
--file="$urls_file" \
|
||||
--reps="$R" \
|
||||
--benchmark \
|
||||
--concurrent="$C"
|
||||
@@ -1,25 +0,0 @@
|
||||
#!/bin/sh
|
||||
# Runs 'siege' on a single ipro-optimized image.
|
||||
#
|
||||
# Usage:
|
||||
# devel/siege/siege_ipro_image.sh
|
||||
|
||||
this_dir=$(dirname "${BASH_SOURCE[0]}")
|
||||
source "$this_dir/siege_helper.sh" || exit 1
|
||||
|
||||
echo "Waiting for the image to be IPRO-optimized..."
|
||||
URL="http://localhost:8080/mod_pagespeed_example/images/Puzzle.jpg"
|
||||
|
||||
while true; do
|
||||
content_length=$(curl -sS -D- -o /dev/null "$URL" | \
|
||||
grep '^Content-Length: ' | \
|
||||
grep -o '[0-9]*')
|
||||
if [ "$content_length" -lt 100000 ]; then
|
||||
# the image is fully ipro optimized
|
||||
break
|
||||
fi
|
||||
sleep .1
|
||||
echo -n .
|
||||
done
|
||||
|
||||
run_siege "$URL"
|
||||
@@ -1,12 +0,0 @@
|
||||
#!/bin/sh
|
||||
# Runs 'siege' on a single ipro-optimized image with memcached.
|
||||
#
|
||||
# Usage:
|
||||
# devel/siege/siege_ipro_image_memcached.sh
|
||||
|
||||
this_dir=$(readlink -e "$(dirname "${BASH_SOURCE[0]}")")
|
||||
root_dir=$(readlink -e "$this_dir/../..")
|
||||
install_dir="$root_dir/install"
|
||||
|
||||
set -e
|
||||
"$install_dir/run_program_with_memcached.sh" "$this_dir/siege_ipro_image.sh"
|
||||
@@ -1,25 +0,0 @@
|
||||
#!/bin/sh
|
||||
# Runs 'siege' on a single cache-extended URL cache-extended CSS file
|
||||
# scraped from rewrite_css.html.
|
||||
#
|
||||
# Usage:
|
||||
# devel/siege/siege_extended_css.sh
|
||||
|
||||
this_dir=$(dirname "${BASH_SOURCE[0]}")
|
||||
source "$this_dir/siege_helper.sh" || exit 1
|
||||
|
||||
# Fetch the rewrite_css example in cache-extend mode so we can get a small
|
||||
# cache-extended CSS file.
|
||||
EXAMPLE="http://localhost:8080/mod_pagespeed_example"
|
||||
|
||||
# The format of the 'link' HTML line we get is this:
|
||||
# <link rel="stylesheet" type="text/css"
|
||||
# href="styles/yellow.css.pagespeed.ce.lzJ8VcVi1l.css">
|
||||
# The line-break before 'href' is added here to avoid exceeding 80 cols
|
||||
# in this script but is not in the HTML.
|
||||
#
|
||||
# Splitting this by quotes seems a little fragile but it gets us the
|
||||
# URL in the 6th token.
|
||||
extract_pagespeed_url $EXAMPLE/rewrite_css.html 'link rel=' 6 rewrite_css
|
||||
|
||||
run_siege "$EXAMPLE/$url"
|
||||
@@ -1,15 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
this_dir=$(dirname "${BASH_SOURCE[0]}")
|
||||
source "$this_dir/siege_helper.sh" || exit 1
|
||||
|
||||
# Fetch the rewrite_css example in cache-extend mode so we can get a small
|
||||
# cache-extended CSS file.
|
||||
EXAMPLE="http://localhost:8080/mod_pagespeed_example"
|
||||
|
||||
# The format of the 'script' HTML line we want is this:
|
||||
# <script src="rewrite_javascript.js" ...
|
||||
extract_pagespeed_url $EXAMPLE/rewrite_javascript.html 'script src=' \
|
||||
2 rewrite_javascript
|
||||
|
||||
run_siege "$EXAMPLE/$url"
|
||||
@@ -1,84 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2016 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
set -e
|
||||
set -u
|
||||
|
||||
APACHE_SERVER="$1"
|
||||
APACHE_SLURP_ORIGIN_PORT="$2"
|
||||
APACHE_SLURP_PORT="$3"
|
||||
WGET="$4"
|
||||
TMP_SLURP_DIR="$5"
|
||||
PAGESPEED_TEST_HOST="$6"
|
||||
|
||||
this_dir=$(dirname "${BASH_SOURCE[0]}")
|
||||
source "$this_dir/../pagespeed/automatic/system_test_helpers.sh" \
|
||||
"$APACHE_SERVER"
|
||||
|
||||
EXTEND_CACHE_URL="http://www.modpagespeed.com/extend_cache.html"
|
||||
|
||||
DEVEL_DIR="$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
start_test "Testing slurping (read only, via proxy)"
|
||||
http_proxy="$APACHE_SERVER" "$WGET" -q -O /dev/null \
|
||||
"$EXTEND_CACHE_URL?PageSpeedFilters=extend_cache"
|
||||
|
||||
# TODO(sligocki): Use something like fetch_until rather than
|
||||
# always waiting 2 seconds :/
|
||||
sleep 2
|
||||
|
||||
OUT="$(http_proxy="$APACHE_SERVER" "$WGET" -q -O - \
|
||||
"$EXTEND_CACHE_URL?PageSpeedFilters=extend_cache")"
|
||||
check_from "$OUT" fgrep "images/Puzzle.jpg.pagespeed.ce."
|
||||
|
||||
OUT="$(http_proxy="$APACHE_SERVER" "$WGET" -q -O - \
|
||||
"$EXTEND_CACHE_URL?PageSpeed=off")"
|
||||
check_from "$OUT" fgrep '"images/Puzzle.jpg"'
|
||||
|
||||
start_test "Testing slurping (dns mode, mimicing webpagetest)"
|
||||
OUT="$("$WGET" --header="Host: www.modpagespeed.com" -q -O - --save-headers \
|
||||
"$EXTEND_CACHE_URL?PageSpeedFilters=extend_cache")"
|
||||
check_from "$OUT" grep -q 'HTTP/1.[01] 200 OK'
|
||||
|
||||
start_test "Testing slurping http://www.example.com expecting index.html ..."
|
||||
echo "rewrite will not happen"
|
||||
OUT="$(http_proxy="$APACHE_SERVER" "$WGET" -q -O - http://www.example.com/)"
|
||||
check_from "$OUT" fgrep "example.com expected body"
|
||||
|
||||
start_test "Connection-close stripping:"
|
||||
echo 'First check we get "Connection:close"'
|
||||
|
||||
echo "straight from the origin -- no proxy."
|
||||
rm -rf "$TMP_SLURP_DIR"
|
||||
|
||||
slurp_origin_url="http://localhost:$APACHE_SLURP_ORIGIN_PORT"
|
||||
slurp_origin_url+="/close_connection/close_connection.html"
|
||||
|
||||
OUT="$("$WGET" --no-proxy -q --save-headers -O - --header="Connection:" \
|
||||
"$slurp_origin_url")"
|
||||
check_from "$OUT" fgrep "Connection: close"
|
||||
|
||||
echo "Now check that Connection:close is stripped from a writing slurp."
|
||||
OUT=$(http_proxy=localhost:$APACHE_SLURP_PORT "$WGET" -q --save-headers -O - \
|
||||
--header="Connection:" "$slurp_origin_url" || true)
|
||||
check_not_from "$OUT" fgrep -q "Connection: close"
|
||||
|
||||
start_test "Testing slurp-proxying of a POST"
|
||||
rm -rf "$TMP_SLURP_DIR"
|
||||
OUT="$(http_proxy=localhost:$APACHE_SLURP_PORT "$WGET" -q -O - \
|
||||
--post-data="a=b&c=d" \
|
||||
http://$PAGESPEED_TEST_HOST/do_not_modify/cgi/verify_post.cgi)"
|
||||
check_from "$OUT" grep "PASS"
|
||||
@@ -1,152 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2012 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# This scripts reads a list of URLs from the provided file, and
|
||||
# fetches them in parallel from a local slurping proxy in a randomized
|
||||
# order. Loading times and statuses for them are then output to
|
||||
# /tmp/latency-(encoding of settings).txt
|
||||
|
||||
# number of fetches to do in parallel
|
||||
if [ -z $PAR ]; then
|
||||
PAR=10
|
||||
fi
|
||||
|
||||
# number of times to run
|
||||
if [ -z $RUNS ]; then
|
||||
RUNS=3
|
||||
fi
|
||||
|
||||
# How many times to repeat each trace without restarting the workers
|
||||
if [ -z $EXP ]; then
|
||||
EXP=3
|
||||
fi
|
||||
|
||||
# Proxy machine. If you specify this, make sure to give an IP address,
|
||||
# as doing DNS lookups for it can slow things down a lot
|
||||
if [ -z $PROXY_HOST ]; then
|
||||
PROXY_HOST=127.0.0.1
|
||||
fi
|
||||
|
||||
# .. and port
|
||||
if [ -z $PROXY_PORT ]; then
|
||||
PROXY_PORT=8080
|
||||
fi
|
||||
|
||||
# Extra flags to pass to fetch_all.py
|
||||
FLAGS=${FLAGS:-}
|
||||
|
||||
USER_AGENT_FLAG=${USER_AGENT:+--user_agent}
|
||||
|
||||
if [ $# -lt 1 ]; then
|
||||
echo "Usage: devel/trace_stress_test.sh urls_file ..."
|
||||
echo "Shuffles each urls_file in turn, runs through shuffled file using"
|
||||
echo "$PAR parallel wget jobs. Repeats this process $RUN times."
|
||||
exit 2
|
||||
fi
|
||||
|
||||
OUR_PATH=`dirname $0`
|
||||
STAMP=`date +%Y%m%d-%H%M`
|
||||
LATENCY_REPORT=/tmp/latency-$PROXY_HOST-R$RUNS-P$PAR-E$EXP-$STAMP.txt
|
||||
TAIL_HEAD_TEMP=/tmp/tail_head.$$
|
||||
|
||||
echo "time status url" > $LATENCY_REPORT
|
||||
|
||||
# Examines file in $1, starting at line $2, and the next $3 lines into file $4.
|
||||
function tail_head {
|
||||
input_file=$1
|
||||
start_pos=$2
|
||||
num_lines=$3
|
||||
outfile=$4
|
||||
|
||||
# We make a temp file because otherwise we (at least Josh) get a lot of
|
||||
# "tail: write error" printed out.
|
||||
tail $input_file -n +$start_pos < $input_file > $TAIL_HEAD_TEMP
|
||||
head $TAIL_HEAD_TEMP -n $num_lines >$outfile
|
||||
}
|
||||
|
||||
function single_run {
|
||||
FILE=$1
|
||||
# Shuffle the log and split it into pieces
|
||||
SHUF_FILE=`mktemp`
|
||||
for I in `seq 1 $EXP`; do
|
||||
shuf $FILE >> $SHUF_FILE
|
||||
done
|
||||
LINES=`wc -l $SHUF_FILE | sed s#$SHUF_FILE##`
|
||||
# Setting chunk size slightly too large balances load a little better, most
|
||||
# obvious when $LINES < $PAR.
|
||||
CHUNK=`expr 1 + $LINES / $PAR`
|
||||
|
||||
# feed each chunk to a separate wget
|
||||
PIECES=
|
||||
LOGS=
|
||||
POS=0
|
||||
for I in `seq 1 $PAR`; do
|
||||
CUR_CHUNK=$CHUNK
|
||||
if [ $I -eq $PAR ]; then
|
||||
# make sure we also include the remainder
|
||||
EXTRA=`expr $LINES - $PAR \* $CHUNK`
|
||||
CUR_CHUNK=`expr $CUR_CHUNK + $EXTRA`
|
||||
fi
|
||||
PIECE=`mktemp`
|
||||
LOG=`mktemp`
|
||||
PIECES="$PIECES $PIECE"
|
||||
LOGS="$LOGS $LOG"
|
||||
tail_head $SHUF_FILE $POS $CUR_CHUNK $PIECE
|
||||
$OUR_PATH/fetch_all.py $FLAGS $USER_AGENT_FLAG $USER_AGENT \
|
||||
--proxy_host $PROXY_HOST --proxy_port $PROXY_PORT \
|
||||
--urls_file $PIECE &> $LOG &
|
||||
POS=`expr $POS + $CHUNK`
|
||||
done
|
||||
|
||||
# Wait for all to finish
|
||||
wait
|
||||
|
||||
# Print out the summary messages
|
||||
cat $LOGS >> $LATENCY_REPORT
|
||||
|
||||
# clean up
|
||||
rm $PIECES
|
||||
rm $LOGS
|
||||
rm $SHUF_FILE
|
||||
}
|
||||
|
||||
START=$SECONDS
|
||||
|
||||
for RUN in `seq 1 $RUNS`; do
|
||||
echo "Run $RUN"
|
||||
for FILE in "$@"; do
|
||||
echo "File $FILE"
|
||||
single_run "$FILE"
|
||||
done
|
||||
echo "----------------------------------------------------------------------"
|
||||
done
|
||||
|
||||
STOP=$SECONDS
|
||||
LINES=`tail -n +2 $LATENCY_REPORT|wc -l`
|
||||
ELAPSED=`expr $STOP - $START`
|
||||
QPS=`expr $LINES / $ELAPSED`
|
||||
echo "QPS estimate (inaccurate for short runs):" $QPS "requests/sec"
|
||||
echo
|
||||
$OUR_PATH/trace_stress_test_percentiles.sh $LATENCY_REPORT | cut -c 1-80
|
||||
echo
|
||||
echo "10 worst latencies:"
|
||||
head -n 10 ${LATENCY_REPORT%%.txt}-sorted.txt
|
||||
echo
|
||||
echo "Status statistics:"
|
||||
tail -n +2 $LATENCY_REPORT | cut -d ' ' -f 2 | sort | uniq -c
|
||||
echo "Full latency report in:" $LATENCY_REPORT
|
||||
|
||||
rm -f $TAIL_HEAD_TEMP
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2012 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# This script takes the output of trace_stress_test.sh and reports cheap and
|
||||
# cheerful median, 75th, 90th, 95th, 99th, and worst latencies.
|
||||
if [ "X$1" == "X" ]; then
|
||||
# work from stdin
|
||||
sorted="/tmp/latency-$$-sorted.txt"
|
||||
sort -r -g -k 1 > $sorted
|
||||
else
|
||||
# construct sorted file name
|
||||
sorted="${1%%.txt}-sorted.txt"
|
||||
sort -r -g -k 1 "$1" > "$sorted"
|
||||
fi
|
||||
echo "Sorted latency data in: $sorted" 1>&2
|
||||
echo "% ms status url" 1>&2
|
||||
lines=$(wc -l < "$sorted")
|
||||
for i in 50 75 90 95 99; do
|
||||
divisor=$((100 / (100 - $i)))
|
||||
echo -n "$i "
|
||||
head -$(($lines / $divisor)) "$sorted" | tail -1
|
||||
done
|
||||
echo -n "mx "
|
||||
head -1 "$sorted"
|
||||
@@ -1,35 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2012 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# Makes linux enable fast reuse of sockets in TIME-WAIT state. We need this for
|
||||
# load testing so that we don't run out of connection table slots and fail to
|
||||
# get a socket. This isn't generally a good idea to set on public-facing
|
||||
# servers because of trouble with NATs, but is fine here.
|
||||
#
|
||||
# This scripts prompts you for your su password if TIME-WAIT recycling isn't
|
||||
# already enabled.
|
||||
|
||||
set -e # exit script if any command returns an error
|
||||
set -u # exit the script if any variable is uninitialized
|
||||
|
||||
recycle_file=/proc/sys/net/ipv4/tcp_tw_recycle
|
||||
if [ $(cat $recycle_file) -ne 1 ]; then
|
||||
echo "Putting a '1' in proc/sys/net/ipv4/tcp_tw_recycle to avoid"
|
||||
echo "running out of port numbers (due to old ones being in TIME_WAIT state)."
|
||||
echo 1 >/tmp/1
|
||||
sudo cp /tmp/1 $recycle_file
|
||||
rm /tmp/1
|
||||
fi
|
||||
@@ -1,133 +0,0 @@
|
||||
{
|
||||
Jpeg*Test - chromium_jpeg - Conditional jump or move depends on uninitialised value(s)
|
||||
Memcheck:Cond
|
||||
...
|
||||
fun:chromium_jpeg_*
|
||||
...
|
||||
fun:*Jpeg*Test_*
|
||||
}
|
||||
{
|
||||
Jpeg*Test - chromium_jpeg - Use of uninitialised value of size 8
|
||||
Memcheck:Value8
|
||||
...
|
||||
fun:chromium_jpeg_*
|
||||
...
|
||||
fun:*Jpeg*Test_*
|
||||
}
|
||||
{
|
||||
ImageTest - WebPEncode - Use of uninitialised value of size 8
|
||||
Memcheck:Value8
|
||||
...
|
||||
fun:WebPEncode
|
||||
...
|
||||
fun:*ImageTest_*
|
||||
}
|
||||
{
|
||||
PngOptimizerTest - DGifGetLine - Conditional jump or move depends on uninitialised value(s)
|
||||
Memcheck:Cond
|
||||
...
|
||||
fun:DGifGetLine
|
||||
...
|
||||
fun:*PngOptimizerTest_*
|
||||
}
|
||||
{
|
||||
JpegReaderTest - DecodeAndCompareImagesByPSNRENS - Conditional jump or move depends on uninitialised value(s)
|
||||
Memcheck:Cond
|
||||
...
|
||||
fun:*DecodeAndCompareImagesByPSNRENS*
|
||||
...
|
||||
fun:*JpegReaderTest_*
|
||||
}
|
||||
{
|
||||
re2 leaks a little memory on startup due to InitEmpty - Ignore it
|
||||
Memcheck:Leak
|
||||
...
|
||||
fun:*re2*InitEmpty*
|
||||
...
|
||||
}
|
||||
{
|
||||
protobufs with introspection leak a little memory -- Ignore it
|
||||
Memcheck:Leak
|
||||
...
|
||||
fun:*google*protobuf*internal*InitEmptyString*
|
||||
...
|
||||
}
|
||||
{
|
||||
ImageAnalysisTest - PhotoMetric - Conditional jump or move depends on uninitialised value(s)
|
||||
Memcheck:Cond
|
||||
...
|
||||
fun:*PhotoMetric*
|
||||
...
|
||||
fun:*ImageAnalysisTest_*
|
||||
}
|
||||
{
|
||||
ImageAnalysisTest - PhotoMetric - Use of uninitialised value of size 8
|
||||
Memcheck:Value8
|
||||
...
|
||||
fun:*Histogram*
|
||||
...
|
||||
fun:*ImageAnalysisTest_*
|
||||
}
|
||||
{
|
||||
ScanlineResizerTest - ResizeAndWrite - Conditional jump or move depends on uninitialised value(s)
|
||||
Memcheck:Cond
|
||||
...
|
||||
fun:*chromium_jpeg_write_scanlines*
|
||||
...
|
||||
fun:*ScanlineResizerTest_*
|
||||
}
|
||||
{
|
||||
ScanlineResizerTest - ResizeAndWrite - Conditional jump or move depends on uninitialised value(s)
|
||||
Memcheck:Cond
|
||||
...
|
||||
fun:*chromium_jpeg_finish_compress*
|
||||
...
|
||||
fun:*ScanlineResizerTest_*
|
||||
}
|
||||
{
|
||||
ScanlineResizerTest - ResizeAndWrite - Use of uninitialised value of size 8
|
||||
Memcheck:Value8
|
||||
...
|
||||
fun:*chromium_jpeg_finish_compress*
|
||||
...
|
||||
fun:*ScanlineResizerTest_*
|
||||
}
|
||||
{
|
||||
OpenSSL fancy math
|
||||
Memcheck:Value8
|
||||
fun:bn_mul_mont
|
||||
}
|
||||
{
|
||||
OpenSSL fancy math
|
||||
Memcheck:Value8
|
||||
fun:bn_mul4x_mont
|
||||
}
|
||||
{
|
||||
mod_ssl leak
|
||||
Memcheck:Leak
|
||||
...
|
||||
fun:EC_GROUP_new_by_curve_name
|
||||
...
|
||||
fun:ssl_init_Module
|
||||
}
|
||||
{
|
||||
GFlags bookkeeping
|
||||
Memcheck:Leak
|
||||
...
|
||||
fun:_ZN6google14FlagRegistererC1EPKcS2_S2_S2_PvS3_
|
||||
...
|
||||
}
|
||||
{
|
||||
RE2::Init singleton "leak"
|
||||
Memcheck:Leak
|
||||
...
|
||||
fun:_ZZN3re23RE24InitERKNS_11StringPieceERKNS0_7OptionsEENKUlvE_clEv
|
||||
}
|
||||
|
||||
{
|
||||
grpc error "leak" -- TODO(cheesy): find the root cause and eliminate.
|
||||
Memcheck:Leak
|
||||
...
|
||||
fun:grpc_error_create
|
||||
fun:grpc_server_add_insecure_http2_port
|
||||
}
|
||||
@@ -1,39 +0,0 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Given a deb, extract mod_pagespeed.so and mod_pagespeed_ap24.so. This is
|
||||
# useful for running load tests on prior releases. The files are left in a temp
|
||||
# directory, and the path to them is printed to stdout.
|
||||
|
||||
set -e # exit script if any command returns an error
|
||||
set -u # exit the script if any variable is uninitialized
|
||||
|
||||
if [ ! $# -eq 1 ]; then
|
||||
echo "Usage: ./extract_so_from_deb.sh mod-pagespeed-beta_current_amd64.deb"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -e $1 ]; then
|
||||
echo "File '$1' not found."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
input_deb=$(readlink -e $1)
|
||||
|
||||
TMP=$(mktemp -d)
|
||||
cd "$TMP"
|
||||
mkdir scratch
|
||||
cd scratch
|
||||
|
||||
ar vx "$input_deb" > /dev/null
|
||||
# all deb files have a data.tar.gz, which is now in the current directory.
|
||||
tar -x --file=data.tar.gz \
|
||||
--wildcards ./usr/lib/apache2/modules/mod_pagespeed\*.so
|
||||
|
||||
mv usr/lib/apache2/modules/* ..
|
||||
cd ..
|
||||
rm -r scratch/
|
||||
|
||||
echo "The .so files are:"
|
||||
for x in $PWD/*; do
|
||||
echo " $x"
|
||||
done | sort -r
|
||||
@@ -1,21 +0,0 @@
|
||||
# Turn on server side include processing for the header inclusion.
|
||||
AddOutputFilter INCLUDES .html
|
||||
Options +Includes
|
||||
Options +FollowSymLinks
|
||||
|
||||
# Make /foo.html available at /foo
|
||||
RewriteEngine on
|
||||
RewriteCond %{REQUEST_FILENAME}.html -f
|
||||
RewriteRule !.*\.html$ %{REQUEST_FILENAME}.html [L]
|
||||
|
||||
# Turn on mod_pagespeed to optimize our docs.
|
||||
ModPagespeed on
|
||||
ModPagespeedRewriteLevel CoreFilters
|
||||
ModPagespeedEnableFilters collapse_whitespace
|
||||
ModPagespeedEnableFilters remove_comments
|
||||
ModPagespeedInPlaceResourceOptimization on
|
||||
|
||||
# Do not optimize these resources which are used for blogpost
|
||||
ModPagespeedDisallow */puzzle_optimized_to_low_quality_webp.webp
|
||||
ModPagespeedDisallow */puzzle_optimized_to_low_quality_webp_and_saved_as_png.png
|
||||
ModPagespeedDisallow */puzzle_original.jpg
|
||||
@@ -1,40 +0,0 @@
|
||||
<html>
|
||||
<head>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<title>mod_pagespeed Security Advisory: Insufficient Hostname Verification</title>
|
||||
<link rel="stylesheet" href="doc.css">
|
||||
</head>
|
||||
<body>
|
||||
<!--#include virtual="_header.html" -->
|
||||
|
||||
|
||||
<div id=content>
|
||||
<h1>mod_pagespeed Security Advisory: Insufficient Hostname Verification</h1>
|
||||
<dl>
|
||||
<dt>CVE Identifier:</dt>
|
||||
<dd>CVE-2012-4001</dd>
|
||||
<dt>Disclosed:</dt>
|
||||
<dd>September 12, 2012</dd>
|
||||
<dt>Versions Affected:</dt>
|
||||
<dd>All versions of mod_pagespeed up to and including 0.10.22.4.</dd>
|
||||
<dt>Summary:</dt>
|
||||
<dd>mod_pagespeed performs insufficient verification of its own host name,
|
||||
which makes it possible to trick it into doing HTTP fetches and resource
|
||||
processing from arbitrary host names, including potentially bypassing
|
||||
firewalls.</dd>
|
||||
<dt>Solution:</dt>
|
||||
<dd>mod_pagespeed 0.10.22.6 has been released with a fix.</dd>
|
||||
<dt>Workaround:</dt>
|
||||
<dd>If you are unable to upgrade to the new version, you can avoid this
|
||||
issue by changing your Apache httpd configuration. Give any virtual host
|
||||
that enables mod_pagespeed (and the global configuration, if it also enables
|
||||
mod_pagespeed) an accurate explicit <code>ServerName</code>, and set the
|
||||
options <code>UseCanonicalName</code> and
|
||||
<code>UseCanonicalPhysicalPort</code> to <code>On</code> in each. Please be
|
||||
aware, however, that depending on the version,
|
||||
<a href="CVE-2012-4360">CVE-2012-4360</a> may also apply.
|
||||
</dd>
|
||||
</div>
|
||||
<!--#include virtual="_footer.html" -->
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,30 +0,0 @@
|
||||
<html>
|
||||
<head>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<title>mod_pagespeed Security Advisory: Cross-Site Scripting</title>
|
||||
<link rel="stylesheet" href="doc.css">
|
||||
</head>
|
||||
<body>
|
||||
<!--#include virtual="_header.html" -->
|
||||
|
||||
|
||||
<div id=content>
|
||||
<h1>mod_pagespeed Security Advisory: Cross-Site Scripting</h1>
|
||||
<dl>
|
||||
<dt>CVE Identifier:</dt>
|
||||
<dd>CVE-2012-4360</dd>
|
||||
<dt>Disclosed:</dt>
|
||||
<dd>September 12, 2012</dd>
|
||||
<dt>Versions Affected:</dt>
|
||||
<dd>mod_pagespeed versions 0.10.19.1 through 0.10.22.4 (inclusive).
|
||||
Versions 0.9.18.6 and earlier are unaffected.</dd>
|
||||
<dt>Summary:</dt>
|
||||
<dd>mod_pagespeed performs insufficient escaping in some cases, which can
|
||||
permit a hostile 3rd party to inject JavaScript running in context of
|
||||
the site.</dd>
|
||||
<dt>Solution:</dt>
|
||||
<dd>mod_pagespeed 0.10.22.6 has been released with a fix.</dd>
|
||||
</div>
|
||||
<!--#include virtual="_footer.html" -->
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,61 +0,0 @@
|
||||
<html>
|
||||
<head>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<title>mod_pagespeed and ngx_pagespeed Security Advisory: Cross-Site Scripting</title>
|
||||
<link rel="stylesheet" href="doc.css">
|
||||
</head>
|
||||
<body>
|
||||
<!--#include virtual="_header.html" -->
|
||||
|
||||
|
||||
<div id=content>
|
||||
<h1>mod_pagespeed and ngx_pagespeed Security Advisory: Cross-Site Scripting</h1>
|
||||
<dl>
|
||||
<dt>CVE Identifier:</dt>
|
||||
<dd><p>CVE-2013-6111</p></dd>
|
||||
<dt>Disclosed:</dt>
|
||||
<dd><p>October 28th, 2013</p></dd>
|
||||
<dt>Versions Affected:</dt>
|
||||
<dd>
|
||||
<ul>
|
||||
<li>mod_pagespeed versions earlier than 1.0</li>
|
||||
<li>mod_pagespeed version 1.0.22.7 (fixed in 1.0.22.8)</li>
|
||||
<li>mod_pagespeed versions 1.1</li>
|
||||
<li>mod_pagespeed 1.2.24.1 (fixed in 1.2.24.2)</li>
|
||||
<li>mod_pagespeed 1.3.25.1 through 1.3.25.4 (fixed in 1.3.25.5)</li>
|
||||
<li>mod_pagespeed 1.4.26.1 through 1.4.26.4 (fixed in 1.4.26.5)</li>
|
||||
<li>mod_pagespeed and ngx_pagespeed 1.5.27.1 through 1.5.27.3 (fixed in 1.5.27.4)</li>
|
||||
<li>mod_pagespeed and ngx_pagespeed 1.6.29.1 through 1.6.29.6 (fixed in 1.6.29.7)</li>
|
||||
</ul>
|
||||
</dd>
|
||||
<dt>Summary:</dt>
|
||||
<dd><p>Some versions of mod_pagespeed and ngx_pagespeed are vulnerable to
|
||||
cross-site scripting (XSS), which can permit a hostile 3rd party to
|
||||
inject javascript running in the context of the site.</p></dd>
|
||||
<dt>Solution:</dt>
|
||||
<dd><p>For mod_pagespeed, update to one of versions 1.0.22.8-stable,
|
||||
1.2.24.2-stable, 1.3.25.5-stable, 1.4.26.5-stable, 1.5.27.4-beta, or
|
||||
1.6.29.7 or newer.</p>
|
||||
|
||||
<p>For ngx_pagespeed, update to 1.6.29.7 or newer.</p>
|
||||
</dd>
|
||||
<dt>Workaround:</dt>
|
||||
<dd>
|
||||
<p>No workaround is available for mod_pagespeed.</p>
|
||||
|
||||
<p>For ngx_pagespeed, you can completely prohibit access to
|
||||
<code>/ngx_pagespeed_statistics</code>,
|
||||
<code>/ngx_pagespeed_global_statistics</code> and
|
||||
<code>/ngx_pagespeed_message</code> (an IP whitelist is insufficient), via
|
||||
options similar to:
|
||||
<pre>
|
||||
location /ngx_pagespeed_global_statistics { deny all; }
|
||||
location /ngx_pagespeed_statistics { deny all; }
|
||||
location /ngx_pagespeed_message { deny all; }
|
||||
</pre>
|
||||
</p>
|
||||
</dd>
|
||||
</div>
|
||||
<!--#include virtual="_footer.html" -->
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,91 +0,0 @@
|
||||
<div id=footer>
|
||||
<!--#include virtual="_navline.html" -->
|
||||
</div>
|
||||
|
||||
<script>
|
||||
function buildTocHelper(node, headers) {
|
||||
if (node.nodeType == 1) {
|
||||
// Element node.
|
||||
var nodeName = node.nodeName.toLowerCase();
|
||||
if (nodeName == "h2" || nodeName == "h3" || nodeName == "h4" ||
|
||||
nodeName == "h5" || nodeName == "h6") {
|
||||
if (node.id) {
|
||||
headers.push([nodeName, node.innerHTML, node.id]);
|
||||
node.appendChild(document.createTextNode("\u00A0")); // nbsp
|
||||
var a = document.createElement("a");
|
||||
a.class = "header-link";
|
||||
a.href = "#" + node.id;
|
||||
a.textContent = "\uD83D\uDD17"; // link symbol
|
||||
node.appendChild(a);
|
||||
}
|
||||
} else {
|
||||
for (var i = 0; i < node.childNodes.length; i++) {
|
||||
buildTocHelper(node.childNodes[i], headers);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function buildToc() {
|
||||
var headers = [];
|
||||
buildTocHelper(document.body, headers);
|
||||
if (headers.length == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
var toc = document.getElementById("toc");
|
||||
var tocContents = document.createElement("div");
|
||||
tocContents.id = "toc-contents";
|
||||
tocContents.textContent = "Contents";
|
||||
toc.appendChild(tocContents);
|
||||
|
||||
var level = 1;
|
||||
var currentUl = null;
|
||||
for (var i = 0; i < headers.length; i++) {
|
||||
var header = headers[i];
|
||||
var headerLevel = header[0];
|
||||
var headerValue = header[1];
|
||||
var headerId = header[2];
|
||||
|
||||
var newLevel = parseInt(headerLevel.substring(1));
|
||||
while (newLevel > level) {
|
||||
// We loop here to handle the case where we have h2 ... h4. This
|
||||
// isn't a good way to write html, but someone may still do it.
|
||||
|
||||
var newUl = document.createElement("ul");
|
||||
if (currentUl == null) {
|
||||
toc.appendChild(newUl);
|
||||
} else {
|
||||
currentUl.appendChild(newUl);
|
||||
}
|
||||
currentUl = newUl;
|
||||
level++;
|
||||
}
|
||||
while (newLevel < level) {
|
||||
currentUl = currentUl.parentNode;
|
||||
level--;
|
||||
}
|
||||
var li = document.createElement("li");
|
||||
var a = document.createElement("a");
|
||||
a.href = "#" + headerId;
|
||||
a.textContent = headerValue;
|
||||
li.appendChild(a);
|
||||
currentUl.appendChild(li);
|
||||
}
|
||||
}
|
||||
|
||||
function wrapTables() {
|
||||
var tables = document.getElementsByTagName("table");
|
||||
for (var i = 0; i < tables.length; i++) {
|
||||
var table = tables[i];
|
||||
var parent = table.parentNode;
|
||||
var div = document.createElement('div');
|
||||
div.className = "table-wrapper";
|
||||
parent.insertBefore(div, table);
|
||||
div.appendChild(table);
|
||||
}
|
||||
}
|
||||
|
||||
buildToc();
|
||||
wrapTables();
|
||||
</script>
|
||||
@@ -1,15 +0,0 @@
|
||||
<div id=header>
|
||||
<div id=logoline>
|
||||
<div id=logo>
|
||||
<img src="https://www.gstatic.com/images/branding/product/1x/pagespeed_32dp.png"
|
||||
srcset="https://www.gstatic.com/images/branding/product/2x/pagespeed_32dp.png"
|
||||
width=32 height=32 alt="pagespeed logo">
|
||||
</div>
|
||||
<div id=logotext>PageSpeed</div>
|
||||
</div>
|
||||
<div id=navline>
|
||||
<a href="/doc/">← documentation index</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id=toc></div>
|
||||
@@ -1,3 +0,0 @@
|
||||
<div id=navline>
|
||||
<a href="/doc/">← documentation index</a>
|
||||
</div>
|
||||
@@ -1,392 +0,0 @@
|
||||
<html>
|
||||
<head>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<title>PageSpeed Admin Pages</title>
|
||||
<link rel="stylesheet" href="doc.css">
|
||||
</head>
|
||||
<body>
|
||||
<!--#include virtual="_header.html" -->
|
||||
|
||||
|
||||
<div id=content>
|
||||
<h1>PageSpeed Admin Pages</h1>
|
||||
<p>
|
||||
The admin pages are a collection of features that provide visibility
|
||||
into the operation of the PageSpeed optimizations.
|
||||
</p>
|
||||
<p>
|
||||
The pagespeed_admin and pagespeed_global_admin pages aggregate a set of pages
|
||||
showing server state so they can be accessed from a single handler. By
|
||||
organizing all these features under a single admin page, this can be done once,
|
||||
and can serve as a launching point for future administration features.
|
||||
Before <strong>version 1.9.32.1</strong> the admin pages were read-only, but
|
||||
starting in <strong>version 1.9.32.1</strong>, cache purging is supported.
|
||||
</p>
|
||||
<img src="images/admin_config.png" style="border:1px solid black" />
|
||||
<p>
|
||||
The name of the currently active page is underlined in the top navigation bar.
|
||||
</p>
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Page</th>
|
||||
<th>Related Options</th>
|
||||
<th>Description</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>Statistics</td>
|
||||
<td>
|
||||
<a href="#statistics"><code>Statistics</code></a><br/>
|
||||
<a href="#virtual-hosts-and-stats"
|
||||
><code>UsePerVHostStatistics</code></a><br/>
|
||||
<code>mod_pagespeed_beacon</code><br/>
|
||||
<code>ngx_pagespeed_beacon</code>
|
||||
</td>
|
||||
<td>
|
||||
Shows server statistics since startup, such as how many
|
||||
resources are being optimized by filters, as well as various
|
||||
latency and cache effectiveness metrics.
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Configuration</td>
|
||||
<td><a href="config_filters">Configuring Filters</a><br/>
|
||||
<a href="https_support#spdy_configuration"
|
||||
><code>ModPagespeedIf</code></a> (Apache only)</td>
|
||||
<td>
|
||||
Shows detailed configuration information including all filters,
|
||||
options, and the current cache flush timestamp.
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Histograms</td>
|
||||
<td>
|
||||
<a href="filter-instrumentation-add"
|
||||
><code>add_instrumentation</code></a><br/>
|
||||
</td>
|
||||
<td>
|
||||
Shows detailed latency data for Page Load Time, rewriting,
|
||||
caches and HTTP fetching.
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Caches</td>
|
||||
<td>
|
||||
<a href="system#memcached"><code>MemcachedServers</code></a>
|
||||
<a href="system#shm_cache"><code>CreateSharedMemoryMetadataCache</code></a>
|
||||
<a href="system#purge_cache"><code>EnableCachePurge</code></a>
|
||||
</td>
|
||||
<td>
|
||||
Shows detailed cache configuration information. When accessed
|
||||
from the Admin handler, it can be used to inspect the contents
|
||||
of the cache, and provides an interface to purge the cache.
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Console</td>
|
||||
<td>
|
||||
<a href="admin#statistics"><code>Statistics</code></a><br/>
|
||||
<a href="console#configuring"><code>StatisticsLogging</code></a><br/>
|
||||
<a href="console#configuring"><code>LogDir</code></a>
|
||||
</td>
|
||||
<td>
|
||||
Displays a <a href="console">console</a> of graphs
|
||||
of server optimization behavior over time.
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Message History</td>
|
||||
<td>
|
||||
<a href="#message-buffer-size"><code>MessageBufferSize</code></a>
|
||||
</td>
|
||||
<td>
|
||||
Server-wide history of recent logging output from PageSpeed,
|
||||
including messages that are omitted from the server log file based on
|
||||
its log level.
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<p>
|
||||
Before 1.8.31.2, the main admin page is not available, but there
|
||||
are page-specific handlers for statistics, messages, and the
|
||||
console. In 1.8.31.2 and later, the <code>*_pagespeed_*</code> handlers, such
|
||||
as <code>mod_pagespeed_statistics</code>, will continue to be supported:
|
||||
<ul>
|
||||
<li>They provide read-only access to server operation. There may
|
||||
be cases where a site owner wants to share statistics or console
|
||||
information but not the ability to purge the cache.</li>
|
||||
<li>Existing configurations must continue to work after an upgrade to
|
||||
a release that supports pagespeed_admin.</li>
|
||||
<li>The admin pages may later gain support for modifying the server
|
||||
state</li>
|
||||
</ul>
|
||||
</p>
|
||||
<h2 id="config">Configuring Admin Pages</h2>
|
||||
|
||||
<p>
|
||||
In this table we use the term "server" for an Apache VirtualHost and
|
||||
an nginx Server Block. We use the term "global" to mean the entire
|
||||
Apache or nginx system, covering all the configured VirtualHost and
|
||||
Server Blocks.
|
||||
</p>
|
||||
<table>
|
||||
<thead>
|
||||
<tr><th>Apache Handler</th><th>Nginx Option</th><th>Version</th>
|
||||
<th>Description</th></tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td><code>pagespeed_admin</code></td>
|
||||
<td><code>AdminPath</code></td>
|
||||
<td>1.8.31.2+</td><td>Covers all administrative functions for
|
||||
a host in one handler. If you establish this handler,
|
||||
you don't need any of the other server-scoped methods. Only
|
||||
give 'admin' page access to clients that you are comfortable
|
||||
allowing to modify the state of your PageSpeed configuration.
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>pagespeed_global_admin</code></td>
|
||||
<td><code>GlobalAdminPath</code></td>
|
||||
<td>1.8.31.2+</td><td>Covers all administrative functions for
|
||||
the entire global state in one handler. If you establish this
|
||||
handler, you don't
|
||||
need <code>mod_pagespeed_global_statistics</code>.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>mod_pagespeed_statistics</code></td>
|
||||
<td><code>StatisticsPath</code> (1.8.31.2+)</td>
|
||||
<td>All</td><td>Launchpad for Statistics, Histograms, and
|
||||
a subset of the Caches page as described above.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>mod_pagespeed_global_statistics</code></td>
|
||||
<td><code>GlobalStatisticsPath</code> (1.8.31.2+)</td>
|
||||
<td>1.1+</td><td>Same as above, but aggregates statistics across all
|
||||
configured servers. You must enable
|
||||
<a href="#virtual-hosts-and-stats"
|
||||
><code>UsePerVHostStatistics</code></a> for separate global
|
||||
statistics to be retained, otherwise all statistics will be global.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>mod_pagespeed_message</code></td>
|
||||
<td><code>MessagesPath</code> (1.8.31.2+)</td>
|
||||
<td>1.0+</td><td>Displays recent log messages printed by PageSpeed,
|
||||
including messages that may be below the current server loglevel
|
||||
threshold such as "Info" messages. Requires that
|
||||
<a href="#message-buffer-size"><code>MessageBufferSize</code></a> be set.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>pagespeed_console</code></td>
|
||||
<td><code>ConsolePath</code> (1.8.31.2+)</td>
|
||||
<td>1.6+</td><td>Displays a <a href="console">console</a> of graphs
|
||||
of server optimization behavior over time.</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
<h3 id="handlers">Establishing Handlers in Apache</h2>
|
||||
<p>
|
||||
Each handler is optional; add them individually to enable
|
||||
admin features. Note that when you add handlers for
|
||||
<code>pagespeed_admin</code> and <code>pagespeed_global_admin</code>
|
||||
you are granting read/write access to server-state. The other handlers
|
||||
are read-only. A sample handler that filters on IP address is
|
||||
in the default configuration, whose general form is:
|
||||
</p>
|
||||
<pre class="prettyprint lang-sh">
|
||||
<Location /PATH>
|
||||
Order allow,deny
|
||||
Allow from localhost
|
||||
Allow from 127.0.0.1
|
||||
SetHandler HANDLER_NAME
|
||||
</Location>
|
||||
</pre>
|
||||
<p>
|
||||
You can choose any path for a handler, but you must specify the handler
|
||||
name exactly as it appears in the table above. By convention we use
|
||||
the handler name for the path. You may also want to
|
||||
employ login-based access to the admin pages, using
|
||||
<code>AllowOverride AuthConfig</code>. Please see the Apache
|
||||
<a href="https://httpd.apache.org/docs/2.2/howto/auth.html">2.2</a>
|
||||
or
|
||||
<a href="https://httpd.apache.org/docs/2.4/howto/auth.html">2.4</a>
|
||||
documentation for details.
|
||||
</p>
|
||||
<h3 id="handlers-nginx">Establishing Handlers in Nginx</h3>
|
||||
<p>
|
||||
In nginx, the handlers must be specified as location blocks.
|
||||
</p>
|
||||
<pre class="prettyprint lang-sh">
|
||||
location /ngx_pagespeed_statistics { allow 127.0.0.1; deny all; }
|
||||
location /ngx_pagespeed_global_statistics { allow 127.0.0.1; deny all; }
|
||||
location /ngx_pagespeed_message { allow 127.0.0.1; deny all; }
|
||||
location /pagespeed_console { allow 127.0.0.1; deny all; }
|
||||
location ~ ^/pagespeed_admin { allow 127.0.0.1; deny all; }
|
||||
location ~ ^/pagespeed_global_admin { allow 127.0.0.1; deny all; }
|
||||
</pre>
|
||||
<p class="note">
|
||||
Note that these handlers must precede the
|
||||
"<code>\.pagespeed\.([a-z]\.)?[a-z]{2}\.[^.]{10}\.[^.]+</code>" location block.
|
||||
</p>
|
||||
<p>
|
||||
In version 1.8.31.2 and later, the above location blocks are
|
||||
needed for each path you elect to enable in PageSpeed options:
|
||||
</p>
|
||||
<pre>
|
||||
pagespeed StatisticsPath /ngx_pagespeed_statistics;
|
||||
pagespeed GlobalStatisticsPath /ngx_pagespeed_global_statistics;
|
||||
pagespeed MessagesPath /ngx_pagespeed_message;
|
||||
pagespeed ConsolePath /pagespeed_console;
|
||||
pagespeed AdminPath /pagespeed_admin;
|
||||
pagespeed GlobalAdminPath /pagespeed_global_admin;
|
||||
</pre>
|
||||
<p>
|
||||
You can choose any path, as long as it's consistent between
|
||||
the <code>pagespeed Path</code> and the <code>location</code>. By
|
||||
convention we use the names as specified in the example.
|
||||
</p>
|
||||
<p>
|
||||
Prior to version 1.8.31.2, the above "Path" settings do not exist,
|
||||
and the failure to specify location blocks leaves the paths active
|
||||
with no access restrictions. The module will service requests
|
||||
to the paths whether the location blocks are specified or not.
|
||||
This applies to <code>/ngx_pagespeed_statistics</code>,
|
||||
<code>/ngx_pagespeed_global_statistics</code>,
|
||||
<code>/ngx_pagespeed_message</code>, and <code>/pagespeed_console</code>.
|
||||
</p>
|
||||
<p class="note">
|
||||
If you define access control for <code>/pagespeed_admin</code> or
|
||||
<code>/pagespeed_console</code>, you must do so earlier in the configuration
|
||||
file than the path to handle <code>.pagespeed</code> resources, to ensure
|
||||
that the handlers are disambiguated.
|
||||
</p>
|
||||
<h3 id="limiting-handlers">Limiting Handler Access</h3>
|
||||
<p class="note"><strong>Note: New feature as of 1.10.33.0</strong></p>
|
||||
<p>
|
||||
Apache's SetHandler access controls are accessible to anyone who can
|
||||
modify <code>.htaccess</code> files, so in a typical shared hosting context
|
||||
the global admin site isn't sufficiently protected. As of 1.10.33.0,
|
||||
PageSpeed allows setting an additional restriction of what domains are allowed
|
||||
to load handlers. For example, to deny access entirely, you could put:
|
||||
</p>
|
||||
<dl>
|
||||
<dt>Apache:<dd><pre class="prettyprint"
|
||||
>ModPagespeedStatisticsDomains Disallow *
|
||||
ModPagespeedGlobalStatisticsDomains Disallow *
|
||||
ModPagespeedMessagesDomains Disallow *
|
||||
ModPagespeedConsoleDomains Disallow *
|
||||
ModPagespeedAdminDomains Disallow *
|
||||
ModPagespeedGlobalAdminDomains Disallow *</pre>
|
||||
<dt>Nginx:<dd><pre class="prettyprint"
|
||||
>pagespeed StatisticsDomains Disallow *;
|
||||
pagespeed GlobalStatisticsDomains Disallow *;
|
||||
pagespeed MessagesDomains Disallow *;
|
||||
pagespeed ConsoleDomains Disallow *;
|
||||
pagespeed AdminDomains Disallow *;
|
||||
pagespeed GlobalAdminDomains Disallow *;</pre>
|
||||
</dl>
|
||||
<p>
|
||||
To allow access only to an admin, define a new VHost
|
||||
like <code>admin.example.com</code>, use standard web-server access control
|
||||
(IP or password) to restrict access to only that admin, and then at the top
|
||||
level of your config put:
|
||||
</p>
|
||||
<dl>
|
||||
<dt>Apache:<dd><pre class="prettyprint"
|
||||
>ModPagespeedStatisticsDomains Allow admin.example.com
|
||||
ModPagespeedGlobalStatisticsDomains Allow admin.example.com
|
||||
ModPagespeedMessagesDomains Allow admin.example.com
|
||||
ModPagespeedConsoleDomains Allow admin.example.com
|
||||
ModPagespeedAdminDomains Allow admin.example.com
|
||||
ModPagespeedGlobalAdminDomains Allow admin.example.com</pre>
|
||||
<dt>Nginx:<dd><pre class="prettyprint"
|
||||
>pagespeed StatisticsDomains Allow admin.example.com;
|
||||
pagespeed GlobalStatisticsDomains Allow admin.example.com;
|
||||
pagespeed MessagesDomains Allow admin.example.com;
|
||||
pagespeed ConsoleDomains Allow admin.example.com;
|
||||
pagespeed AdminDomains Allow admin.example.com;
|
||||
pagespeed GlobalAdminDomains Allow admin.example.com;</pre>
|
||||
</dl>
|
||||
<p>
|
||||
Now when you visit <code>admin.example.com/pagespeed_global_admin</code>
|
||||
you'll see global (server-level) admin information, but users are not able to
|
||||
access this under their own domain or turn the handler on
|
||||
with <code>.htaccess</code>.
|
||||
</p>
|
||||
<p>
|
||||
For all six of these options the default value is <code>Allow *</code>. If
|
||||
you explicitly <code>Allow</code> access to any site, all others are
|
||||
automatically <code>Disallow</code>ed. Wildcards are allowed, and additional
|
||||
directives are applied in sequence. For example, consider the following
|
||||
config:
|
||||
</p>
|
||||
<dl>
|
||||
<dt>Apache:<dd><pre class="prettyprint"
|
||||
>ModPagespeedAdminDomains Allow *.example.*
|
||||
ModPagespeedAdminDomains Disallow *.example.org
|
||||
ModPagespeedAdminDomains Allow www.example.org</pre>
|
||||
<dt>Nginx:<dd><pre class="prettyprint"
|
||||
>pagespeed AdminDomains Allow *.example.*;
|
||||
pagespeed AdminDomains Disallow *.example.org;
|
||||
pagespeed AdminDomains Allow www.example.org;</pre>
|
||||
</dl>
|
||||
<p>
|
||||
This would allow access to <code>www.example.com/pagespeed_admin</code>,
|
||||
and <code>www.example.org/pagespeed_admin</code> but
|
||||
not <code>shared.example.com/pagespeed_admin</code>.
|
||||
</p>
|
||||
|
||||
<h3 id="statistics">Shared Memory Statistics</h3>
|
||||
<p>
|
||||
By default PageSpeed collects cross-process statistics. While
|
||||
they're mostly intended for debugging and evaluation
|
||||
using <code>/mod_pagespeed_statistics</code>, <code>/ngx_pagespeed_statistics</code>,
|
||||
and the <a href="console">PageSpeed Console</a>, statistics are also
|
||||
necessary for limiting concurrent image rewrites
|
||||
and <a href="#rate_limit_background_fetches">background fetches</a>.
|
||||
It's not recommended to turn them off, as their performance impact
|
||||
is minimal, but if you need to you can do so with:
</p>
|
||||
<dl>
|
||||
<dt>Apache:<dd><pre class="prettyprint"
|
||||
>ModPagespeedStatistics off</pre>
|
||||
<dt>Nginx:<dd><pre class="prettyprint"
|
||||
>pagespeed Statistics off;</pre>
|
||||
</dl>
|
||||
|
||||
<h3 id="virtual-hosts-and-stats">Virtual hosts and statistics</h3>
|
||||
<p>
|
||||
You can choose whether PageSpeed aggregates its statistics
|
||||
over all virtual hosts (the default), or to keep separate counts for each. You
|
||||
can choose the latter by specifying
|
||||
<code>UsePerVHostStatistics on</code>. In that
|
||||
case, <code>/pagespeed_admin</code>, <code>/mod_pagespeed_statistics</code>
|
||||
and <code>/ngx_pagespeed_statistics</code> will show the data for
|
||||
whatever virtual host is being accessed. If you do turn per-virtual
|
||||
host statistics on, you can still access the aggregates
|
||||
under <code>/pagespeed_global_admin</code>, <code>/mod_pagespeed_global_statistics</code>
|
||||
or <code>/ngx_pagespeed_global_statistics</code>.
|
||||
</p>
|
||||
<dl>
|
||||
<dt>Apache:<dd><pre class="prettyprint">ModPagespeedUsePerVHostStatistics on</pre>
|
||||
<dt>Nginx:<dd><pre class="prettyprint">pagespeed UsePerVHostStatistics on;</pre>
|
||||
</dl>
|
||||
|
||||
<h3 id="message-buffer-size">Message Buffer Size</h3>
|
||||
<p>
|
||||
Determines the number of bytes of shared memory to allocate as a circular
|
||||
buffer for holding recent PageSpeed log messages. By default, the size of
|
||||
this buffer is zero, and no messages will be retained.
|
||||
</p>
|
||||
<dl>
|
||||
<dt>Apache:<dd><pre class="prettyprint">ModPagespeedMessageBufferSize 100000</pre>
|
||||
<dt>Nginx:<dd><pre class="prettyprint">pagespeed MessageBufferSize 100000;</pre>
|
||||
</dl>
|
||||
|
||||
</div>
|
||||
<!--#include virtual="_footer.html" -->
|
||||
</body>
|
||||
</html>
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 86 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 33 KiB |
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user