Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Support
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
B
bootstrap
Project overview
Project overview
Details
Activity
Releases
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Boards
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
Administrator
bootstrap
Commits
6bb73df5
Commit
6bb73df5
authored
Dec 15, 2013
by
Chris Rebert
Browse files
Options
Browse Files
Download
Plain Diff
Merge pull request #11890 from twbs/generic-caching
make S3 caching significantly more generic
parents
75db3550
95c72b57
Changes
2
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
52 additions
and
32 deletions
+52
-32
.travis.yml
.travis.yml
+2
-2
test-infra/node_modules_cache.py
test-infra/node_modules_cache.py
+50
-30
No files found.
.travis.yml
View file @
6bb73df5
...
@@ -6,9 +6,9 @@ before_install:
...
@@ -6,9 +6,9 @@ before_install:
install
:
install
:
-
if [ "$TWBS_TEST" = validate-html ]; then time gem install jekyll; fi
-
if [ "$TWBS_TEST" = validate-html ]; then time gem install jekyll; fi
-
time npm install -g grunt-cli
-
time npm install -g grunt-cli
-
time ./test-infra/node_modules_cache.py download || time npm install
-
time ./test-infra/node_modules_cache.py download
package.json ./node_modules
|| time npm install
after_script
:
after_script
:
-
if [ "$TWBS_TEST" = core ]; then time ./test-infra/node_modules_cache.py upload; fi
-
if [ "$TWBS_TEST" = core ]; then time ./test-infra/node_modules_cache.py upload
package.json ./node_modules
; fi
env
:
env
:
global
:
global
:
-
SAUCE_USERNAME
:
bootstrap
-
SAUCE_USERNAME
:
bootstrap
...
...
test-infra/node_modules_cache.py
View file @
6bb73df5
...
@@ -3,7 +3,7 @@ from __future__ import absolute_import, unicode_literals, print_function, divisi
...
@@ -3,7 +3,7 @@ from __future__ import absolute_import, unicode_literals, print_function, divisi
from
sys
import
argv
from
sys
import
argv
from
os
import
environ
,
stat
,
remove
as
_delete_file
from
os
import
environ
,
stat
,
remove
as
_delete_file
from
os.path
import
isfile
from
os.path
import
isfile
,
dirname
,
basename
,
abspath
from
hashlib
import
sha256
from
hashlib
import
sha256
from
subprocess
import
check_call
as
run
from
subprocess
import
check_call
as
run
...
@@ -12,7 +12,6 @@ from boto.s3.key import Key
...
@@ -12,7 +12,6 @@ from boto.s3.key import Key
from
boto.exception
import
S3ResponseError
from
boto.exception
import
S3ResponseError
NODE_MODULES_TARBALL
=
'node_modules.tar.gz'
NEED_TO_UPLOAD_MARKER
=
'.need-to-upload'
NEED_TO_UPLOAD_MARKER
=
'.need-to-upload'
BYTES_PER_MB
=
1024
*
1024
BYTES_PER_MB
=
1024
*
1024
try
:
try
:
...
@@ -25,7 +24,9 @@ def _sha256_of_file(filename):
...
@@ -25,7 +24,9 @@ def _sha256_of_file(filename):
hasher
=
sha256
()
hasher
=
sha256
()
with
open
(
filename
,
'rb'
)
as
input_file
:
with
open
(
filename
,
'rb'
)
as
input_file
:
hasher
.
update
(
input_file
.
read
())
hasher
.
update
(
input_file
.
read
())
return
hasher
.
hexdigest
()
file_hash
=
hasher
.
hexdigest
()
print
(
'sha256({}) = {}'
.
format
(
filename
,
file_hash
))
return
file_hash
def
_delete_file_quietly
(
filename
):
def
_delete_file_quietly
(
filename
):
...
@@ -35,52 +36,71 @@ def _delete_file_quietly(filename):
...
@@ -35,52 +36,71 @@ def _delete_file_quietly(filename):
pass
pass
def _tarball_size(directory):
    """Return the size of *directory*'s cache tarball as a human string.

    The tarball must already exist (created by _create_tarball or
    fetched by download).  NOTE: despite its name, the module constant
    BYTES_PER_MB is 1024*1024 — bytes per MiB — so the reported
    "MiB" unit is correct.
    """
    # Renamed from `kib`: the computed value is mebibytes, not kibibytes.
    mib = stat(_tarball_filename_for(directory)).st_size // BYTES_PER_MB
    return "{} MiB".format(mib)
def
_tarball_filename_for
(
directory
):
return
abspath
(
'./{}.tar.gz'
.
format
(
basename
(
directory
)))
def _create_tarball(directory):
    """Create a gzipped tarball of *directory* in the current directory.

    Runs `tar -C <parent-of-directory>` so the archive stores only the
    directory's basename (e.g. 'node_modules/...'), not its full path.
    Raises CalledProcessError (via check_call) if tar fails.
    """
    print("Creating tarball of {}...".format(directory))
    run(['tar', '-czf', _tarball_filename_for(directory),
         '-C', dirname(directory), basename(directory)])
def _extract_tarball(directory):
    """Extract *directory*'s cache tarball into the directory's parent.

    Inverse of _create_tarball: because the archive stores paths
    relative to the parent, extracting with `-C <parent>` recreates
    *directory* in place.  Raises CalledProcessError if tar fails.
    """
    print("Extracting tarball of {}...".format(directory))
    run(['tar', '-xzf', _tarball_filename_for(directory),
         '-C', dirname(directory)])
def download(directory):
    """Fetch *directory*'s cached tarball from S3 and unpack it in place.

    On any S3 error (missing key, auth failure, ...) this touches
    NEED_TO_UPLOAD_MARKER — so a later `upload` run knows the cache
    must be (re)populated — prints the error, and exits via SystemExit.
    Relies on the module-level boto `key` having been configured by
    the __main__ block.
    """
    # Clear any stale marker from a previous run before trying.
    _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
    try:
        print("Downloading {} tarball from S3...".format(basename(directory)))
        key.get_contents_to_filename(_tarball_filename_for(directory))
    except S3ResponseError as err:
        # Record that the cache needs repopulating, then bail out.
        open(NEED_TO_UPLOAD_MARKER, 'a').close()
        print(err)
        raise SystemExit("Cached {} download failed!".format(basename(directory)))
    print("Downloaded {}.".format(_tarball_size(directory)))
    _extract_tarball(directory)
    print("{} successfully installed from cache.".format(directory))
def upload(directory):
    """Tar up *directory* and push the tarball to the S3 cache.

    On success, removes NEED_TO_UPLOAD_MARKER so subsequent runs know
    the cache is current.  Relies on the module-level boto `key`
    having been configured by the __main__ block.
    """
    _create_tarball(directory)
    print("Uploading {} tarball to S3... ({})".format(basename(directory),
                                                      _tarball_size(directory)))
    key.set_contents_from_filename(_tarball_filename_for(directory))
    print("{} cache successfully updated.".format(directory))
    _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
if
__name__
==
'__main__'
:
if
__name__
==
'__main__'
:
# Uses environment variables:
# Uses environment variables:
# AWS_ACCESS_KEY_ID - AWS Access Key ID
# AWS_ACCESS_KEY_ID - AWS Access Key ID
# AWS_SECRET_ACCESS_KEY - AWS Secret Access Key
# AWS_SECRET_ACCESS_KEY - AWS Secret Access Key
argv
.
pop
(
0
)
argv
.
pop
(
0
)
if
len
(
argv
)
!=
1
:
if
len
(
argv
)
!=
3
:
raise
SystemExit
(
"USAGE: node_modules_cache.py <download | upload>"
)
raise
SystemExit
(
"USAGE: node_modules_cache.py <download | upload>
<dependencies file> <directory>
"
)
mode
=
argv
.
pop
()
mode
,
dependencies_file
,
directory
=
argv
conn
=
S3Connection
()
conn
=
S3Connection
()
bucket
=
conn
.
lookup
(
BUCKET_NAME
)
bucket
=
conn
.
lookup
(
BUCKET_NAME
)
if
bucket
is
None
:
if
bucket
is
None
:
raise
SystemExit
(
"Could not access bucket!"
)
raise
SystemExit
(
"Could not access bucket!"
)
package_json_hash
=
_sha256_of_file
(
'package.json'
)
dependencies_file_hash
=
_sha256_of_file
(
dependencies_file
)
print
(
'sha256(package.json) = '
+
package_json_hash
)
key
=
Key
(
bucket
,
package_json
_hash
)
key
=
Key
(
bucket
,
dependencies_file
_hash
)
key
.
storage_class
=
'REDUCED_REDUNDANCY'
key
.
storage_class
=
'REDUCED_REDUNDANCY'
if
mode
==
'download'
:
if
mode
==
'download'
:
_delete_file_quietly
(
NEED_TO_UPLOAD_MARKER
)
download
(
directory
)
try
:
print
(
"Downloading tarball from S3..."
)
key
.
get_contents_to_filename
(
NODE_MODULES_TARBALL
)
except
S3ResponseError
as
err
:
open
(
NEED_TO_UPLOAD_MARKER
,
'a'
)
.
close
()
print
(
err
)
raise
SystemExit
(
"Cached node_modules download failed!"
)
print
(
"Downloaded {}."
.
format
(
_tarball_size
()))
print
(
"Extracting tarball..."
)
run
([
'tar'
,
'xzf'
,
NODE_MODULES_TARBALL
])
print
(
"node_modules successfully installed from cache."
)
elif
mode
==
'upload'
:
elif
mode
==
'upload'
:
if
isfile
(
NEED_TO_UPLOAD_MARKER
):
if
isfile
(
NEED_TO_UPLOAD_MARKER
):
# FIXME
print
(
"Creating tarball..."
)
upload
(
directory
)
run
([
'tar'
,
'czf'
,
NODE_MODULES_TARBALL
,
'node_modules'
])
print
(
"Uploading tarball to S3... ({})"
.
format
(
_tarball_size
()))
key
.
set_contents_from_filename
(
NODE_MODULES_TARBALL
)
print
(
"node_modules cache successfully updated."
)
_delete_file_quietly
(
NEED_TO_UPLOAD_MARKER
)
else
:
else
:
print
(
"No need to upload anything."
)
print
(
"No need to upload anything."
)
else
:
else
:
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment