From 68cc690ca4295cd0bd9afb64974ff33a4d13441b Mon Sep 17 00:00:00 2001 From: Eli Bendersky Date: Thu, 2 Sep 2021 06:42:17 -0700 Subject: [PATCH] Add a wrapper script for tools/upload.go The script sets up the -region and -bucket flags to point to the right S3 location. AWS_* env vars should still be set manually. The old Ruby uploading code is moved to upload-ruby-old for now (will be deleted later) --- tools/upload | 43 ++----------------------------------------- tools/upload-ruby-old | 42 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 44 insertions(+), 41 deletions(-) create mode 100755 tools/upload-ruby-old diff --git a/tools/upload b/tools/upload index b1eda68..5ca34ef 100755 --- a/tools/upload +++ b/tools/upload @@ -1,42 +1,3 @@ -#!/usr/bin/env ruby +#!/bin/bash -# Upload the contents in public/ to the S3 bucket from which we serve -# gobyexample.com. We use this instead of `aws iam sync` because that command -# doesn't correctly guess the text/html mime time of the extension-less files. -# We didn't write this in Go because we had already written it in Ruby for -# another website and didn't want to re-write it. - -require "aws-sdk" -require "set" - -s3 = Aws::S3::Client.new( - region: "us-east-1", - credentials: Aws::Credentials.new(ENV["AWS_ACCESS_KEY_ID"], ENV["AWS_SECRET_ACCESS_KEY"]) -) - -# (Re-)upload each file to S3. We're not worried about what's currently there. -Dir.glob("./public/**/**").each do |local_path| - next if File.directory?(local_path) - - # Derive final path. - s3_path = local_path.sub("./public/", "") - - # Infer content type, including for HTML files that need pretty URLs. 
- content_type = - case s3_path - when /\.ico$/ then "image/x-icon" - when /\.png$/ then "image/png" - when /\.css$/ then "text/css" - else "text/html" - end - - puts("Uploading #{s3_path} (#{content_type})") - - File.open(local_path, "rb") do |local_file| - s3.put_object( - bucket: "gobyexample.com", - key: s3_path, - content_type: content_type, - body: local_file) - end -end +exec go run tools/upload.go -region us-east-1 -bucket gobyexample.com diff --git a/tools/upload-ruby-old b/tools/upload-ruby-old new file mode 100755 index 0000000..b1eda68 --- /dev/null +++ b/tools/upload-ruby-old @@ -0,0 +1,42 @@ +#!/usr/bin/env ruby + +# Upload the contents in public/ to the S3 bucket from which we serve +# gobyexample.com. We use this instead of `aws s3 sync` because that command +# doesn't correctly guess the text/html mime type of the extension-less files. +# We didn't write this in Go because we had already written it in Ruby for +# another website and didn't want to re-write it. + +require "aws-sdk" +require "set" + +s3 = Aws::S3::Client.new( + region: "us-east-1", + credentials: Aws::Credentials.new(ENV["AWS_ACCESS_KEY_ID"], ENV["AWS_SECRET_ACCESS_KEY"]) +) + +# (Re-)upload each file to S3. We're not worried about what's currently there. +Dir.glob("./public/**/**").each do |local_path| + next if File.directory?(local_path) + + # Derive final path. + s3_path = local_path.sub("./public/", "") + + # Infer content type, including for HTML files that need pretty URLs. + content_type = + case s3_path + when /\.ico$/ then "image/x-icon" + when /\.png$/ then "image/png" + when /\.css$/ then "text/css" + else "text/html" + end + + puts("Uploading #{s3_path} (#{content_type})") + + File.open(local_path, "rb") do |local_file| + s3.put_object( + bucket: "gobyexample.com", + key: s3_path, + content_type: content_type, + body: local_file) + end +end