#!/usr/bin/env ruby
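# create-repositories (from gocd/build_utilities): builds the GoCD apt and yum
# package repositories from packages already uploaded to S3, then publishes the
# repository metadata back to the same bucket.
#
# If this file is not already being run by rake, re-exec it via `bundle exec rake -f`
# so that the task definitions below have Rake's DSL (desc, task, sh, mkdir_p, ...).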
if File.basename($PROGRAM_NAME) != 'rake'
  require 'shellwords'
  puts "bundle exec rake -f #{Shellwords.escape($PROGRAM_NAME)} #{Shellwords.shelljoin(ARGV)}"
  exec "bundle exec rake -f #{Shellwords.escape($PROGRAM_NAME)} #{Shellwords.shelljoin(ARGV)}"
end

require 'rubygems'
require 'json'
require 'yaml'
require 'open-uri'
require 'pathname'
require_relative 'docker/tag'
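
# ~/.s3.yml maps a bucket "type" (selected via the BUCKET_TYPE environment variable)
# to an S3 bucket name. Illustrative example only; the bucket names (and any key
# other than downloads-experimental) below are placeholders, not the real ones:
#
#   downloads-experimental: example-gocd-experimental
#   downloads: example-gocd-releases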
S3_BUCKET_CONFIG = YAML.load_file(File.expand_path('~/.s3.yml'))
S3_BUCKET = S3_BUCKET_CONFIG[ENV['BUCKET_TYPE'] || 'downloads-experimental']
$stderr.puts "*** Using bucket s3://#{S3_BUCKET} for creating repositories"
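
# All work is staged under ./target: packages and generated metadata go into
# target/repo, and temporary GPG home directories are created under target/tmp.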
target_dir = Pathname.new('target')

desc 'clean'
task :clean do
  rm_rf target_dir
end

desc 'init'
task :init do
  mkdir_p target_dir
end
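
# Stop and remove a container by name, ignoring failures, so the tasks below can
# call this unconditionally before (and, via at_exit, after) each docker run.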
def terminate_docker_container(name)
  sh("docker stop --time=2 '#{name}' > /dev/null 2>&1 || true") # ignore failure if the container is not running
  sh("docker rm '#{name}' > /dev/null 2>&1 || true")            # ignore failure if the container does not exist
end
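
# The :apt and :yum tasks share the same flow: sync the relevant packages down from
# the bucket into target/repo, run a signing/indexing container (gocdcontrib/aptrepo
# or gocdcontrib/yumrepo) against /repo with a copy of the local ~/.gnupg mounted in,
# then export the public GPG signing key alongside the repository.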
desc 'create debian repository'
task :apt do
  sh("aws s3 sync --exclude='*' --include '*.deb' s3://#{S3_BUCKET} #{target_dir.join('repo')}")
  mkdir_p 'target/tmp'
  require 'tmpdir'
  sh("docker pull gocdcontrib/aptrepo:#{DOCKER_TAG}")
  terminate_docker_container('aptrepo')
  Dir.mktmpdir(nil, 'target/tmp') do |tmp_dir|
    tmp_dir = Pathname.new(tmp_dir).expand_path
    cp_r File.expand_path('~/.gnupg'), tmp_dir
    sh("docker run --privileged=false --interactive=true --attach stdout --attach stderr --rm --env 'GPG_SIGNING_KEY_ID=8816C449' --env 'SIGNING_USER=#{Process.uid}' --env 'SIGNING_GROUP=#{Process.gid}' --volume #{target_dir.expand_path}/repo:/repo --volume #{tmp_dir}/.gnupg:/root/.gnupg --name 'aptrepo' 'gocdcontrib/aptrepo:#{DOCKER_TAG}'")
  end
  sh('gpg --armor --export 8816C449 > target/repo/GOCD-GPG-KEY.asc')
  at_exit { terminate_docker_container('aptrepo') }
end
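
# Same flow as :apt, but for '*.rpm' packages via the gocdcontrib/yumrepo image.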
desc 'create yum repository'
task :yum do
  sh("aws s3 sync --exclude='*' --include '*.rpm' s3://#{S3_BUCKET} #{target_dir.join('repo')}")
  mkdir_p 'target/tmp'
  require 'tmpdir'
  sh("docker pull gocdcontrib/yumrepo:#{DOCKER_TAG}")
  terminate_docker_container('yumrepo')
  Dir.mktmpdir(nil, 'target/tmp') do |tmp_dir|
    tmp_dir = Pathname.new(tmp_dir).expand_path
    cp_r File.expand_path('~/.gnupg'), tmp_dir
    sh("docker run --privileged=false --interactive=false --attach stdout --attach stderr --rm --env 'GPG_SIGNING_KEY_ID=8816C449' --env 'SIGNING_USER=#{Process.uid}' --env 'SIGNING_GROUP=#{Process.gid}' --volume #{target_dir.expand_path}/repo:/repo --volume #{tmp_dir}/.gnupg:/root/.gnupg --name 'yumrepo' 'gocdcontrib/yumrepo:#{DOCKER_TAG}'")
  end
  sh('gpg --armor --export 8816C449 > target/repo/GOCD-GPG-KEY.asc')
  at_exit { terminate_docker_container('yumrepo') }
end
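
# Pull down every per-release metadata.json from the bucket and merge them into a
# single releases.json at the root of the repository.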
desc "create releases.json file"
task :releases_json do
  sh("aws s3 sync --exclude='*' --include '**/metadata.json' s3://#{S3_BUCKET} #{target_dir.join('repo')}")
  json = Dir["#{target_dir.join('repo', '**', '*', 'metadata.json')}"].sort.collect do |f|
    JSON.parse(File.read(f))
  end
  File.open(target_dir.join('repo', 'releases.json'), 'w') do |f|
    f.puts(JSON.generate(json))
  end
end
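
# Publish the generated metadata. Indexes that change on every release (the apt index
# files, repomd.xml, releases.json, repoview pages) get a short cache TTL; the rest of
# the yum metadata is cached for a year.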
desc "upload the repository metadata"
task :upload do
  %w(GOCD-GPG-KEY.asc InRelease Packages Packages.bz2 Packages.gz Release Release.gpg releases.json).each do |f|
    # low cache ttl
    sh("aws s3 cp #{target_dir.join('repo', f)} s3://#{S3_BUCKET}/#{f} --acl public-read --cache-control 'max-age=600'")
  end

  # yum repomd.xml (low cache ttl)
  sh("aws s3 sync #{target_dir.join('repo', 'repodata')} s3://#{S3_BUCKET}/repodata/ --delete --acl public-read --cache-control 'max-age=600' --exclude '*' --include 'repomd.xml*'")

  # rest of the yum metadata (high cache ttl)
  sh("aws s3 sync #{target_dir.join('repo', 'repodata')} s3://#{S3_BUCKET}/repodata/ --delete --acl public-read --cache-control 'max-age=31536000'")

  # yum repoview (low cache ttl)
  sh("aws s3 sync #{target_dir.join('repo', 'repoview')} s3://#{S3_BUCKET}/repoview/ --delete --acl public-read --cache-control 'max-age=600'")
end

task default: [:clean, :init, :apt, :yum, :releases_json, :upload]
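
# Typical invocation (illustrative; assumes AWS credentials, docker, gpg and the
# signing key 8816C449 are already configured on the host):
#
#   BUCKET_TYPE=downloads-experimental ./create-repositories
#
# which runs clean, init, apt, yum, releases_json and upload in order.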