From 862cc891e71dbfbc3cd392df12d5653ead705717 Mon Sep 17 00:00:00 2001
From: Tatsuki Sugiura
Date: Tue, 10 May 2016 17:54:31 +0900
Subject: [PATCH] Partial upload support by target_dir argument.

---
 lib/osdn/cli/command/frs_upload.rb | 253 ++++++++++++++++++++-----------------
 1 file changed, 137 insertions(+), 116 deletions(-)

diff --git a/lib/osdn/cli/command/frs_upload.rb b/lib/osdn/cli/command/frs_upload.rb
index 64349c6..a8938e6 100644
--- a/lib/osdn/cli/command/frs_upload.rb
+++ b/lib/osdn/cli/command/frs_upload.rb
@@ -5,8 +5,8 @@ module OSDN; module CLI; module Command
       puts "Options:"
       puts " -n --dry-run Do noting (use with global -v to inspect)"
       puts " -p --project= Target project (numeric id or name)"
-      #puts " --package= Target package (numeric id)"
-      #puts " --release= Target release (numeric id)"
+      puts " --package= Target package (numeric id)"
+      puts " --release= Target release (numeric id)"
       puts " -v --visibility="
       puts " Default visibility for newly created items"
       puts " --force-digest Calc local file digest forcely"
@@ -17,7 +17,8 @@ module OSDN; module CLI; module Command
       opts = GetoptLong.new(
         [ '--dry-run', '-n', GetoptLong::NO_ARGUMENT ],
         [ '--project', '-p', GetoptLong::REQUIRED_ARGUMENT ],
-        [ '--release', '-r', GetoptLong::REQUIRED_ARGUMENT ],
+        [ '--package', GetoptLong::REQUIRED_ARGUMENT ],
+        [ '--release', GetoptLong::REQUIRED_ARGUMENT ],
         [ '--visibility', '-v', GetoptLong::REQUIRED_ARGUMENT ],
         [ '--force-digest', GetoptLong::NO_ARGUMENT],
       )
@@ -26,12 +27,12 @@ module OSDN; module CLI; module Command
         when '--project'
           arg.empty? or
             @target_proj = arg
-        #when '--release'
-        #  arg.empty? or
-        #    @target_release = arg
-        #when '--package'
-        #  arg.empty? or
-        #    @target_package = arg
+        when '--release'
+          arg.empty? or
+            @target_release = arg
+        when '--package'
+          arg.empty? or
+            @target_package = arg
         when '--visibility'
           unless %w(public private hidden).member?(arg)
             logger.fatal "Invalid visibility status: #{arg}"
@@ -48,127 +49,147 @@ module OSDN; module CLI; module Command
       @target_dir = Pathname.new(ARGV.shift || '.')
       proj_info = api.get_project target_proj # check project existance
-      Pathname.glob(@target_dir+'*').sort.each do |pdir|
-        if cur_pkgid = load_variables(pdir).package_id
-          # check package existance on server
+      vars = load_variables(@target_dir)
+
+      if @target_release || vars.release_id
+        process_release(@target_dir)
+      elsif @target_package || vars.package_id
+        process_package(@target_dir)
+      else
+        Pathname.glob(@target_dir+'*').sort.each do |pdir|
+          process_package(pdir)
+        end
+      end
+    end
+
+    def self.description
+      "Upload local file tree and create package/release implicitly."
+    end
+
+    def process_package(pdir)
+      if cur_pkgid = load_variables(pdir).package_id
+        # check package existance on server
+        begin
+          api.get_package target_proj, target_package(pdir)
+        rescue OSDNClient::ApiError => e
           begin
-            api.get_package target_proj, target_package(pdir)
-          rescue OSDNClient::ApiError => e
-            begin
-              err = JSON.parse(e.response_body)
-            rescue
-              raise e
-            end
-            if err['status'] == 404
-              logger.warn "Package ##{cur_pkgid} has been deleted on server and local directory '#{pdir}' remains. You can delete the local directory or delete '#{pdir}/.osdn.vars' file to create a package again with new ID."
- next - end + err = JSON.parse(e.response_body) + rescue raise e end - else - logger.info "Createing new package '#{pdir.basename}'" - if @dry_run - pinfo = Hashie::Mash.new id: '(dry-run)', name: pdir.basename, url: '(dry-run)' - else - pinfo = api.create_package target_proj, pdir.basename, visibility: @visibility - update_variables pdir, package_id: pinfo.id + if err['status'] == 404 + logger.warn "Package ##{cur_pkgid} has been deleted on server and local directory '#{pdir}' remains. You can delete the local directory or delete '#{pdir}/.osdn.vars' file to create a package again with new ID." + return false end - $stdout.puts "New package '#{pinfo.name}' has been created; #{pinfo.url}" + raise e end + else + logger.info "Createing new package '#{pdir.basename}'" + if @dry_run + pinfo = Hashie::Mash.new id: '(dry-run)', name: pdir.basename, url: '(dry-run)' + else + pinfo = api.create_package target_proj, pdir.basename, visibility: @visibility + update_variables pdir, package_id: pinfo.id + end + $stdout.puts "New package '#{pinfo.name}' has been created; #{pinfo.url}" + end - Pathname.glob(pdir + '*').sort.each do |rdir| - if !rdir.directory? - logger.warn "Skip normal file '#{rdir}' in release level" - next - end + Pathname.glob(pdir + '*').sort.each do |rdir| + process_release(rdir) + end + end + + def process_release(rdir) + if !rdir.directory? + logger.warn "Skip normal file '#{rdir}' in release level" + return false + end - vars = load_variables(rdir) - rinfo = nil - if vars.release_id - begin - rinfo = api.get_release target_proj, target_package(rdir), target_release(rdir) - rescue OSDNClient::ApiError => e - begin - err = JSON.parse(e.response_body) - rescue - raise e - end - if err['status'] == 404 - logger.warn "Release ##{vars.release_id} has been deleted on server and local directory '#{rdir}' remains. You can delete the local directory or delete '#{rdir}/.osdn.vars' file to create a release again with new ID." - next - end - raise e - end - else vars.release_id - logger.info "Createing new release '#{rdir.basename}'" - if @dry_run - rinfo = Hashie::Mash.new id: '(dry-run)', name: rdir.basename, url: '(dry-run)', files: [] - else - rinfo = api.create_release target_proj, target_package(rdir), rdir.basename, visibility: @visibility - update_variables rdir, release_id: rinfo.id - end - $stdout.puts "New release '#{rinfo.name}' has been created; #{rinfo.url}" + vars = load_variables(rdir) + rinfo = nil + if vars.release_id + begin + rinfo = api.get_release target_proj, target_package(rdir), target_release(rdir) + rescue OSDNClient::ApiError => e + begin + err = JSON.parse(e.response_body) + rescue + raise e end - - Pathname.glob(rdir + '*').sort.each do |file| - if file.directory? - logger.error "Skip direcotry #{file}" - next - end - - vars = load_variables(rdir) - digests = nil - if !@force_digest && vars.local_file_info && - vars.local_file_info[file.basename.to_s] - finfo = vars.local_file_info[file.basename.to_s] - if finfo[:size] == file.size && finfo.mtime == file.mtime - digests = vars.local_file_info[file.basename.to_s].digests - end - end - - unless digests - logger.info "Calculating digest for #{file}..." 
- digests = { - sha256: hexdigest(Digest::SHA256, file), - sha1: hexdigest(Digest::SHA1, file), - md5: hexdigest(Digest::MD5, file), - } - update_variables rdir, {local_file_info: {file.basename.to_s => {digests: digests, mtime: file.mtime, size: file.size}}} - end - if remote_f = rinfo.files.find { |f| f.name == file.basename.to_s } - if digests.find { |type, dig| dig != remote_f.send("digest_#{type}") } - logger.error "#{file} was changed from remote file! Please delete remote file before uploading new one." - end - logger.info "Skip already uploaded file '#{file}'" - else - logger.info "Uploading file #{file} (#{file.size} bytes)" - if @dry_run - finfo = Hashie::Mash.new id: '(dry-run)', url: '(dry-run)' - else - logger.level <= Logger::INFO and - OSDN::CLI._show_progress = true - fio = file.open - logger.info "Starting upload #{file}..." - finfo = api.create_release_file target_proj, target_package(rdir), target_release(rdir), fio, visibility: @visibility - fio.close - OSDN::CLI._show_progress = false - if digests.find { |type, dig| dig != finfo.send("digest_#{type}") } - logger.error "File digests are mismatch! Upload file #{file} may be broken! Please check." - else - logger.info "Upload complete." - end - end - $stdout.puts "New file '#{file}' has been uploaded; #{finfo.url}" - end + if err['status'] == 404 + logger.warn "Release ##{vars.release_id} has been deleted on server and local directory '#{rdir}' remains. You can delete the local directory or delete '#{rdir}/.osdn.vars' file to create a release again with new ID." + return false end + raise e end + else vars.release_id + logger.info "Createing new release '#{rdir.basename}'" + if @dry_run + rinfo = Hashie::Mash.new id: '(dry-run)', name: rdir.basename, url: '(dry-run)', files: [] + else + rinfo = api.create_release target_proj, target_package(rdir), rdir.basename, visibility: @visibility + update_variables rdir, release_id: rinfo.id + end + $stdout.puts "New release '#{rinfo.name}' has been created; #{rinfo.url}" + end + Pathname.glob(rdir + '*').sort.each do |file| + process_file(file, rdir, rinfo) end end - def self.description - "Upload local file tree and create package/release implicitly." - end + def process_file(file, rdir, rinfo) + if file.directory? + logger.error "Skip direcotry #{file}" + return false + end + + vars = load_variables(rdir) + digests = nil + if !@force_digest && vars.local_file_info && + vars.local_file_info[file.basename.to_s] + finfo = vars.local_file_info[file.basename.to_s] + if finfo[:size] == file.size && finfo.mtime == file.mtime + digests = vars.local_file_info[file.basename.to_s].digests + end + end + + unless digests + logger.info "Calculating digest for #{file}..." + digests = { + sha256: hexdigest(Digest::SHA256, file), + sha1: hexdigest(Digest::SHA1, file), + md5: hexdigest(Digest::MD5, file), + } + update_variables rdir, {local_file_info: {file.basename.to_s => {digests: digests, mtime: file.mtime, size: file.size}}} + end + if remote_f = rinfo.files.find { |f| f.name == file.basename.to_s } + if digests.find { |type, dig| dig != remote_f.send("digest_#{type}") } + logger.error "#{file} was changed from remote file! Please delete remote file before uploading new one." 
+ end + logger.info "Skip already uploaded file '#{file}'" + return + end + logger.info "Uploading file #{file} (#{file.size} bytes)" + if @dry_run + finfo = Hashie::Mash.new id: '(dry-run)', url: '(dry-run)' + else + logger.level <= Logger::INFO and + OSDN::CLI._show_progress = true + fio = file.open + logger.info "Starting upload #{file}..." + finfo = api.create_release_file target_proj, target_package(rdir), target_release(rdir), fio, visibility: @visibility + fio.close + OSDN::CLI._show_progress = false + if digests.find { |type, dig| dig != finfo.send("digest_#{type}") } + logger.error "File digests are mismatch! Upload file #{file} may be broken! Please check." + else + logger.info "Upload complete." + end + end + $stdout.puts "New file '#{file}' has been uploaded; #{finfo.url}" + end + private def target_proj @target_proj and return @target_proj -- 2.11.0
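
Usage sketch: the positional target_dir argument, together with the new --package= and --release= options (or ids already recorded in a directory's .osdn.vars), selects how much of the local tree is uploaded. Option names are taken from the patch above; the executable and subcommand spelling (osdn frs_upload) and all id/path values are assumptions for illustration only:

  # whole tree, one package per subdirectory (previous behavior)
  osdn frs_upload -p myproject ./dist
  # a single package directory, targeting an existing package id
  osdn frs_upload -p myproject --package=1234 ./dist/mypackage
  # a single release directory, checked first with a dry run
  osdn frs_upload -n -p myproject --package=1234 --release=5678 ./dist/mypackage/1.0.0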