# Print usage for this subcommand, then declare the accepted options.
puts "Options:"
puts " -n --dry-run Do nothing (use with global -v to inspect)"
puts " -p --project=<project> Target project (numeric id or name)"
puts " --package=<package-id> Target package (numeric id)"
puts " --release=<release-id> Target release (numeric id)"
puts " -v --visibility=<public|private|hidden>"
puts " Default visibility for newly created items"
puts " --force-digest Calc local file digest forcibly"
opts = GetoptLong.new(
  [ '--dry-run', '-n', GetoptLong::NO_ARGUMENT ],
  [ '--project', '-p', GetoptLong::REQUIRED_ARGUMENT ],
  [ '--package', GetoptLong::REQUIRED_ARGUMENT ],
  [ '--release', GetoptLong::REQUIRED_ARGUMENT ],
  [ '--visibility', '-v', GetoptLong::REQUIRED_ARGUMENT ],
  [ '--force-digest', GetoptLong::NO_ARGUMENT],
)
when '--project'
arg.empty? or
@target_proj = arg
- #when '--release'
- # arg.empty? or
- # @target_release = arg
- #when '--package'
- # arg.empty? or
- # @target_package = arg
+ when '--release'
+ arg.empty? or
+ @target_release = arg
+ when '--package'
+ arg.empty? or
+ @target_package = arg
when '--visibility'
unless %w(public private hidden).member?(arg)
logger.fatal "Invalid visibility status: #{arg}"
end
end
- @target_dir = Pathname.new(ARGV.shift || '.')
+ (ARGV.empty? ? ['.'] : ARGV).each do |d|
+ @target_dir = Pathname.new(d)
+ process_target
+ end
+ end
+
# Handle one target directory: decide whether it denotes a release, a
# package, or a project root, and dispatch accordingly.
def process_target
  api.get_project target_proj # raises unless the project exists
  vars = load_variables(@target_dir)
  parent_vars = load_variables(@target_dir.parent)

  if @target_release || vars.release_id ||
     (parent_vars.package_id && !vars.release_id) # new release under a package dir
    process_release(@target_dir)
  elsif @target_package || vars.package_id
    process_package(@target_dir)
  else
    # Project root: treat every entry as a package directory.
    Pathname.glob(@target_dir + '*').sort.each do |pdir|
      process_package(pdir)
    end
  end
end
# One-line summary displayed in the CLI command listing.
def self.description
  "Upload local file tree and create package/release implicitly."
end
# Ensure the package for directory +pdir+ exists remotely (creating it
# unless --dry-run), then process each subdirectory as a release.
# Returns false when the remote package was deleted out from under us.
def process_package(pdir)
  if cur_pkgid = load_variables(pdir).package_id
    # Check package existence on server.
    begin
      api.get_package target_proj, target_package(pdir)
    rescue OSDNClient::ApiError => e
      begin
        err = JSON.parse(e.response_body)
      rescue
        raise e # response body was not JSON; re-raise the original error
      end
      if err['status'] == 404
        logger.warn "Package ##{cur_pkgid} has been deleted on server and local directory '#{pdir}' remains. You can delete the local directory or delete '#{pdir}/.osdn.vars' file to create a package again with new ID."
        return false
      end
      raise e
    end
  else
    logger.info "Creating new package '#{pdir.basename}'"
    if @dry_run
      pinfo = Hashie::Mash.new id: '(dry-run)', name: pdir.basename, url: '(dry-run)'
    else
      pinfo = api.create_package target_proj, pdir.basename, visibility: @visibility
      update_variables pdir, package_id: pinfo.id
    end
    $stdout.puts "New package '#{pinfo.name}' has been created; #{pinfo.url}"
  end

  # Every entry under the package directory is a release candidate.
  Pathname.glob(pdir + '*').sort.each do |rdir|
    process_release(rdir)
  end
end
# Ensure the release for directory +rdir+ exists remotely (creating it
# unless --dry-run), then upload each file found inside it.
# Returns false for non-directories and for releases deleted on the server.
def process_release(rdir)
  unless rdir.directory?
    logger.warn "Skip normal file '#{rdir}' in release level"
    return false
  end

  vars = load_variables(rdir)
  rinfo = nil
  if vars.release_id
    begin
      rinfo = api.get_release target_proj, target_package(rdir), target_release(rdir)
    rescue OSDNClient::ApiError => e
      begin
        err = JSON.parse(e.response_body)
      rescue
        raise e # response body was not JSON; re-raise the original error
      end
      if err['status'] == 404
        logger.warn "Release ##{vars.release_id} has been deleted on server and local directory '#{rdir}' remains. You can delete the local directory or delete '#{rdir}/.osdn.vars' file to create a release again with new ID."
        return false
      end
      raise e
    end
  else
    logger.info "Creating new release '#{rdir.basename}'"
    if @dry_run
      rinfo = Hashie::Mash.new id: '(dry-run)', name: rdir.basename, url: '(dry-run)', files: []
    else
      rinfo = api.create_release target_proj, target_package(rdir), rdir.basename, visibility: @visibility
      update_variables rdir, release_id: rinfo.id
    end
    $stdout.puts "New release '#{rinfo.name}' has been created; #{rinfo.url}"
  end

  Pathname.glob(rdir + '*').sort.each do |file|
    process_file(file, rdir, rinfo)
  end
end
# Upload +file+ into release +rinfo+, skipping files already uploaded.
# Digests are cached in the release directory's .osdn.vars (validated by
# size and mtime) unless --force-digest was given.
def process_file(file, rdir, rinfo)
  if file.directory?
    logger.error "Skip directory #{file}"
    return false
  end

  vars = load_variables(rdir)
  digests = nil
  if !@force_digest && vars.local_file_info &&
     vars.local_file_info[file.basename.to_s]
    finfo = vars.local_file_info[file.basename.to_s]
    # Reuse cached digests only when size and mtime still match.
    if finfo[:size] == file.size && finfo.mtime == file.mtime
      digests = finfo.digests
    end
  end

  unless digests
    logger.info "Calculating digest for #{file}..."
    digests = {
      sha256: hexdigest(Digest::SHA256, file),
      sha1: hexdigest(Digest::SHA1, file),
      md5: hexdigest(Digest::MD5, file),
    }
    update_variables rdir, {local_file_info: {file.basename.to_s => {digests: digests, mtime: file.mtime, size: file.size}}}
  end

  if remote_f = rinfo.files.find { |f| f.name == file.basename.to_s }
    if digests.find { |type, dig| dig != remote_f.send("digest_#{type}") }
      logger.error "#{file} was changed from remote file! Please delete remote file before uploading new one."
    end
    logger.info "Skip already uploaded file '#{file}'"
    return
  end

  logger.info "Uploading file #{file} (#{file.size} bytes)"
  if @dry_run
    finfo = Hashie::Mash.new id: '(dry-run)', url: '(dry-run)'
  else
    logger.level <= Logger::INFO and
      OSDN::CLI._show_progress = true
    logger.info "Starting upload #{file}..."
    begin
      fio = file.open
      begin
        finfo = api.create_release_file target_proj, target_package(rdir), target_release(rdir), fio, visibility: @visibility
      ensure
        fio.close # close the handle even when the upload raises
      end
    ensure
      OSDN::CLI._show_progress = false
    end
    if digests.find { |type, dig| dig != finfo.send("digest_#{type}") }
      logger.error "File digests mismatch! Upload file #{file} may be broken! Please check."
    else
      logger.info "Upload complete."
    end
  end
  $stdout.puts "New file '#{file}' has been uploaded; #{finfo.url}"
end
+
private
def target_proj
@target_proj and return @target_proj