aboutsummaryrefslogtreecommitdiffstats
path: root/app
diff options
context:
space:
mode:
authorRobert2017-08-21 05:13:11 +0200
committerRobert2017-08-21 05:26:55 +0200
commit327b1ec771a056451b0635ba80cc346fa1765299 (patch)
tree3790696ce944f0412a96fea21fca6c8667f1bd64 /app
parent8eaa9249825f6eb6d667291fd9aa26bbde1ac7e3 (diff)
downloadchouette-core-327b1ec771a056451b0635ba80cc346fa1765299.tar.bz2
Refs: #4273@20h; Reverse Engineering RubyZip fixed ZipService
- Replaced the lazy Enumerator over get_next_entry and group_by with explicit looping and yielding to an instance-level enumerator that yields streams per subdir (which represents a referential). - Fixtures and Specs that use a REAL use case. Next: Get rid of metadata kludge (should fix failing specs)
Diffstat (limited to 'app')
-rw-r--r--app/services/http_service.rb2
-rw-r--r--app/services/zip_service.rb74
-rw-r--r--app/workers/workbench_import_worker.rb22
3 files changed, 53 insertions, 45 deletions
diff --git a/app/services/http_service.rb b/app/services/http_service.rb
index ceaa89b2f..ae7d0e413 100644
--- a/app/services/http_service.rb
+++ b/app/services/http_service.rb
@@ -39,8 +39,6 @@ module HTTPService extend self
params.update( name => Faraday::UploadIO.new(value, mime_type, as_name ) )
end
- require 'pry'
- binding.pry
return c.post path, params
end
end
diff --git a/app/services/zip_service.rb b/app/services/zip_service.rb
index 778bfd06d..f2e7fc2ed 100644
--- a/app/services/zip_service.rb
+++ b/app/services/zip_service.rb
@@ -1,55 +1,63 @@
class ZipService
- attr_reader :current_entry, :zip_data
+ class Subdir < Struct.new( :name, :stream)
+ end
+
+ attr_reader :current_key, :current_output, :yielder
def initialize data
- @zip_data = data
- @current_entry = nil
- end
-
- class << self
- def convert_entries entries
- -> output_stream do
- entries.each do |e|
- output_stream.put_next_entry e.name
- output_stream.write e.get_input_stream.read
- end
- end
- end
+ @zip_data = StringIO.new(data)
+ end
- def entries input_stream
- Enumerator.new do |enum|
- loop{ enum << input_stream.get_next_entry }
- end.lazy.take_while{ |e| e }
+ def subdirs
+ Enumerator.new do |yielder|
+ @yielder = yielder
+ Zip::File.open_buffer(@zip_data, &(method :_subdirs))
end
end
- def entry_groups
- self.class.entries(input_stream).group_by(&method(:entry_key))
+ def _subdirs zip_file
+ zip_file.each do | entry |
+ add_entry entry
+ end
+ finish_current_output
end
- def entry_group_streams
- entry_groups.map(&method(:make_stream)).to_h
+ def add_entry entry
+ key = entry_key entry
+ unless key == current_key
+ finish_current_output
+ open_new_output key
+ end
+ add_to_current_output entry
end
- def entry_key entry
- entry.name.split('/', -1)[-2]
+ def add_to_current_output entry
+ current_output.put_next_entry entry.name
+ write_to_current_output entry.get_input_stream
end
- def make_stream pair
- name, entries = pair
- [name, make_stream_from( entries )]
+ def write_to_current_output input_stream
+ return if Zip::NullInputStream == input_stream
+ current_output.write input_stream.read
end
- def make_stream_from entries
- Zip::OutputStream.write_buffer(&self.class.convert_entries(entries))
+ def finish_current_output
+ if current_output
+ @yielder << Subdir.new(
+ current_key,
+ # Second part of the solution, yield the closed stream
+ current_output.close_buffer)
+ end
end
- def next_entry
- @current_entry = input_stream.get_next_entry
+ def open_new_output entry_key
+ @current_key = entry_key
+ # First piece of the solution, use internal way to create a Zip::OutputStream
+ @current_output = Zip::OutputStream.new(StringIO.new(''), true, nil)
end
- def input_stream
- @__input_stream__ ||= Zip::InputStream.open(StringIO.new(zip_data))
+ def entry_key entry
+ entry.name.split('/', -1)[-2]
end
end
diff --git a/app/workers/workbench_import_worker.rb b/app/workers/workbench_import_worker.rb
index 6fc709d51..f91093806 100644
--- a/app/workers/workbench_import_worker.rb
+++ b/app/workers/workbench_import_worker.rb
@@ -25,14 +25,14 @@ class WorkbenchImportWorker
params: {token: @workbench_import.token_download}).body
end
- def execute_post eg_name, eg_stream
+ def execute_post eg_name, eg_file
logger.info "HTTP POST #{export_url} (for #{complete_entry_group_name(eg_name)})"
HTTPService.post_resource(
host: export_host,
path: export_path,
token: token(eg_name),
params: params,
- upload: {file: [eg_stream, 'application/zip', eg_name]})
+ upload: {file: [eg_file, 'application/zip', eg_name]})
end
def log_failure reason, count
@@ -43,17 +43,15 @@ class WorkbenchImportWorker
raise RetryService::Retry
end
- def try_upload_entry_group eg_name, eg_stream
- result = execute_post eg_name, eg_stream
- require 'pry'
- binding.pry
+ def try_upload_entry_group eg_name, eg_file
+ result = execute_post eg_name, eg_file
return result if result && result.status < 400
@response = result.body
try_again
end
def upload zip_service
- entry_group_streams = zip_service.entry_group_streams
+ entry_group_streams = zip_service.subdirs
@workbench_import.update_attributes total_steps: entry_group_streams.size
entry_group_streams.each_with_index(&method(:upload_entry_group))
rescue StopIteration
@@ -71,10 +69,14 @@ class WorkbenchImportWorker
end
def upload_entry_group_proc entry_pair
- eg_name, eg_stream = entry_pair
- # This should be fn.try_upload_entry_group(eg_name, eg_stream) ;(
+ eg_name = entry_pair.name
+ eg_stream = entry_pair.stream
+ eg_file = Tempfile.new("WorkbenchImport_#{eg_name}_#{$$}").tap do |file|
+ eg_stream.read
+ end
+ eg_file.rewind
-> do
- try_upload_entry_group(eg_name, eg_stream)
+ try_upload_entry_group(eg_name, eg_file)
end
end