aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorTeddy Wing2017-06-12 10:30:22 +0200
committerTeddy Wing2017-06-12 10:30:22 +0200
commit6dfdb22139eda7561fa1ae5a9c54b9fdadbfa4b3 (patch)
tree6f01fffa87db2c78d23beae2688c0499342e3dec
parent0b9e41d57fdba19bf3d9a61029e2fd688fbf61f2 (diff)
parent45af9cc3dab4040e5bdd3481ebff6c8b37556462 (diff)
downloadchouette-core-6dfdb22139eda7561fa1ae5a9c54b9fdadbfa4b3.tar.bz2
Merge remote-tracking branch 'origin/master' into 3479-refactor-table_builder-helper
-rw-r--r--Gemfile2
-rw-r--r--Gemfile.lock2
-rw-r--r--INSTALL.md14
-rw-r--r--app/assets/javascripts/es6_browserified/vehicle_journeys/components/tools/DuplicateVehicleJourney.js2
-rw-r--r--app/models/chouette/time_table.rb4
-rw-r--r--app/models/clean_up.rb39
-rw-r--r--app/models/referential.rb4
-rw-r--r--app/models/referential_cloning.rb3
-rw-r--r--app/workers/referential_cloning_worker.rb8
-rw-r--r--config/initializers/active_record.rb4
-rw-r--r--db/schema.rb490
-rw-r--r--lib/af83/schema_cloner.rb147
-rw-r--r--lib/af83/stored_procedures.rb44
-rw-r--r--lib/sql/clone_schema.sql114
-rw-r--r--spec/lib/af83/cloning/clone_schema_spec.rb113
-rw-r--r--spec/lib/af83/stored_procedure_spec.rb20
-rw-r--r--spec/lib/af83/stored_procedures/clone_schema_spec.rb167
-rw-r--r--spec/models/clean_up_spec.rb80
-rw-r--r--spec/support/bare_sql.rb58
-rw-r--r--spec/support/hash.rb6
-rw-r--r--spec/support/pg_catalog.rb57
-rw-r--r--spec/workers/referential_cloning_worker_spec.rb30
22 files changed, 744 insertions, 664 deletions
diff --git a/Gemfile b/Gemfile
index 6520f73de..203945992 100644
--- a/Gemfile
+++ b/Gemfile
@@ -110,6 +110,8 @@ gem 'ransack'
gem "squeel", github: 'activerecord-hackery/squeel'
gem 'active_attr'
+gem 'sequel'
+
gem 'draper'
gem 'enumerize', '~> 0.10.0'
diff --git a/Gemfile.lock b/Gemfile.lock
index f0b7630c6..921f0f367 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -463,6 +463,7 @@ GEM
rdoc (~> 4.0)
select2-rails (4.0.3)
thor (~> 0.14)
+ sequel (4.47.0)
shoulda-matchers (3.1.1)
activesupport (>= 4.0.0)
sidekiq (4.2.10)
@@ -649,6 +650,7 @@ DEPENDENCIES
sawyer (~> 0.6.0)
sdoc (~> 0.4.0)
select2-rails (~> 4.0, >= 4.0.3)
+ sequel
shoulda-matchers (~> 3.1)
sidekiq
simple_form (~> 3.1.0)
diff --git a/INSTALL.md b/INSTALL.md
index a9ec7969d..ea6fbd6ab 100644
--- a/INSTALL.md
+++ b/INSTALL.md
@@ -19,7 +19,7 @@ Go into your local repro and install the gems
bundle
-### Caveats
+### Installation Caveats
`libv8` might cause you troubles, depending on your local configuration. If you have `libv8` installed (probably because of `node.js`) you might need to tell bundler/Rubygems to use the system version.
@@ -41,6 +41,18 @@ You will get the correct value of `<version>` from bundler's error message.
As documented [here](https://github.com/dryade/georuby-ext/issues/2) we need some more libs before we can start the `rake` setup tasks. On mac/OS the easiest way is just to install `postgis` now with `homebrew` as this will
install all needed libraries.
+Also if on Linux you might discover a problem as late as when launching `rake`.
+In case of a stacktrace similar to this one
+
+```
+$ bundle exec rake --trace -T
+rake aborted!
+LoadError: library names list must not be empty
+```
+
+you need to install `libproj4-dev` on your system.
+
+
### Postgres
#### Create user
diff --git a/app/assets/javascripts/es6_browserified/vehicle_journeys/components/tools/DuplicateVehicleJourney.js b/app/assets/javascripts/es6_browserified/vehicle_journeys/components/tools/DuplicateVehicleJourney.js
index f5e01de7c..7f5f6bcbe 100644
--- a/app/assets/javascripts/es6_browserified/vehicle_journeys/components/tools/DuplicateVehicleJourney.js
+++ b/app/assets/javascripts/es6_browserified/vehicle_journeys/components/tools/DuplicateVehicleJourney.js
@@ -61,7 +61,7 @@ class DuplicateVehicleJourney extends Component {
<form>
<div className='modal-body'>
<div className='row'>
- <div className='col-lg-3 col-md-3 col-sm-3 col-xs-3'>
+ <div className={'col-lg-3 col-md-3 col-sm-3 col-xs-3 ' + (actions.getSelected(this.props.vehicleJourneys).length > 1 ? 'hidden' : '' )}>
<div className='form-group'>
<label className='control-label is-required'>Horaire de départ</label>
<span className={'input-group time' + (actions.getSelected(this.props.vehicleJourneys).length > 1 ? ' disabled' : '')}>
diff --git a/app/models/chouette/time_table.rb b/app/models/chouette/time_table.rb
index 2d329bcc6..151570f20 100644
--- a/app/models/chouette/time_table.rb
+++ b/app/models/chouette/time_table.rb
@@ -135,6 +135,10 @@ class Chouette::TimeTable < Chouette::TridentActiveRecord
[Chouette::TimeTable.maximum(:end_date)].compact.max
end
+ def add_exclude_date(in_out, date)
+ self.dates.create!({in_out: in_out, date: date})
+ end
+
def actualize
self.dates.clear
self.periods.clear
diff --git a/app/models/clean_up.rb b/app/models/clean_up.rb
index b14067b73..a2b150ecc 100644
--- a/app/models/clean_up.rb
+++ b/app/models/clean_up.rb
@@ -19,6 +19,9 @@ class CleanUp < ActiveRecord::Base
result['time_table'] = send("destroy_time_tables_#{self.date_type}").try(:count)
result['time_table_date'] = send("destroy_time_tables_dates_#{self.date_type}").try(:count)
result['time_table_period'] = send("destroy_time_tables_periods_#{self.date_type}").try(:count)
+ self.overlapping_periods.each do |period|
+ exclude_dates_in_overlapping_period(period)
+ end
end
end
@@ -61,6 +64,42 @@ class CleanUp < ActiveRecord::Base
Chouette::TimeTablePeriod.where('period_start >= ? AND period_end <= ?', self.begin_date, self.end_date).destroy_all
end
+ def overlapping_periods
+ self.end_date = self.begin_date if self.date_type != 'between'
+ Chouette::TimeTablePeriod.where('(period_start, period_end) OVERLAPS (?, ?)', self.begin_date, self.end_date)
+ end
+
+ def exclude_dates_in_overlapping_period(period)
+ days_in_period = period.period_start..period.period_end
+ day_out = period.time_table.dates.where(in_out: false).map(&:date)
+ # check if day is greater or less then cleanup date
+ if date_type != 'between'
+ operator = date_type == 'after' ? '>' : '<'
+ to_exclude_days = days_in_period.map do |day|
+ day if day.public_send(operator, self.begin_date)
+ end
+ else
+ days_in_cleanup_periode = (self.begin_date..self.end_date)
+ to_exclude_days = days_in_period & days_in_cleanup_periode
+ end
+
+ to_exclude_days.to_a.compact.each do |day|
+ # we ensure day is not already an exclude date
+ if !day_out.include?(day)
+ self.add_exclude_date(period.time_table, day)
+ end
+ end
+ end
+
+ def add_exclude_date(time_table, day)
+ day_in = time_table.dates.where(in_out: true).map(&:date)
+ unless day_in.include?(day)
+ time_table.add_exclude_date(false, day)
+ else
+ time_table.dates.where(date: day).take.update_attribute(:in_out, false)
+ end
+ end
+
def destroy_time_tables(time_tables)
time_tables.each do |time_table|
time_table.vehicle_journeys.map(&:destroy)
diff --git a/app/models/referential.rb b/app/models/referential.rb
index 83d507320..824320c27 100644
--- a/app/models/referential.rb
+++ b/app/models/referential.rb
@@ -127,7 +127,7 @@ class Referential < ActiveRecord::Base
end
def self.new_from(from, organisation:)
- Referential.new({
+ Referential.new(
name: I18n.t("activerecord.copy", :name => from.name),
slug: "#{from.slug}_clone",
prefix: from.prefix,
@@ -139,7 +139,7 @@ class Referential < ActiveRecord::Base
workbench: from.workbench,
created_from: from,
metadatas: from.metadatas.map { |m| ReferentialMetadata.new_from(m) }
- })
+ )
end
def self.available_srids
diff --git a/app/models/referential_cloning.rb b/app/models/referential_cloning.rb
index 2f34093e2..5bf283814 100644
--- a/app/models/referential_cloning.rb
+++ b/app/models/referential_cloning.rb
@@ -6,7 +6,8 @@ class ReferentialCloning < ActiveRecord::Base
private
def perform_clone
- ReferentialCloningWorker.perform_async(self.id)
+ ReferentialCloningWorker.perform_async(id)
+ # ReferentialCloningWorker.new.perform(id)
end
aasm column: :status do
diff --git a/app/workers/referential_cloning_worker.rb b/app/workers/referential_cloning_worker.rb
index 87725ce5a..6592160ec 100644
--- a/app/workers/referential_cloning_worker.rb
+++ b/app/workers/referential_cloning_worker.rb
@@ -6,7 +6,7 @@ class ReferentialCloningWorker
ref_cloning = ReferentialCloning.find id
source_schema = ref_cloning.source_referential.slug
- target_schema = "#{source_schema}_tmp"
+ target_schema = ref_cloning.target_referential.slug
clone_schema ref_cloning, source_schema, target_schema
end
@@ -16,9 +16,9 @@ class ReferentialCloningWorker
def clone_schema ref_cloning, source_schema, target_schema
ref_cloning.run!
- StoredProcedures.invoke_stored_procedure(:clone_schema, source_schema, target_schema, true)
- execute_sql "DROP SCHEMA #{source_schema} CASCADE;"
- execute_sql "ALTER SCHEMA #{target_schema} RENAME TO #{source_schema};"
+ AF83::SchemaCloner
+ .new(source_schema, target_schema)
+ .clone_schema
ref_cloning.successful!
rescue Exception => e
diff --git a/config/initializers/active_record.rb b/config/initializers/active_record.rb
index bdf9e0b4b..0a7378532 100644
--- a/config/initializers/active_record.rb
+++ b/config/initializers/active_record.rb
@@ -1,5 +1 @@
-require_relative '../../lib/af83/stored_procedures'
-
ActiveRecord::ConnectionAdapters::PostgreSQLAdapter::NATIVE_DATABASE_TYPES[:primary_key] = "bigserial primary key"
-
-StoredProcedures.create_stored_procedure(:clone_schema)
diff --git a/db/schema.rb b/db/schema.rb
index be976c3f1..d9a4fc5f8 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -21,12 +21,12 @@ ActiveRecord::Schema.define(version: 20170607141317) do
create_table "access_links", id: :bigserial, force: :cascade do |t|
t.integer "access_point_id", limit: 8
t.integer "stop_area_id", limit: 8
- t.string "objectid", limit: 255, null: false
+ t.string "objectid", null: false
t.integer "object_version", limit: 8
- t.string "creator_id", limit: 255
- t.string "name", limit: 255
- t.string "comment", limit: 255
- t.decimal "link_distance", precision: 19, scale: 2
+ t.string "creator_id"
+ t.string "name"
+ t.string "comment"
+ t.decimal "link_distance", precision: 19, scale: 2
t.boolean "lift_availability"
t.boolean "mobility_restricted_suitability"
t.boolean "stairs_availability"
@@ -34,9 +34,9 @@ ActiveRecord::Schema.define(version: 20170607141317) do
t.time "frequent_traveller_duration"
t.time "occasional_traveller_duration"
t.time "mobility_restricted_traveller_duration"
- t.string "link_type", limit: 255
+ t.string "link_type"
t.integer "int_user_needs"
- t.string "link_orientation", limit: 255
+ t.string "link_orientation"
t.datetime "created_at"
t.datetime "updated_at"
end
@@ -44,26 +44,26 @@ ActiveRecord::Schema.define(version: 20170607141317) do
add_index "access_links", ["objectid"], name: "access_links_objectid_key", unique: true, using: :btree
create_table "access_points", id: :bigserial, force: :cascade do |t|
- t.string "objectid", limit: 255
+ t.string "objectid"
t.integer "object_version", limit: 8
- t.string "creator_id", limit: 255
- t.string "name", limit: 255
- t.string "comment", limit: 255
- t.decimal "longitude", precision: 19, scale: 16
- t.decimal "latitude", precision: 19, scale: 16
- t.string "long_lat_type", limit: 255
- t.string "country_code", limit: 255
- t.string "street_name", limit: 255
- t.string "contained_in", limit: 255
+ t.string "creator_id"
+ t.string "name"
+ t.string "comment"
+ t.decimal "longitude", precision: 19, scale: 16
+ t.decimal "latitude", precision: 19, scale: 16
+ t.string "long_lat_type"
+ t.string "country_code"
+ t.string "street_name"
+ t.string "contained_in"
t.time "openning_time"
t.time "closing_time"
- t.string "access_type", limit: 255
+ t.string "access_type"
t.boolean "lift_availability"
t.boolean "mobility_restricted_suitability"
t.boolean "stairs_availability"
t.integer "stop_area_id", limit: 8
- t.string "zip_code", limit: 255
- t.string "city_name", limit: 255
+ t.string "zip_code"
+ t.string "city_name"
t.text "import_xml"
t.datetime "created_at"
t.datetime "updated_at"
@@ -73,18 +73,18 @@ ActiveRecord::Schema.define(version: 20170607141317) do
create_table "api_keys", id: :bigserial, force: :cascade do |t|
t.integer "referential_id", limit: 8
- t.string "token", limit: 255
- t.string "name", limit: 255
+ t.string "token"
+ t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "calendars", id: :bigserial, force: :cascade do |t|
- t.string "name", limit: 255
- t.string "short_name", limit: 255
- t.daterange "date_ranges", array: true
- t.date "dates", array: true
- t.boolean "shared", default: false
+ t.string "name"
+ t.string "short_name"
+ t.daterange "date_ranges", array: true
+ t.date "dates", array: true
+ t.boolean "shared", default: false
t.integer "organisation_id", limit: 8
t.datetime "created_at"
t.datetime "updated_at"
@@ -94,7 +94,7 @@ ActiveRecord::Schema.define(version: 20170607141317) do
add_index "calendars", ["short_name"], name: "index_calendars_on_short_name", unique: true, using: :btree
create_table "clean_up_results", id: :bigserial, force: :cascade do |t|
- t.string "message_key", limit: 255
+ t.string "message_key"
t.hstore "message_attributs"
t.integer "clean_up_id", limit: 8
t.datetime "created_at"
@@ -104,7 +104,7 @@ ActiveRecord::Schema.define(version: 20170607141317) do
add_index "clean_up_results", ["clean_up_id"], name: "index_clean_up_results_on_clean_up_id", using: :btree
create_table "clean_ups", id: :bigserial, force: :cascade do |t|
- t.string "status", limit: 255
+ t.string "status"
t.datetime "started_at"
t.datetime "ended_at"
t.integer "referential_id", limit: 8
@@ -118,20 +118,20 @@ ActiveRecord::Schema.define(version: 20170607141317) do
add_index "clean_ups", ["referential_id"], name: "index_clean_ups_on_referential_id", using: :btree
create_table "companies", id: :bigserial, force: :cascade do |t|
- t.string "objectid", limit: 255, null: false
+ t.string "objectid", null: false
t.integer "object_version", limit: 8
- t.string "creator_id", limit: 255
- t.string "name", limit: 255
- t.string "short_name", limit: 255
- t.string "organizational_unit", limit: 255
- t.string "operating_department_name", limit: 255
- t.string "code", limit: 255
- t.string "phone", limit: 255
- t.string "fax", limit: 255
- t.string "email", limit: 255
- t.string "registration_number", limit: 255
- t.string "url", limit: 255
- t.string "time_zone", limit: 255
+ t.string "creator_id"
+ t.string "name"
+ t.string "short_name"
+ t.string "organizational_unit"
+ t.string "operating_department_name"
+ t.string "code"
+ t.string "phone"
+ t.string "fax"
+ t.string "email"
+ t.string "registration_number"
+ t.string "url"
+ t.string "time_zone"
t.integer "line_referential_id", limit: 8
t.text "import_xml"
t.datetime "created_at"
@@ -145,13 +145,13 @@ ActiveRecord::Schema.define(version: 20170607141317) do
create_table "connection_links", id: :bigserial, force: :cascade do |t|
t.integer "departure_id", limit: 8
t.integer "arrival_id", limit: 8
- t.string "objectid", limit: 255, null: false
+ t.string "objectid", null: false
t.integer "object_version", limit: 8
- t.string "creator_id", limit: 255
- t.string "name", limit: 255
- t.string "comment", limit: 255
- t.decimal "link_distance", precision: 19, scale: 2
- t.string "link_type", limit: 255
+ t.string "creator_id"
+ t.string "name"
+ t.string "comment"
+ t.decimal "link_distance", precision: 19, scale: 2
+ t.string "link_type"
t.time "default_duration"
t.time "frequent_traveller_duration"
t.time "occasional_traveller_duration"
@@ -166,31 +166,15 @@ ActiveRecord::Schema.define(version: 20170607141317) do
add_index "connection_links", ["objectid"], name: "connection_links_objectid_key", unique: true, using: :btree
- create_table "delayed_jobs", id: :bigserial, force: :cascade do |t|
- t.integer "priority", default: 0
- t.integer "attempts", default: 0
- t.text "handler"
- t.text "last_error"
- t.datetime "run_at"
- t.datetime "locked_at"
- t.datetime "failed_at"
- t.string "locked_by", limit: 255
- t.string "queue", limit: 255
- t.datetime "created_at"
- t.datetime "updated_at"
- end
-
- add_index "delayed_jobs", ["priority", "run_at"], name: "delayed_jobs_priority", using: :btree
-
create_table "exports", id: :bigserial, force: :cascade do |t|
t.integer "referential_id", limit: 8
- t.string "status", limit: 255
- t.string "type", limit: 255
- t.string "options", limit: 255
+ t.string "status"
+ t.string "type"
+ t.string "options"
t.datetime "created_at"
t.datetime "updated_at"
- t.string "references_type", limit: 255
- t.string "reference_ids", limit: 255
+ t.string "references_type"
+ t.string "reference_ids"
end
add_index "exports", ["referential_id"], name: "index_exports_on_referential_id", using: :btree
@@ -200,23 +184,23 @@ ActiveRecord::Schema.define(version: 20170607141317) do
t.integer "line_id", limit: 8
t.integer "connection_link_id", limit: 8
t.integer "stop_point_id", limit: 8
- t.string "objectid", limit: 255, null: false
+ t.string "objectid", null: false
t.integer "object_version", limit: 8
t.datetime "creation_time"
- t.string "creator_id", limit: 255
- t.string "name", limit: 255
- t.string "comment", limit: 255
- t.string "description", limit: 255
+ t.string "creator_id"
+ t.string "name"
+ t.string "comment"
+ t.string "description"
t.boolean "free_access"
- t.decimal "longitude", precision: 19, scale: 16
- t.decimal "latitude", precision: 19, scale: 16
- t.string "long_lat_type", limit: 255
- t.decimal "x", precision: 19, scale: 2
- t.decimal "y", precision: 19, scale: 2
- t.string "projection_type", limit: 255
- t.string "country_code", limit: 255
- t.string "street_name", limit: 255
- t.string "contained_in", limit: 255
+ t.decimal "longitude", precision: 19, scale: 16
+ t.decimal "latitude", precision: 19, scale: 16
+ t.string "long_lat_type"
+ t.decimal "x", precision: 19, scale: 2
+ t.decimal "y", precision: 19, scale: 2
+ t.string "projection_type"
+ t.string "country_code"
+ t.string "street_name"
+ t.string "contained_in"
end
add_index "facilities", ["objectid"], name: "facilities_objectid_key", unique: true, using: :btree
@@ -228,8 +212,8 @@ ActiveRecord::Schema.define(version: 20170607141317) do
create_table "footnotes", id: :bigserial, force: :cascade do |t|
t.integer "line_id", limit: 8
- t.string "code", limit: 255
- t.string "label", limit: 255
+ t.string "code"
+ t.string "label"
t.datetime "created_at"
t.datetime "updated_at"
end
@@ -240,12 +224,12 @@ ActiveRecord::Schema.define(version: 20170607141317) do
end
create_table "group_of_lines", id: :bigserial, force: :cascade do |t|
- t.string "objectid", limit: 255, null: false
+ t.string "objectid", null: false
t.integer "object_version", limit: 8
- t.string "creator_id", limit: 255
- t.string "name", limit: 255
- t.string "comment", limit: 255
- t.string "registration_number", limit: 255
+ t.string "creator_id"
+ t.string "name"
+ t.string "comment"
+ t.string "registration_number"
t.integer "line_referential_id", limit: 8
t.text "import_xml"
t.datetime "created_at"
@@ -262,7 +246,7 @@ ActiveRecord::Schema.define(version: 20170607141317) do
create_table "import_messages", id: :bigserial, force: :cascade do |t|
t.integer "criticity"
- t.string "message_key", limit: 255
+ t.string "message_key"
t.hstore "message_attributs"
t.integer "import_id", limit: 8
t.integer "resource_id", limit: 8
@@ -276,31 +260,31 @@ ActiveRecord::Schema.define(version: 20170607141317) do
create_table "import_resources", id: :bigserial, force: :cascade do |t|
t.integer "import_id", limit: 8
- t.string "status", limit: 255
+ t.string "status"
t.datetime "created_at"
t.datetime "updated_at"
- t.string "type", limit: 255
- t.string "reference", limit: 255
- t.string "name", limit: 255
+ t.string "type"
+ t.string "reference"
+ t.string "name"
t.hstore "metrics"
end
add_index "import_resources", ["import_id"], name: "index_import_resources_on_import_id", using: :btree
create_table "imports", id: :bigserial, force: :cascade do |t|
- t.string "status", limit: 255
- t.string "current_step_id", limit: 255
+ t.string "status"
+ t.string "current_step_id"
t.float "current_step_progress"
t.integer "workbench_id", limit: 8
t.integer "referential_id", limit: 8
- t.string "name", limit: 255
+ t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
- t.string "file", limit: 255
+ t.string "file"
t.datetime "started_at"
t.datetime "ended_at"
- t.string "token_download", limit: 255
- t.string "type", limit: 255
+ t.string "token_download"
+ t.string "type"
end
add_index "imports", ["referential_id"], name: "index_imports_on_referential_id", using: :btree
@@ -334,16 +318,16 @@ ActiveRecord::Schema.define(version: 20170607141317) do
create_table "journey_patterns", id: :bigserial, force: :cascade do |t|
t.integer "route_id", limit: 8
- t.string "objectid", limit: 255, null: false
+ t.string "objectid", null: false
t.integer "object_version", limit: 8
- t.string "creator_id", limit: 255
- t.string "name", limit: 255
- t.string "comment", limit: 255
- t.string "registration_number", limit: 255
- t.string "published_name", limit: 255
+ t.string "creator_id"
+ t.string "name"
+ t.string "comment"
+ t.string "registration_number"
+ t.string "published_name"
t.integer "departure_stop_point_id", limit: 8
t.integer "arrival_stop_point_id", limit: 8
- t.integer "section_status", default: 0, null: false
+ t.integer "section_status", default: 0, null: false
t.datetime "created_at"
t.datetime "updated_at"
end
@@ -365,7 +349,7 @@ ActiveRecord::Schema.define(version: 20170607141317) do
create_table "line_referential_sync_messages", id: :bigserial, force: :cascade do |t|
t.integer "criticity"
- t.string "message_key", limit: 255
+ t.string "message_key"
t.hstore "message_attributs"
t.integer "line_referential_sync_id", limit: 8
t.datetime "created_at"
@@ -380,42 +364,42 @@ ActiveRecord::Schema.define(version: 20170607141317) do
t.datetime "updated_at"
t.datetime "started_at"
t.datetime "ended_at"
- t.string "status", limit: 255
+ t.string "status"
end
add_index "line_referential_syncs", ["line_referential_id"], name: "index_line_referential_syncs_on_line_referential_id", using: :btree
create_table "line_referentials", id: :bigserial, force: :cascade do |t|
- t.string "name", limit: 255
+ t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
- t.integer "sync_interval", default: 1
+ t.integer "sync_interval", default: 1
end
create_table "lines", id: :bigserial, force: :cascade do |t|
t.integer "network_id", limit: 8
t.integer "company_id", limit: 8
- t.string "objectid", limit: 255, null: false
+ t.string "objectid", null: false
t.integer "object_version", limit: 8
- t.string "creator_id", limit: 255
- t.string "name", limit: 255
- t.string "number", limit: 255
- t.string "published_name", limit: 255
- t.string "transport_mode", limit: 255
- t.string "registration_number", limit: 255
- t.string "comment", limit: 255
+ t.string "creator_id"
+ t.string "name"
+ t.string "number"
+ t.string "published_name"
+ t.string "transport_mode"
+ t.string "registration_number"
+ t.string "comment"
t.boolean "mobility_restricted_suitability"
t.integer "int_user_needs"
t.boolean "flexible_service"
- t.string "url", limit: 255
+ t.string "url"
t.string "color", limit: 6
t.string "text_color", limit: 6
- t.string "stable_id", limit: 255
+ t.string "stable_id"
t.integer "line_referential_id", limit: 8
- t.boolean "deactivated", default: false
+ t.boolean "deactivated", default: false
t.text "import_xml"
- t.string "transport_submode", limit: 255
- t.integer "secondary_company_ids", limit: 8, array: true
+ t.string "transport_submode"
+ t.integer "secondary_company_ids", limit: 8, array: true
t.datetime "created_at"
t.datetime "updated_at"
t.boolean "seasonal"
@@ -427,17 +411,17 @@ ActiveRecord::Schema.define(version: 20170607141317) do
add_index "lines", ["secondary_company_ids"], name: "index_lines_on_secondary_company_ids", using: :gin
create_table "networks", id: :bigserial, force: :cascade do |t|
- t.string "objectid", limit: 255, null: false
+ t.string "objectid", null: false
t.integer "object_version", limit: 8
- t.string "creator_id", limit: 255
+ t.string "creator_id"
t.date "version_date"
- t.string "description", limit: 255
- t.string "name", limit: 255
- t.string "registration_number", limit: 255
- t.string "source_name", limit: 255
- t.string "source_type", limit: 255
- t.string "source_identifier", limit: 255
- t.string "comment", limit: 255
+ t.string "description"
+ t.string "name"
+ t.string "registration_number"
+ t.string "source_name"
+ t.string "source_type"
+ t.string "source_identifier"
+ t.string "comment"
t.text "import_xml"
t.integer "line_referential_id", limit: 8
t.datetime "created_at"
@@ -449,11 +433,11 @@ ActiveRecord::Schema.define(version: 20170607141317) do
add_index "networks", ["registration_number"], name: "networks_registration_number_key", using: :btree
create_table "organisations", id: :bigserial, force: :cascade do |t|
- t.string "name", limit: 255
+ t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
- t.string "data_format", limit: 255, default: "neptune"
- t.string "code", limit: 255
+ t.string "data_format", default: "neptune"
+ t.string "code"
t.datetime "synced_at"
t.hstore "sso_attributes"
end
@@ -464,12 +448,12 @@ ActiveRecord::Schema.define(version: 20170607141317) do
t.integer "start_of_link_id", limit: 8
t.integer "end_of_link_id", limit: 8
t.integer "route_id", limit: 8
- t.string "objectid", limit: 255, null: false
+ t.string "objectid", null: false
t.integer "object_version", limit: 8
- t.string "creator_id", limit: 255
- t.string "name", limit: 255
- t.string "comment", limit: 255
- t.decimal "link_distance", precision: 19, scale: 2
+ t.string "creator_id"
+ t.string "name"
+ t.string "comment"
+ t.decimal "link_distance", precision: 19, scale: 2
t.datetime "created_at"
t.datetime "updated_at"
end
@@ -477,7 +461,7 @@ ActiveRecord::Schema.define(version: 20170607141317) do
add_index "pt_links", ["objectid"], name: "pt_links_objectid_key", unique: true, using: :btree
create_table "referential_clonings", id: :bigserial, force: :cascade do |t|
- t.string "status", limit: 255
+ t.string "status"
t.datetime "started_at"
t.datetime "ended_at"
t.integer "source_referential_id", limit: 8
@@ -498,30 +482,30 @@ ActiveRecord::Schema.define(version: 20170607141317) do
t.daterange "periodes", array: true
end
- add_index "referential_metadata", ["line_ids"], name: "index_referential_metadata_on_line_ids", using: :btree
+ add_index "referential_metadata", ["line_ids"], name: "index_referential_metadata_on_line_ids", using: :gin
add_index "referential_metadata", ["referential_id"], name: "index_referential_metadata_on_referential_id", using: :btree
add_index "referential_metadata", ["referential_source_id"], name: "index_referential_metadata_on_referential_source_id", using: :btree
create_table "referentials", id: :bigserial, force: :cascade do |t|
- t.string "name", limit: 255
- t.string "slug", limit: 255
+ t.string "name"
+ t.string "slug"
t.datetime "created_at"
t.datetime "updated_at"
- t.string "prefix", limit: 255
- t.string "projection_type", limit: 255
- t.string "time_zone", limit: 255
- t.string "bounds", limit: 255
+ t.string "prefix"
+ t.string "projection_type"
+ t.string "time_zone"
+ t.string "bounds"
t.integer "organisation_id", limit: 8
t.text "geographical_bounds"
t.integer "user_id", limit: 8
- t.string "user_name", limit: 255
- t.string "data_format", limit: 255
+ t.string "user_name"
+ t.string "data_format"
t.integer "line_referential_id", limit: 8
t.integer "stop_area_referential_id", limit: 8
t.integer "workbench_id", limit: 8
t.datetime "archived_at"
t.integer "created_from_id", limit: 8
- t.boolean "ready", default: false
+ t.boolean "ready", default: false
end
add_index "referentials", ["created_from_id"], name: "index_referentials_on_created_from_id", using: :btree
@@ -529,29 +513,29 @@ ActiveRecord::Schema.define(version: 20170607141317) do
create_table "route_sections", id: :bigserial, force: :cascade do |t|
t.integer "departure_id", limit: 8
t.integer "arrival_id", limit: 8
- t.string "objectid", limit: 255, null: false
+ t.geometry "input_geometry", limit: {:srid=>4326, :type=>"line_string"}
+ t.geometry "processed_geometry", limit: {:srid=>4326, :type=>"line_string"}
+ t.string "objectid", null: false
t.integer "object_version", limit: 8
- t.string "creator_id", limit: 255
+ t.string "creator_id"
t.float "distance"
t.boolean "no_processing"
- t.geometry "input_geometry", limit: {:srid=>4326, :type=>"line_string"}
- t.geometry "processed_geometry", limit: {:srid=>4326, :type=>"line_string"}
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "routes", id: :bigserial, force: :cascade do |t|
t.integer "line_id", limit: 8
- t.string "objectid", limit: 255, null: false
+ t.string "objectid", null: false
t.integer "object_version", limit: 8
- t.string "creator_id", limit: 255
- t.string "name", limit: 255
- t.string "comment", limit: 255
+ t.string "creator_id"
+ t.string "name"
+ t.string "comment"
t.integer "opposite_route_id", limit: 8
- t.string "published_name", limit: 255
- t.string "number", limit: 255
- t.string "direction", limit: 255
- t.string "wayback", limit: 255
+ t.string "published_name"
+ t.string "number"
+ t.string "direction"
+ t.string "wayback"
t.datetime "created_at"
t.datetime "updated_at"
end
@@ -559,14 +543,14 @@ ActiveRecord::Schema.define(version: 20170607141317) do
add_index "routes", ["objectid"], name: "routes_objectid_key", unique: true, using: :btree
create_table "routing_constraint_zones", id: :bigserial, force: :cascade do |t|
- t.string "name", limit: 255
+ t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
- t.string "objectid", limit: 255, null: false
+ t.string "objectid", null: false
t.integer "object_version", limit: 8
- t.string "creator_id", limit: 255
+ t.string "creator_id"
t.integer "route_id", limit: 8
- t.integer "stop_point_ids", limit: 8, array: true
+ t.integer "stop_point_ids", limit: 8, array: true
end
create_table "routing_constraints_lines", id: false, force: :cascade do |t|
@@ -576,7 +560,7 @@ ActiveRecord::Schema.define(version: 20170607141317) do
create_table "rule_parameter_sets", id: :bigserial, force: :cascade do |t|
t.text "parameters"
- t.string "name", limit: 255
+ t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "organisation_id", limit: 8
@@ -590,7 +574,7 @@ ActiveRecord::Schema.define(version: 20170607141317) do
create_table "stop_area_referential_sync_messages", id: :bigserial, force: :cascade do |t|
t.integer "criticity"
- t.string "message_key", limit: 255
+ t.string "message_key"
t.hstore "message_attributs"
t.integer "stop_area_referential_sync_id", limit: 8
t.datetime "created_at"
@@ -605,48 +589,48 @@ ActiveRecord::Schema.define(version: 20170607141317) do
t.datetime "updated_at"
t.datetime "ended_at"
t.datetime "started_at"
- t.string "status", limit: 255
+ t.string "status"
end
add_index "stop_area_referential_syncs", ["stop_area_referential_id"], name: "index_stop_area_referential_syncs_on_stop_area_referential_id", using: :btree
create_table "stop_area_referentials", id: :bigserial, force: :cascade do |t|
- t.string "name", limit: 255
+ t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "stop_areas", id: :bigserial, force: :cascade do |t|
t.integer "parent_id", limit: 8
- t.string "objectid", limit: 255, null: false
+ t.string "objectid", null: false
t.integer "object_version", limit: 8
- t.string "creator_id", limit: 255
- t.string "name", limit: 255
- t.string "comment", limit: 255
- t.string "area_type", limit: 255
- t.string "registration_number", limit: 255
- t.string "nearest_topic_name", limit: 255
+ t.string "creator_id"
+ t.string "name"
+ t.string "comment"
+ t.string "area_type"
+ t.string "registration_number"
+ t.string "nearest_topic_name"
t.integer "fare_code"
- t.decimal "longitude", precision: 19, scale: 16
- t.decimal "latitude", precision: 19, scale: 16
- t.string "long_lat_type", limit: 255
- t.string "country_code", limit: 255
- t.string "street_name", limit: 255
+ t.decimal "longitude", precision: 19, scale: 16
+ t.decimal "latitude", precision: 19, scale: 16
+ t.string "long_lat_type"
+ t.string "country_code"
+ t.string "street_name"
t.boolean "mobility_restricted_suitability"
t.boolean "stairs_availability"
t.boolean "lift_availability"
t.integer "int_user_needs"
- t.string "zip_code", limit: 255
- t.string "city_name", limit: 255
- t.string "url", limit: 255
- t.string "time_zone", limit: 255
+ t.string "zip_code"
+ t.string "city_name"
+ t.string "url"
+ t.string "time_zone"
t.integer "stop_area_referential_id", limit: 8
- t.string "status", limit: 255
+ t.string "status"
t.text "import_xml"
t.datetime "deleted_at"
t.datetime "created_at"
t.datetime "updated_at"
- t.string "stif_type", limit: 255
+ t.string "stif_type"
end
add_index "stop_areas", ["name"], name: "index_stop_areas_on_name", using: :btree
@@ -662,12 +646,12 @@ ActiveRecord::Schema.define(version: 20170607141317) do
create_table "stop_points", id: :bigserial, force: :cascade do |t|
t.integer "route_id", limit: 8
t.integer "stop_area_id", limit: 8
- t.string "objectid", limit: 255, null: false
+ t.string "objectid", null: false
t.integer "object_version", limit: 8
- t.string "creator_id", limit: 255
+ t.string "creator_id"
t.integer "position"
- t.string "for_boarding", limit: 255
- t.string "for_alighting", limit: 255
+ t.string "for_boarding"
+ t.string "for_alighting"
t.datetime "created_at"
t.datetime "updated_at"
end
@@ -677,9 +661,9 @@ ActiveRecord::Schema.define(version: 20170607141317) do
create_table "taggings", id: :bigserial, force: :cascade do |t|
t.integer "tag_id", limit: 8
t.integer "taggable_id", limit: 8
- t.string "taggable_type", limit: 255
+ t.string "taggable_type"
t.integer "tagger_id", limit: 8
- t.string "tagger_type", limit: 255
+ t.string "tagger_type"
t.string "context", limit: 128
t.datetime "created_at"
end
@@ -688,8 +672,8 @@ ActiveRecord::Schema.define(version: 20170607141317) do
add_index "taggings", ["taggable_id", "taggable_type", "context"], name: "index_taggings_on_taggable_id_and_taggable_type_and_context", using: :btree
create_table "tags", id: :bigserial, force: :cascade do |t|
- t.string "name", limit: 255
- t.integer "taggings_count", default: 0
+ t.string "name"
+ t.integer "taggings_count", default: 0
end
add_index "tags", ["name"], name: "index_tags_on_name", unique: true, using: :btree
@@ -713,18 +697,18 @@ ActiveRecord::Schema.define(version: 20170607141317) do
add_index "time_table_periods", ["time_table_id"], name: "index_time_table_periods_on_time_table_id", using: :btree
create_table "time_tables", id: :bigserial, force: :cascade do |t|
- t.string "objectid", limit: 255, null: false
- t.integer "object_version", limit: 8, default: 1
- t.string "creator_id", limit: 255
- t.string "version", limit: 255
- t.string "comment", limit: 255
- t.integer "int_day_types", default: 0
+ t.string "objectid", null: false
+ t.integer "object_version", limit: 8, default: 1
+ t.string "creator_id"
+ t.string "version"
+ t.string "comment"
+ t.integer "int_day_types", default: 0
t.date "start_date"
t.date "end_date"
t.integer "calendar_id", limit: 8
t.datetime "created_at"
t.datetime "updated_at"
- t.string "color", limit: 255
+ t.string "color"
t.integer "created_from_id"
end
@@ -741,49 +725,49 @@ ActiveRecord::Schema.define(version: 20170607141317) do
add_index "time_tables_vehicle_journeys", ["vehicle_journey_id"], name: "index_time_tables_vehicle_journeys_on_vehicle_journey_id", using: :btree
create_table "timebands", id: :bigserial, force: :cascade do |t|
- t.string "objectid", limit: 255, null: false
+ t.string "objectid", null: false
t.integer "object_version", limit: 8
- t.string "creator_id", limit: 255
- t.string "name", limit: 255
- t.time "start_time", null: false
- t.time "end_time", null: false
+ t.string "creator_id"
+ t.string "name"
+ t.time "start_time", null: false
+ t.time "end_time", null: false
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "users", id: :bigserial, force: :cascade do |t|
- t.string "email", limit: 255, default: "", null: false
- t.string "encrypted_password", limit: 255, default: ""
- t.string "reset_password_token", limit: 255
+ t.string "email", default: "", null: false
+ t.string "encrypted_password", default: ""
+ t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
- t.integer "sign_in_count", default: 0
+ t.integer "sign_in_count", default: 0
t.datetime "current_sign_in_at"
t.datetime "last_sign_in_at"
- t.string "current_sign_in_ip", limit: 255
- t.string "last_sign_in_ip", limit: 255
+ t.string "current_sign_in_ip"
+ t.string "last_sign_in_ip"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "organisation_id", limit: 8
- t.string "name", limit: 255
- t.string "confirmation_token", limit: 255
+ t.string "name"
+ t.string "confirmation_token"
t.datetime "confirmed_at"
t.datetime "confirmation_sent_at"
- t.string "unconfirmed_email", limit: 255
- t.integer "failed_attempts", default: 0
- t.string "unlock_token", limit: 255
+ t.string "unconfirmed_email"
+ t.integer "failed_attempts", default: 0
+ t.string "unlock_token"
t.datetime "locked_at"
- t.string "authentication_token", limit: 255
- t.string "invitation_token", limit: 255
+ t.string "authentication_token"
+ t.string "invitation_token"
t.datetime "invitation_sent_at"
t.datetime "invitation_accepted_at"
t.integer "invitation_limit"
t.integer "invited_by_id", limit: 8
- t.string "invited_by_type", limit: 255
+ t.string "invited_by_type"
t.datetime "invitation_created_at"
- t.string "username", limit: 255
+ t.string "username"
t.datetime "synced_at"
- t.string "permissions", limit: 255, array: true
+ t.string "permissions", array: true
end
add_index "users", ["email"], name: "index_users_on_email", unique: true, using: :btree
@@ -794,14 +778,14 @@ ActiveRecord::Schema.define(version: 20170607141317) do
create_table "vehicle_journey_at_stops", id: :bigserial, force: :cascade do |t|
t.integer "vehicle_journey_id", limit: 8
t.integer "stop_point_id", limit: 8
- t.string "connecting_service_id", limit: 255
- t.string "boarding_alighting_possibility", limit: 255
+ t.string "connecting_service_id"
+ t.string "boarding_alighting_possibility"
t.time "arrival_time"
t.time "departure_time"
- t.string "for_boarding", limit: 255
- t.string "for_alighting", limit: 255
- t.integer "departure_day_offset", default: 0
- t.integer "arrival_day_offset", default: 0
+ t.string "for_boarding"
+ t.string "for_alighting"
+ t.integer "departure_day_offset", default: 0
+ t.integer "arrival_day_offset", default: 0
end
add_index "vehicle_journey_at_stops", ["stop_point_id"], name: "index_vehicle_journey_at_stops_on_stop_pointid", using: :btree
@@ -811,20 +795,20 @@ ActiveRecord::Schema.define(version: 20170607141317) do
t.integer "route_id", limit: 8
t.integer "journey_pattern_id", limit: 8
t.integer "company_id", limit: 8
- t.string "objectid", limit: 255, null: false
+ t.string "objectid", null: false
t.integer "object_version", limit: 8
- t.string "creator_id", limit: 255
- t.string "comment", limit: 255
- t.string "status_value", limit: 255
- t.string "transport_mode", limit: 255
- t.string "published_journey_name", limit: 255
- t.string "published_journey_identifier", limit: 255
- t.string "facility", limit: 255
- t.string "vehicle_type_identifier", limit: 255
+ t.string "creator_id"
+ t.string "comment"
+ t.string "status_value"
+ t.string "transport_mode"
+ t.string "published_journey_name"
+ t.string "published_journey_identifier"
+ t.string "facility"
+ t.string "vehicle_type_identifier"
t.integer "number", limit: 8
t.boolean "mobility_restricted_suitability"
t.boolean "flexible_service"
- t.integer "journey_category", default: 0, null: false
+ t.integer "journey_category", default: 0, null: false
t.datetime "created_at"
t.datetime "updated_at"
end
@@ -833,7 +817,7 @@ ActiveRecord::Schema.define(version: 20170607141317) do
add_index "vehicle_journeys", ["route_id"], name: "index_vehicle_journeys_on_route_id", using: :btree
create_table "workbenches", id: :bigserial, force: :cascade do |t|
- t.string "name", limit: 255
+ t.string "name"
t.integer "organisation_id", limit: 8
t.datetime "created_at"
t.datetime "updated_at"
@@ -845,18 +829,24 @@ ActiveRecord::Schema.define(version: 20170607141317) do
add_index "workbenches", ["organisation_id"], name: "index_workbenches_on_organisation_id", using: :btree
add_index "workbenches", ["stop_area_referential_id"], name: "index_workbenches_on_stop_area_referential_id", using: :btree
- add_foreign_key "access_links", "access_points", name: "aclk_acpt_fkey", on_delete: :cascade
+ create_table "yyy", id: false, force: :cascade do |t|
+ t.text "value"
+ end
+
+ add_foreign_key "access_links", "access_points", name: "aclk_acpt_fkey"
add_foreign_key "group_of_lines_lines", "group_of_lines", name: "groupofline_group_fkey", on_delete: :cascade
- add_foreign_key "journey_frequencies", "timebands", name: "journey_frequencies_timeband_id_fk", on_delete: :nullify
- add_foreign_key "journey_frequencies", "vehicle_journeys", name: "journey_frequencies_vehicle_journey_id_fk", on_delete: :nullify
- add_foreign_key "journey_pattern_sections", "journey_patterns", name: "journey_pattern_sections_journey_pattern_id_fk", on_delete: :cascade
- add_foreign_key "journey_pattern_sections", "route_sections", name: "journey_pattern_sections_route_section_id_fk", on_delete: :cascade
+ add_foreign_key "journey_frequencies", "timebands", on_delete: :nullify
+ add_foreign_key "journey_frequencies", "vehicle_journeys", on_delete: :nullify
+ add_foreign_key "journey_pattern_sections", "journey_patterns", on_delete: :cascade
+ add_foreign_key "journey_pattern_sections", "route_sections", on_delete: :cascade
add_foreign_key "journey_patterns", "routes", name: "jp_route_fkey", on_delete: :cascade
add_foreign_key "journey_patterns", "stop_points", column: "arrival_stop_point_id", name: "arrival_point_fkey", on_delete: :nullify
add_foreign_key "journey_patterns", "stop_points", column: "departure_stop_point_id", name: "departure_point_fkey", on_delete: :nullify
add_foreign_key "journey_patterns_stop_points", "journey_patterns", name: "jpsp_jp_fkey", on_delete: :cascade
add_foreign_key "journey_patterns_stop_points", "stop_points", name: "jpsp_stoppoint_fkey", on_delete: :cascade
- add_foreign_key "routes", "routes", column: "opposite_route_id", name: "route_opposite_route_fkey", on_delete: :nullify
+ add_foreign_key "route_sections", "stop_areas", column: "arrival_id"
+ add_foreign_key "route_sections", "stop_areas", column: "departure_id"
+ add_foreign_key "routes", "routes", column: "opposite_route_id", name: "route_opposite_route_fkey"
add_foreign_key "stop_areas", "stop_areas", column: "parent_id", name: "area_parent_fkey", on_delete: :nullify
add_foreign_key "stop_areas_stop_areas", "stop_areas", column: "child_id", name: "stoparea_child_fkey", on_delete: :cascade
add_foreign_key "stop_areas_stop_areas", "stop_areas", column: "parent_id", name: "stoparea_parent_fkey", on_delete: :cascade
diff --git a/lib/af83/schema_cloner.rb b/lib/af83/schema_cloner.rb
new file mode 100644
index 000000000..40c04f400
--- /dev/null
+++ b/lib/af83/schema_cloner.rb
@@ -0,0 +1,147 @@
+module AF83
+ class SchemaCloner
+
+ attr_reader :source_schema, :target_schema
+
+ def clone_schema
+ assure_schema_preconditons
+ create_target_schema
+ end
+
+ private
+
+ def adjust_default table_name, column_name, default_val
+ changed_default = default_val.sub(%r{\Anextval\('#{source_schema}}, "nextval('#{target_schema}")
+ execute "ALTER TABLE #{target_schema}.#{table_name} ALTER COLUMN #{column_name} SET DEFAULT #{changed_default}"
+ end
+
+ def adjust_defaults table_name
+ pairs = execute <<-EOSQL
+ SELECT column_name, column_default
+ FROM information_schema.columns
+ WHERE table_schema = '#{target_schema}' AND table_name = '#{table_name}' AND
+ column_default LIKE 'nextval(''#{source_schema}%::regclass)'
+ EOSQL
+ pairs.each do | pair |
+ adjust_default table_name, pair['column_name'], pair['column_default']
+ end
+ end
+
+ def alter_sequence sequence_name
+ seq_props = execute_get_ostruct( "SELECT * FROM #{source_schema}.#{sequence_name}" )
+ cycle_on_off = seq_props.is_cycled == 't' ? '' : 'NO'
+ execute <<-EOSQL
+ ALTER SEQUENCE #{target_schema}.#{sequence_name}
+ INCREMENT BY #{seq_props.increment_by}
+ MINVALUE #{seq_props.min_value}
+ MAXVALUE #{seq_props.max_value}
+ START WITH #{seq_props.start_value}
+ RESTART WITH #{seq_props.last_value}
+ CACHE #{seq_props.cache_value}
+ #{cycle_on_off} CYCLE;
+
+ -- setval syncs the target sequence's current value and is_called flag with the source -- TODO: confirm RESTART WITH above doesn't already make this redundant
+ SELECT setval('#{target_schema}.#{sequence_name}', #{seq_props.last_value}, '#{seq_props.is_called}');
+ EOSQL
+ end
+
+ def assure_schema_preconditons
+ raise RuntimeError, "Target Schema #{target_schema} does already exist" unless
+ execute("SELECT oid FROM pg_namespace WHERE nspname = '#{target_schema}' LIMIT 1").empty?
+
+ raise RuntimeError, "Source Schema #{source_schema} does not exist" unless source
+ end
+
+ def clone_foreign_key fk_desc
+ relname, conname, constraint_def = fk_desc.values_at(*%w[relname conname constraint_def])
+ constraint_def = constraint_def.sub(" REFERENCES #{source_schema}.", " REFERENCES #{target_schema}.")
+ execute <<-EOSQL
+ ALTER TABLE #{target_schema}.#{relname} ADD CONSTRAINT #{conname} #{constraint_def}
+ EOSQL
+ end
+ def clone_foreign_keys
+ get_foreign_keys.each(&method(:clone_foreign_key))
+ end
+
+ def clone_sequence sequence_name
+ create_sequence sequence_name
+ alter_sequence sequence_name
+ end
+ def clone_sequences
+ source_sequence_names.each(&method(:clone_sequence))
+ end
+
+ def clone_table table_name
+ create_table table_name
+ end
+
+ def clone_tables
+ table_names.each(&method(:clone_table))
+ end
+
+ def create_sequence sequence_name
+ execute "CREATE SEQUENCE #{target_schema}.#{sequence_name}"
+ end
+ def create_table table_name
+ execute "CREATE TABLE #{target_schema}.#{table_name} (LIKE #{source_schema}.#{table_name} INCLUDING ALL)"
+ execute "INSERT INTO #{target_schema}.#{table_name} SELECT * FROM #{source_schema}.#{table_name}"
+ adjust_defaults table_name
+ end
+ def create_target_schema
+ execute("CREATE SCHEMA #{target_schema}")
+ clone_sequences
+ clone_tables
+ clone_foreign_keys
+ end
+ def execute(str)
+ connection.execute(str).to_a
+ end
+ def execute_get_first(str)
+ execute(str).first
+ end
+ def execute_get_ostruct(str)
+ OpenStruct.new(execute_get_first(str))
+ end
+ def execute_get_values(str)
+ execute(str).flat_map(&:values)
+ end
+
+ def get_foreign_keys
+ execute <<-EOS
+ SELECT rn.relname, ct.conname, pg_get_constraintdef(ct.oid) AS constraint_def
+ FROM pg_constraint ct JOIN pg_class rn ON rn.oid = ct.conrelid
+ WHERE connamespace = #{source['oid']} AND rn.relkind = 'r' AND ct.contype = 'f'
+ EOS
+ end
+ def get_columns(table_name)
+ end
+
+ def initialize(source_schema, target_schema)
+ @source_schema = source_schema
+ @target_schema = target_schema
+ end
+
+ #
+ # Memoized accessors
+ # ------------------
+ def connection
+ @__connection__ ||= ActiveRecord::Base.connection
+ end
+
+ def source
+ @__source__ ||= execute("SELECT oid FROM pg_namespace WHERE nspname = '#{source_schema}' LIMIT 1").first;
+ end
+ def source_sequence_names
+ @__source_sequence_names__ ||=
+ execute_get_values \
+ "SELECT sequence_name::text FROM information_schema.sequences WHERE sequence_schema = '#{source_schema}'"
+ end
+ def source_oid
+ @__source_oid__ ||= source["oid"].to_i;
+ end
+ def table_names
+ @__table_names__ ||= execute_get_values \
+ "SELECT TABLE_NAME::text FROM information_schema.tables WHERE table_schema = '#{ source_schema }' AND table_type = 'BASE TABLE'"
+ end
+ end
+end
diff --git a/lib/af83/stored_procedures.rb b/lib/af83/stored_procedures.rb
deleted file mode 100644
index 698f3861d..000000000
--- a/lib/af83/stored_procedures.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-module StoredProcedures extend self
-
- def invoke_stored_procedure(name, *params)
- name = name.to_s
- raise ArgumentError, "no such stored procedure #{name.inspect}" unless stored_procedures[name]
- invocation = "#{name}(#{quote_params(params)})"
- ActiveRecord::Base.connection.execute "SELECT #{invocation}"
- end
-
- def create_stored_procedure(name)
- name = name.to_s
- sql_file = File.expand_path("../../sql/#{name}.sql", __FILE__)
- raise ArgumentError, "missing sql file #{sql_file.inspect}" unless File.readable? sql_file
-
- # We could store the file's content for reload without application restart if desired.
- stored_procedures[name] = true
-
- ActiveRecord::Base.connection.execute File.read(sql_file)
- end
-
- private
- def quote_params(params)
- params
- .map(&method(:quote_param))
- .join(", ")
- end
-
- def quote_param(param)
- case param
- when String
- "'#{param}'"
- when TrueClass
- "TRUE"
- when FalseClass
- "FALSE"
- else
- param
- end
- end
-
- def stored_procedures
- @__stored_procedures__ ||= {}
- end
-end
diff --git a/lib/sql/clone_schema.sql b/lib/sql/clone_schema.sql
deleted file mode 100644
index 9fb88466b..000000000
--- a/lib/sql/clone_schema.sql
+++ /dev/null
@@ -1,114 +0,0 @@
-CREATE OR REPLACE FUNCTION clone_schema( source_schema text, dest_schema text, include_recs boolean) RETURNS void AS
-$BODY$
- DECLARE
- src_oid oid;
- tbl_oid oid;
- func_oid oid;
- object text;
- buffer text;
- srctbl text;
- default_ text;
- column_ text;
- qry text;
- dest_qry text;
- v_def text;
- seqval bigint;
- sq_last_value bigint;
- sq_max_value bigint;
- sq_start_value bigint;
- sq_increment_by bigint;
- sq_min_value bigint;
- sq_cache_value bigint;
- sq_log_cnt bigint;
- sq_is_called boolean;
- sq_is_cycled boolean;
- sq_cycled char(10);
- BEGIN
- -- Assure that source_schema exists
- SELECT oid INTO src_oid FROM pg_namespace WHERE nspname = quote_ident(source_schema);
- IF NOT FOUND THEN
- RAISE NOTICE 'source schema % does not exist!', source_schema;
- RETURN;
- END IF;
-
- -- Refute that dest_schema exists and then create it
- PERFORM nspname FROM pg_namespace WHERE nspname = quote_ident(dest_schema);
- IF FOUND THEN
- RAISE NOTICE 'dest schema % already exists!', dest_schema;
- RETURN;
- END IF;
- EXECUTE 'CREATE SCHEMA ' || quote_ident(dest_schema) ;
-
- -- loop over sequences, creating the same one in the destination namespace, ...
- FOR object IN
- SELECT sequence_name::text FROM information_schema.sequences WHERE sequence_schema = quote_ident(source_schema)
- LOOP
- EXECUTE 'CREATE SEQUENCE ' || quote_ident(dest_schema) || '.' || quote_ident(object);
-
- -- ...storing the attributes of the sequence from the source namespace in local variables, ...
- srctbl := quote_ident(source_schema) || '.' || quote_ident(object);
- EXECUTE 'SELECT last_value, max_value, start_value, increment_by, min_value, cache_value, log_cnt, is_cycled, is_called FROM ' || srctbl || ';' INTO sq_last_value, sq_max_value, sq_start_value, sq_increment_by, sq_min_value, sq_cache_value, sq_log_cnt, sq_is_cycled, sq_is_called;
- IF sq_is_cycled THEN
- sq_cycled := 'CYCLE';
- ELSE
- sq_cycled := 'NO CYCLE';
- END IF;
- EXECUTE 'ALTER SEQUENCE ' || (dest_schema) || '.' || quote_ident(object) || ' INCREMENT BY ' || sq_increment_by || ' MINVALUE ' || sq_min_value || ' MAXVALUE ' || sq_max_value || ' START WITH ' || sq_start_value || ' RESTART ' || sq_min_value || ' CACHE ' || sq_cache_value || sq_cycled || ' ;' ;
-
- buffer := quote_ident(dest_schema) || '.' || quote_ident(object);
- IF include_recs THEN
- EXECUTE 'SELECT setval( ''' || buffer || ''', ' || sq_last_value || ', ' || sq_is_called || ');' ;
- ELSE
- EXECUTE 'SELECT setval( ''' || buffer || ''', ' || sq_start_value || ', ' || sq_is_called || ');' ;
- END IF;
- END LOOP;
-
- -- loop over tables in source schema,...
- FOR object IN
- SELECT TABLE_NAME::text FROM information_schema.tables WHERE table_schema = quote_ident(source_schema) AND table_type = 'BASE TABLE'
- LOOP
- -- ...creating the table in the destination schema, potentially including the records
- buffer := dest_schema || '.' || quote_ident(object);
- EXECUTE 'CREATE TABLE ' || buffer || '(LIKE ' || quote_ident(source_schema) || '.' || quote_ident(object) || ' INCLUDING ALL)';
- IF include_recs THEN
- EXECUTE 'INSERT INTO ' || buffer || ' SELECT * FROM ' || quote_ident(source_schema) || '.' || quote_ident(object) || ';';
- END IF;
-
- -- alter table, assuring the destination schema's table has:
- -- * the same defaults
- FOR column_, default_
- IN SELECT column_name::text, REPLACE(column_default::text, source_schema, dest_schema)
- FROM information_schema.COLUMNS
- WHERE table_schema = dest_schema AND TABLE_NAME = object AND column_default LIKE 'nextval(%' || quote_ident(source_schema) || '%::regclass)'
- LOOP
- EXECUTE 'ALTER TABLE ' || buffer || ' ALTER COLUMN ' || column_ || ' SET DEFAULT ' || default_;
- END LOOP;
- END LOOP;
-
- /* SELECT 'ALTER TABLE ' || quote_ident(dest_schema) || '.' || quote_ident(rn.relname) || ' ADD CONSTRAINT ' || quote_ident(ct.conname) || ' ' || pg_get_constraintdef(ct.oid) || ';' FROM pg_constraint ct JOIN pg_class rn ON rn.oid = ct.conrelid WHERE connamespace = src_oid AND rn.relkind = 'r' AND ct.contype = 'f' LIMIT 2; */
- -- apply all constraints on tables in destination schema
- FOR qry IN
- SELECT 'ALTER TABLE ' || quote_ident(dest_schema) || '.' || quote_ident(rn.relname) || ' ADD CONSTRAINT ' || quote_ident(ct.conname) || ' ' || REPLACE(pg_get_constraintdef(ct.oid), source_schema || '.', dest_schema || '.') || ';'
- FROM pg_constraint ct JOIN pg_class rn ON rn.oid = ct.conrelid WHERE connamespace = src_oid AND rn.relkind = 'r' AND ct.contype = 'f'
- LOOP
- EXECUTE qry;
- END LOOP;
-
- -- create views from source schema in destination schema
- FOR object IN
- SELECT table_name::text, view_definition FROM information_schema.views WHERE table_schema = quote_ident(source_schema)
- LOOP
- buffer := dest_schema || '.' || quote_ident(object);
- SELECT view_definition INTO v_def FROM information_schema.views WHERE table_schema = quote_ident(source_schema) AND table_name = quote_ident(object);
- EXECUTE 'CREATE OR REPLACE VIEW ' || buffer || ' AS ' || v_def || ';' ;
- END LOOP;
-
- FOR func_oid IN SELECT oid FROM pg_proc WHERE pronamespace = src_oid
- LOOP
- SELECT pg_get_functiondef(func_oid) INTO qry;
- SELECT replace(qry, source_schema, dest_schema) INTO dest_qry;
- EXECUTE dest_qry;
- END LOOP;
- RETURN;
- END;
-$BODY$ LANGUAGE plpgsql VOLATILE COST 100;
diff --git a/spec/lib/af83/cloning/clone_schema_spec.rb b/spec/lib/af83/cloning/clone_schema_spec.rb
new file mode 100644
index 000000000..3d541f3e9
--- /dev/null
+++ b/spec/lib/af83/cloning/clone_schema_spec.rb
@@ -0,0 +1,113 @@
+RSpec.describe AF83::SchemaCloner, type: :pg_catalog do
+ let( :source_schema ){ "source_schema" }
+ let( :target_schema ){ "target_schema" }
+ let( :child_table ){ "children" }
+ let( :parent_table ){ "parents" }
+
+ subject { described_class.new source_schema, target_schema }
+
+ context "after cloning" do
+ before do
+ create_schema_with_tables
+ subject.clone_schema
+ end
+
+ it "table information is correctly duplicated" do
+ expect(get_table_information(source_schema, child_table))
+ .to eq([{"table_schema"=>"source_schema",
+ "table_name"=>"children",
+ "table_type"=>"BASE TABLE",
+ "self_referencing_column_name"=>nil,
+ "reference_generation"=>nil,
+ "user_defined_type_catalog"=>nil,
+ "user_defined_type_schema"=>nil,
+ "user_defined_type_name"=>nil,
+ "is_insertable_into"=>"YES",
+ "is_typed"=>"NO",
+ "commit_action"=>nil}])
+
+ expect( get_table_information(target_schema, child_table))
+ .to eq([{"table_schema"=>"target_schema",
+ "table_name"=>"children",
+ "table_type"=>"BASE TABLE",
+ "self_referencing_column_name"=>nil,
+ "reference_generation"=>nil,
+ "user_defined_type_catalog"=>nil,
+ "user_defined_type_schema"=>nil,
+ "user_defined_type_name"=>nil,
+ "is_insertable_into"=>"YES",
+ "is_typed"=>"NO",
+ "commit_action"=>nil}])
+ end
+
+ it "table content is the same and sequences are synchronized" do
+ expect_same_content(parent_table)
+ expect_same_content(child_table)
+
+ expect_same_sequence_params("#{parent_table}_id_seq")
+ expect_same_sequence_params("#{child_table}_id_seq")
+ end
+
+ it "has correctly updated default values" do
+ child_table_pk_default = get_columns(target_schema, child_table)
+ .find{ |col| col["column_name"] == "id" }["column_default"]
+ expect( child_table_pk_default ).to eq("nextval('#{target_schema}.children_id_seq'::regclass)")
+ end
+
+ it "has the correct foreign keys" do
+ expect( get_foreign_keys(target_schema, child_table) )
+ .to eq([{
+ "constraint_name" => "children_parents",
+ "constraint_def" => "FOREIGN KEY (parents_id) REFERENCES target_schema.parents(id)"}])
+ end
+
+ xit "it has the correct unique keys UNTESTABLE SO FAR" do
+ insert source_schema, child_table, "#{parent_table}_id" => 1, some_key: 400
+ insert target_schema, child_table, "#{parent_table}_id" => 1, some_key: 400
+ reinsert_sql = "INSERT INTO #{source_schema}.#{child_table} (#{parent_table}_id, some_key) VALUES (1, 400)"
+ expect{ execute(reinsert_sql) rescue nil}.not_to change{ execute("SELECT COUNT(*) FROM #{source_schema}.#{child_table}") }
+
+ # expect{ insert(target_schema, child_table, "#{parent_table}_id" => 1, some_key: 400) }.to raise_error(ActiveRecord::RecordNotUnique)
+ end
+
+ it "inserts are independent" do
+ insert source_schema, child_table, "#{parent_table}_id" => 1, some_key: 400
+ insert target_schema, child_table, "#{parent_table}_id" => 1, some_key: 400
+ last_source = get_content(source_schema, child_table).last
+ last_target = get_content(target_schema, child_table).last
+
+ expect( last_source ).to eq("id"=>"3", "parents_id"=>"1", "some_key"=>"400", "is_orphan"=>"f")
+ expect( last_target ).to eq("id"=>"3", "parents_id"=>"1", "some_key"=>"400", "is_orphan"=>"f")
+ end
+
+ end
+
+ def create_schema_with_tables
+ execute <<-EOSQL
+ DROP SCHEMA IF EXISTS #{source_schema} CASCADE;
+ CREATE SCHEMA #{source_schema};
+
+ CREATE TABLE #{source_schema}.#{parent_table} (
+ id bigserial PRIMARY KEY
+ );
+ CREATE TABLE #{source_schema}.#{child_table} (
+ id bigserial PRIMARY KEY,
+ #{parent_table}_id bigint,
+ some_key bigint NOT NULL,
+ is_orphan boolean DEFAULT false
+ );
+
+ CREATE UNIQUE INDEX #{child_table}_some_key_idx ON #{source_schema}.#{child_table} (some_key);
+
+ ALTER TABLE #{source_schema}.#{child_table}
+ ADD CONSTRAINT #{child_table}_#{parent_table}
+ FOREIGN KEY( #{parent_table}_id ) REFERENCES #{source_schema}.#{parent_table}(id);
+
+ INSERT INTO #{source_schema}.#{parent_table} VALUES (DEFAULT);
+ INSERT INTO #{source_schema}.#{parent_table} VALUES (DEFAULT);
+ EOSQL
+ insert source_schema, child_table, "#{parent_table}_id" => 1, some_key: 200
+ insert source_schema, child_table, "#{parent_table}_id" => 2, some_key: 300, is_orphan: true
+ end
+
+end
diff --git a/spec/lib/af83/stored_procedure_spec.rb b/spec/lib/af83/stored_procedure_spec.rb
deleted file mode 100644
index 2530d7fc1..000000000
--- a/spec/lib/af83/stored_procedure_spec.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-require 'rails_helper'
-
-RSpec.describe StoredProcedures do
-
-
- before do
- described_class.create_stored_procedure(:clone_schema)
- end
-
- let( :source_schema_name ){ "parissudest201604" }
- let( :dest_schema_name ){ "#{source_schema_name}_v1"}
-
- context "Error cases" do
- it "raises an error if stored procedure does not exist" do
- expect{ described_class.invoke_stored_procedure(:idonotexist) }
- .to raise_error(ArgumentError, %r{no such stored procedure "idonotexist"})
- end
- end
-
-end
diff --git a/spec/lib/af83/stored_procedures/clone_schema_spec.rb b/spec/lib/af83/stored_procedures/clone_schema_spec.rb
deleted file mode 100644
index c387ddc7d..000000000
--- a/spec/lib/af83/stored_procedures/clone_schema_spec.rb
+++ /dev/null
@@ -1,167 +0,0 @@
-require 'spec_helper'
-
-include Support::PGCatalog
-
-RSpec.describe StoredProcedures do
- let( :source_schema ){ "source_schema" }
- let( :target_schema ){ "target_schema" }
- let( :child_table ){ "children" }
- let( :parent_table ){ "parents" }
-
- before do
- create_schema_with_tables
- StoredProcedures.create_stored_procedure :clone_schema
- end
-
- # :meta specs are not run, as they describe the testing method and not the application
- context "meta specs describe source schema's introspection", :meta do
- it "table information is correctly read" do
- expect(get_table_information(source_schema, child_table))
- .to eq([{"table_schema"=>"source_schema",
- "table_name"=>"children",
- "table_type"=>"BASE TABLE",
- "self_referencing_column_name"=>nil,
- "reference_generation"=>nil,
- "user_defined_type_catalog"=>nil,
- "user_defined_type_schema"=>nil,
- "user_defined_type_name"=>nil,
- "is_insertable_into"=>"YES",
- "is_typed"=>"NO",
- "commit_action"=>nil}])
-
- expect( get_table_information(target_schema, child_table) ).to be_empty
- end
-
- it "sequences are correctly read" do
- expect(get_sequences(source_schema, child_table))
- .to eq([{"sequence_name"=>"#{child_table}_id_seq",
- "last_value"=>"1",
- "start_value"=>"1",
- "increment_by"=>"1",
- "max_value"=>"9223372036854775807",
- "min_value"=>"1",
- "cache_value"=>"1",
- "log_cnt"=>"0",
- "is_cycled"=>"f",
- "is_called"=>"f"}])
-
- expect(get_sequences(source_schema, parent_table))
- .to eq([{"sequence_name"=>"#{parent_table}_id_seq",
- "last_value"=>"1",
- "start_value"=>"1",
- "increment_by"=>"1",
- "max_value"=>"9223372036854775807",
- "min_value"=>"1",
- "cache_value"=>"1",
- "log_cnt"=>"0",
- "is_cycled"=>"f",
- "is_called"=>"f"}])
- end
-
- it "shows foreign key constraints are correctly read" do
- expect( get_foreign_keys(source_schema, child_table) )
- .to eq([{
- "constraint_name" => "children_parents",
- "constraint_def" => "FOREIGN KEY (parents_id) REFERENCES source_schema.parents(id)"}])
- end
- end
-
- context "before cloning" do
- it "target schema does not exist" do
- expect( get_schema_oid(target_schema) ).to be_nil
- end
- end
-
- context "after cloning" do
- before do
- described_class.invoke_stored_procedure(:clone_schema, source_schema, target_schema, false)
- end
-
- it "target schema does exist" do
- expect( get_schema_oid(target_schema) ).not_to be_nil
- end
-
- it "table information is correctly read" do
- expect(get_table_information(source_schema, child_table))
- .to eq([{"table_schema"=>"source_schema",
- "table_name"=>"children",
- "table_type"=>"BASE TABLE",
- "self_referencing_column_name"=>nil,
- "reference_generation"=>nil,
- "user_defined_type_catalog"=>nil,
- "user_defined_type_schema"=>nil,
- "user_defined_type_name"=>nil,
- "is_insertable_into"=>"YES",
- "is_typed"=>"NO",
- "commit_action"=>nil}])
-
- expect( get_table_information(target_schema, child_table))
- .to eq([{"table_schema"=>"target_schema",
- "table_name"=>"children",
- "table_type"=>"BASE TABLE",
- "self_referencing_column_name"=>nil,
- "reference_generation"=>nil,
- "user_defined_type_catalog"=>nil,
- "user_defined_type_schema"=>nil,
- "user_defined_type_name"=>nil,
- "is_insertable_into"=>"YES",
- "is_typed"=>"NO",
- "commit_action"=>nil}])
- end
-
- it "has the correct sequences" do
- expect(get_sequences(target_schema, child_table))
- .to eq([{"sequence_name"=>"#{child_table}_id_seq",
- "last_value"=>"1",
- "start_value"=>"1",
- "increment_by"=>"1",
- "max_value"=>"9223372036854775807",
- "min_value"=>"1",
- "cache_value"=>"1",
- "log_cnt"=>"0",
- "is_cycled"=>"f",
- "is_called"=>"f"}])
-
- expect(get_sequences(target_schema, parent_table))
- .to eq([{"sequence_name"=>"#{parent_table}_id_seq",
- "last_value"=>"1",
- "start_value"=>"1",
- "increment_by"=>"1",
- "max_value"=>"9223372036854775807",
- "min_value"=>"1",
- "cache_value"=>"1",
- "log_cnt"=>"0",
- "is_cycled"=>"f",
- "is_called"=>"f"}])
- end
-
- it "has the correct foreign keys" do
- expect( get_foreign_keys(target_schema, child_table) )
- .to eq([{
- "constraint_name" => "children_parents",
- "constraint_def" => "FOREIGN KEY (parents_id) REFERENCES target_schema.parents(id)"}])
- end
-
- end
-
-end
-
-def create_schema_with_tables
- execute("CREATE SCHEMA IF NOT EXISTS #{source_schema}")
- execute <<-EOSQL
- DROP SCHEMA IF EXISTS #{source_schema} CASCADE;
- CREATE SCHEMA #{source_schema};
-
- CREATE TABLE #{source_schema}.#{parent_table} (
- id bigserial PRIMARY KEY
- );
- CREATE TABLE #{source_schema}.#{child_table} (
- id bigserial PRIMARY KEY,
- #{parent_table}_id bigint
- );
- ALTER TABLE #{source_schema}.#{child_table}
- ADD CONSTRAINT #{child_table}_#{parent_table}
- FOREIGN KEY( #{parent_table}_id ) REFERENCES #{source_schema}.#{parent_table}(id);
- EOSQL
-end
-
diff --git a/spec/models/clean_up_spec.rb b/spec/models/clean_up_spec.rb
index d6aa7652c..b4cf5e1af 100644
--- a/spec/models/clean_up_spec.rb
+++ b/spec/models/clean_up_spec.rb
@@ -6,6 +6,86 @@ RSpec.describe CleanUp, :type => :model do
it { should validate_presence_of(:date_type) }
it { should belong_to(:referential) }
+ context '#exclude_dates_in_overlapping_period with :before date_type' do
+ let(:time_table) { create(:time_table) }
+ let(:period) { time_table.periods[0] }
+ let(:cleaner) { create(:clean_up, date_type: :before) }
+
+ it 'should add exclude date into period for overlapping period' do
+ days_in_period = (period.period_start..period.period_end).count
+ cleaner.begin_date = period.period_end
+
+ expect { cleaner.exclude_dates_in_overlapping_period(period) }.to change {
+ time_table.dates.where(in_out: false).count
+ }.by(days_in_period - 1)
+ end
+
+ it 'should not add exclude date if no overlapping found' do
+ cleaner.begin_date = period.period_start
+ expect { cleaner.exclude_dates_in_overlapping_period(period) }.to_not change {
+ time_table.dates.where(in_out: false).count
+ }
+ end
+ end
+
+ context '#exclude_dates_in_overlapping_period with :after date_type' do
+ let(:time_table) { create(:time_table) }
+ let(:period) { time_table.periods[0] }
+ let(:cleaner) { create(:clean_up, date_type: :after) }
+
+ it 'should add exclude date into period for overlapping period' do
+ days_in_period = (period.period_start..period.period_end).count
+ cleaner.begin_date = period.period_start + 1.day
+ expect { cleaner.exclude_dates_in_overlapping_period(period) }.to change {
+ time_table.dates.where(in_out: false).count
+ }.by(days_in_period - 2)
+ end
+
+ it 'should not add exclude date if no overlapping found' do
+ cleaner.begin_date = period.period_end
+ expect { cleaner.exclude_dates_in_overlapping_period(period) }.to_not change {
+ time_table.dates.where(in_out: false).count
+ }
+ end
+ end
+
+ context '#exclude_dates_in_overlapping_period with :between date_type' do
+ let(:time_table) { create(:time_table) }
+ let(:period) { time_table.periods[0] }
+ let(:cleaner) { create(:clean_up, date_type: :between, begin_date: period.period_start + 3.day, end_date: period.period_end) }
+
+ it 'should add exclude date into period for overlapping period' do
+ expected_day_out = (cleaner.begin_date..cleaner.end_date).count
+ expect { cleaner.exclude_dates_in_overlapping_period(period) }.to change {
+ time_table.dates.where(in_out: false).count
+ }.by(expected_day_out)
+ end
+
+ it 'should not add exclude date if no overlapping found' do
+ cleaner.begin_date = period.period_end + 1.day
+ cleaner.end_date = cleaner.begin_date + 1.day
+
+ expect { cleaner.exclude_dates_in_overlapping_period(period) }.to_not change {
+ time_table.dates.where(in_out: false).count
+ }
+ end
+ end
+
+ context '#overlapping_periods' do
+ let(:cleaner) { create(:clean_up, date_type: :before, end_date: nil) }
+ let(:time_table) { create(:time_table) }
+
+ it 'should detect overlapping periods' do
+ cleaner.begin_date = time_table.periods[0].period_start
+ expect(cleaner.overlapping_periods).to include(time_table.periods[0])
+ end
+
+ it 'should not return none overlapping periods' do
+ cleaner.begin_date = time_table.periods[0].period_start - 1.day
+ expect(cleaner.overlapping_periods).to_not include(time_table.periods[0])
+ end
+ end
+
context '#clean' do
let(:cleaner) { create(:clean_up, date_type: :before) }
diff --git a/spec/support/bare_sql.rb b/spec/support/bare_sql.rb
new file mode 100644
index 000000000..03a50ef77
--- /dev/null
+++ b/spec/support/bare_sql.rb
@@ -0,0 +1,58 @@
+module Support
+ module BareSQL
+
+ def insert(schema, table, values)
+ execute "INSERT INTO #{schema}.#{table} (#{_keys(values)}) VALUES (#{_values values})"
+ end
+
+ def execute(sql)
+ base_connection.execute(sql)
+ end
+
+ def expect_same_content(table_name)
+ expected_content = get_content(source_schema, table_name)
+ actual_content = get_content(target_schema, table_name)
+ expect( actual_content ).to eq(expected_content)
+ end
+
+ def expect_same_sequence_params(sequence_name)
+ expected_seq = Hash.without(get_sequences(source_schema, sequence_name).first, 'log_cnt')
+ actual_seq = Hash.without(get_sequences(target_schema, sequence_name).first, 'log_cnt')
+ expect( actual_seq ).to eq(expected_seq)
+ end
+
+ def get_content(schema_name, table_name)
+ execute("SELECT * FROM #{schema_name}.#{table_name}").to_a
+ end
+
+ private
+
+ def base_connection
+ ActiveRecord::Base.connection
+ end
+
+ def _keys(values)
+ values.keys.map(&:to_s).join(", ")
+ end
+
+ def _values(values)
+ values
+ .values
+ .map(&method(:_format))
+ .join(', ')
+ end
+
+ def _format(val)
+ case val
+ when String
+ "'#{val}'"
+ when TrueClass
+ "'t'"
+ when FalseClass
+ "'f'"
+ else
+ val.to_s
+ end
+ end
+ end
+end
diff --git a/spec/support/hash.rb b/spec/support/hash.rb
new file mode 100644
index 000000000..ec9a2f895
--- /dev/null
+++ b/spec/support/hash.rb
@@ -0,0 +1,6 @@
+class << Hash
+ def without(hash, *keys)
+ nk = hash.keys - keys
+ Hash[*nk.zip(hash.values_at(*nk)).flatten]
+ end
+end
diff --git a/spec/support/pg_catalog.rb b/spec/support/pg_catalog.rb
index bb61adba5..ca02f2550 100644
--- a/spec/support/pg_catalog.rb
+++ b/spec/support/pg_catalog.rb
@@ -1,11 +1,12 @@
+require_relative 'bare_sql'
module Support
module PGCatalog
- # TODO: Check what of the follwowing can be done with ActiveRecord. E.g.
- # @connection.foreign_keys(table)...
+ include Support::BareSQL
def get_columns(schema_name, table_name)
- execute("SELECT * from information_schema.columns WHERE table_name = '#{table_name}' AND table_schema = '#{schema_name}'")
+ execute("SELECT column_name, column_default FROM information_schema.columns WHERE table_name = '#{table_name}' AND table_schema = '#{schema_name}'").to_a
end
+
def get_foreign_keys(schema_oid, table_name)
schema_oid = get_schema_oid(schema_oid) unless Integer === schema_oid
return [] unless schema_oid
@@ -20,11 +21,8 @@ module Support
.first
end
- def get_sequences(schema_name, table_name)
- sequences = execute <<-EOSQL
- SELECT sequence_name FROM information_schema.sequences
- WHERE sequence_schema = '#{schema_name}' AND sequence_name LIKE '#{table_name}%'
- EOSQL
+ def get_sequences(schema_name, sequence_name)
+ sequences = execute(sequence_query(schema_name, sequence_name))
sequences.values.flatten.map do | sequence |
execute "SELECT * from #{schema_name}.#{sequence}"
end.flat_map(&:to_a)
@@ -38,39 +36,20 @@ module Support
private
- def base_connection
- ActiveRecord::Base.connection
- end
-
- def execute(sql)
- base_connection.execute(sql)
- end
def foreign_key_query(schema_oid, table_name)
- key = [:foreign_key_query, schema_oid, table_name]
- get_or_create_query(key){ <<-EOQ
- SELECT ct.conname AS constraint_name, pg_get_constraintdef(ct.oid) AS constraint_def
- FROM pg_constraint ct JOIN pg_class rn ON rn.oid = ct.conrelid
- WHERE connamespace = #{schema_oid} AND rn.relname = '#{table_name}' AND rn.relkind = 'r' AND ct.contype = 'f'
- EOQ
- }
- end
-
- def sequence_properties_query(schema_name, sequence_name)
- key = [:sequence_properies_query, schema_name, sequence_name]
- get_or_create_query(key){ <<-EOQ
- Coming Soon
- EOQ
- }
-
- end
-
- def get_or_create_query(query_key, &query_value)
- queries.fetch(query_key){ queries[query_key] = query_value.() }
+ <<-EOQ
+ SELECT ct.conname AS constraint_name, pg_get_constraintdef(ct.oid) AS constraint_def
+ FROM pg_constraint ct JOIN pg_class rn ON rn.oid = ct.conrelid
+ WHERE connamespace = #{schema_oid} AND rn.relname = '#{table_name}' AND rn.relkind = 'r' AND ct.contype = 'f'
+ EOQ
end
- def queries
- @__queries__ ||= {}
+ def sequence_query(schema_name, sequence_name)
+ <<-EOQ
+ SELECT sequence_name FROM information_schema.sequences
+ WHERE sequence_schema = '#{schema_name}' AND sequence_name = '#{sequence_name}'
+ EOQ
end
def without_keys(*keys)
@@ -82,3 +61,7 @@ module Support
end
end
end
+
+RSpec.configure do | conf |
+ conf.include Support::PGCatalog, type: :pg_catalog
+end
diff --git a/spec/workers/referential_cloning_worker_spec.rb b/spec/workers/referential_cloning_worker_spec.rb
index 85d771742..52ed8913b 100644
--- a/spec/workers/referential_cloning_worker_spec.rb
+++ b/spec/workers/referential_cloning_worker_spec.rb
@@ -9,37 +9,29 @@ RSpec.describe ReferentialCloningWorker do
let( :worker ){ described_class.new }
+ def make_referential(schema_name)
+ return OpenStruct.new( slug: schema_name )
+ end
let( :source_schema ){ "source_schema" }
- let( :target_schema ){ "#{source_schema}_tmp" }
- let( :referential_cloning ){ OpenStruct.new(source_referential: OpenStruct.new(slug: source_schema)) }
+ let( :target_schema ){ "target_schema" }
+ let( :referential_cloning ){ OpenStruct.new(source_referential: make_referential(source_schema),
+ target_referential: make_referential(target_schema)) }
+ let( :cloner ){ 'cloner' }
+
before do
expect( ReferentialCloning ).to receive(:find).with(id).and_return(referential_cloning)
- expect( StoredProcedures )
- .to receive(:invoke_stored_procedure)
- .with(:clone_schema, source_schema, target_schema, true)
-
- expect( worker ).to receive(:execute_sql).with( "DROP SCHEMA #{source_schema} CASCADE;" )
+ expect( AF83::SchemaCloner ).to receive(:new).with( source_schema, target_schema ).and_return(cloner)
+ expect( cloner ).to receive(:clone_schema)
expect( referential_cloning ).to receive(:run!)
end
it "invokes the correct stored procedure, updates the database and the AASM" do
- expect( worker ).to receive(:execute_sql).with( "ALTER SCHEMA #{target_schema} RENAME TO #{source_schema};" )
expect( referential_cloning ).to receive(:successful!)
worker.perform(id)
end
-
- it "handles failure correctly" do
- expect( worker )
- .to receive(:execute_sql)
- .with( "ALTER SCHEMA #{target_schema} RENAME TO #{source_schema};" )
- .and_raise(RuntimeError)
-
- expect( referential_cloning ).to receive(:failed!)
- worker.perform(id)
- end
end
-
+
end