defmodule Csv2sql do
  def main(args) do
    Csv2sql.Helpers.greet()
    # Load configuration variables dynamically for escripts. This is required
    # because configuration variables are otherwise frozen to whatever they were
    # when the escript was built and cannot be changed later.
    dashboard = update_config(args)

    # Start the supervision tree
    {:ok, sup_pid} = Csv2sql.Application.start(:no_args, :no_args)

    # Wait for the run to finish and then stop the supervision tree.
    # This is done in a separate Task so we can reply to the caller (the dashboard GUI)
    # immediately after the supervision tree has started successfully.
    Task.start(fn -> wait_for_finish(sup_pid) end)

    # If the error tracker server is not running, start it.
    # This branch executes the first time the app is started from the "dashboard" app.
    if !Process.whereis(:error_tracker), do: Csv2sql.ErrorTracker.start_link(:no_args)

    # Register the main supervisor pid with the error tracker.
    # The error tracker will stop the supervisor in case of errors.
    Csv2sql.ErrorTracker.register_supervisor(sup_pid)

    unless dashboard do
      # An escript exits as soon as main() returns; this receive keeps the
      # escript running when the app is used without the dashboard.
      receive do
        {:wait} ->
          System.halt(0)
      end
    end

    sup_pid
  end

  defp wait_for_finish(sup_pid) do
    Csv2sql.Observer.get_stage()
    |> case do
      :error ->
        nil

      :finish ->
        # Finished; stop the supervisor after a second
        :timer.sleep(1000)
        Supervisor.stop(sup_pid)

      _ ->
        wait_for_finish(sup_pid)
    end
  end

  defp update_config(args) do
    {opts, _, _} =
      OptionParser.parse(args,
        strict: [
          dashboard: :boolean,
          schema_file_path: :string,
          source_csv_directory: :string,
          imported_csv_directory: :string,
          validated_csv_directory: :string,
          skip_make_schema: :boolean,
          skip_insert_schema: :boolean,
          skip_insert_data: :boolean,
          skip_validate_import: :boolean,
          db_connection_string: :string,
          connection_socket: :string,
          varchar_limit: :integer,
          schema_infer_chunk_size: :integer,
          worker_count: :integer,
          db_worker_count: :integer,
          insertion_chunk_size: :integer,
          job_count_limit: :integer,
          log: :string,
          timeout: :integer,
          connect_timeout: :integer,
          pool_size: :integer,
          queue_target: :integer,
          queue_interval: :integer
        ]
      )
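    # Illustrative invocation only; the flag values and paths below are examples,
    # not defaults. OptionParser maps each --kebab-case switch to the snake_case
    # key declared in the strict list above:
    #   ./csv2sql --source-csv-directory ./csvs --varchar-limit 200 \
    #     --db-connection-string "mysql:root:secret@localhost/my_db" --skip-validate-import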

    source_csv_directory = opts[:source_csv_directory] || "."
    schema_file_path = opts[:schema_file_path] || source_csv_directory
    imported_csv_directory = opts[:imported_csv_directory] || "#{source_csv_directory}/imported"

    validated_csv_directory =
      opts[:validated_csv_directory] || "#{source_csv_directory}/validated"

    make_schema = if opts[:skip_make_schema], do: false, else: true
    insert_schema = if opts[:skip_insert_schema], do: false, else: true
    insert_data = if opts[:skip_insert_data], do: false, else: true
    validate_import = if opts[:skip_validate_import], do: false, else: true

    [db_type, username, password, host, database_name] =
      if opts[:db_connection_string] do
        str = opts[:db_connection_string]
        [db_type, username, tmp] = String.split(str, ":")
        [password, tmp] = String.split(tmp, "@")
        [host, database_name] = String.split(tmp, "/")
        [db_type, username, password, host, database_name]
      end
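    # Illustrative example only (values are assumptions): the string
    # "mysql:root:secret@localhost/my_db" binds db_type = "mysql", username = "root",
    # password = "secret", host = "localhost" and database_name = "my_db".
    # The splits above assume the password itself contains no ":" or "@";
    # when --db-connection-string is omitted, this match raises a MatchError.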

    connection_socket = opts[:connection_socket] || "/var/run/mysqld/mysqld.sock"

    varchar_limit = opts[:varchar_limit] || 100
    schema_infer_chunk_size = opts[:schema_infer_chunk_size] || 100
    worker_count = opts[:worker_count] || 10
    db_worker_count = opts[:db_worker_count] || 15
    insertion_chunk_size = opts[:insertion_chunk_size] || 100
    job_count_limit = opts[:job_count_limit] || 10
    log = if opts[:log], do: String.to_atom(opts[:log]), else: false
    timeout = opts[:timeout] || 60_000
    connect_timeout = opts[:connect_timeout] || 60_000
    pool_size = opts[:pool_size] || 20
    queue_target = opts[:queue_target] || 5000
    queue_interval = opts[:queue_interval] || 1000

    repo_config = [
      username: username,
      password: password,
      host: host,
      insertion_chunk_size: insertion_chunk_size,
      job_count_limit: job_count_limit,
      log: log,
      timeout: timeout,
      connect_timeout: connect_timeout,
      pool_size: pool_size,
      queue_target: queue_target,
      queue_interval: queue_interval
    ]

    repo_config =
      if db_type == "postgres" do
        {Csv2sql.PostgreSQLRepo, repo_config ++ [database: database_name]}
      else
        {Csv2sql.MySQLRepo,
         repo_config ++
           [
             database_name: database_name,
             socket: connection_socket
           ]}
      end
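    # Illustrative shape only (values are assumptions): with the MySQL branch above,
    # repo_config ends up as a tuple such as
    #   {Csv2sql.MySQLRepo,
    #    [username: "root", password: "secret", host: "localhost", ...,
    #     database_name: "my_db", socket: "/var/run/mysqld/mysqld.sock"]}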

    current_config = [
      csv2sql: [
        {Csv2sql.SchemaMaker,
         [
           varchar_limit: varchar_limit,
           schema_file_path: schema_file_path,
           schema_infer_chunk_size: schema_infer_chunk_size
         ]},
        {Csv2sql.MainServer,
         [
           worker_count: worker_count,
           db_worker_count: db_worker_count,
           source_csv_directory: source_csv_directory,
           imported_csv_directory: imported_csv_directory,
           validated_csv_directory: validated_csv_directory,
           set_validate: validate_import,
           db_type: db_type
         ]},
        {Csv2sql.Worker,
         [
           set_make_schema: make_schema,
           set_insert_schema: insert_schema,
           set_insert_data: insert_data
         ]},
        repo_config
      ]
    ]

    Application.put_all_env(current_config)

    opts[:dashboard]
  end

  def get_repo() do
    db_type = Application.get_env(:csv2sql, Csv2sql.MainServer)[:db_type]

    if db_type == "postgres", do: Csv2sql.PostgreSQLRepo, else: Csv2sql.MySQLRepo
  end
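  # Illustrative usage only (assumes the selected repo is a started Ecto repo
  # backed by a SQL adapter):
  #   Ecto.Adapters.SQL.query!(Csv2sql.get_repo(), "SELECT 1")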

  def get_db_type() do
    if Application.get_env(:csv2sql, Csv2sql.MainServer)[:db_type] == "postgres",
      do: :postgres,
      else: :mysql
  end
end