git-svn-id: http://svn-commit.rubyonrails.org/rails/trunk@7492 5ecf4fe2-1ee6-0310-87b1-e25e094e27de
@@ -1,5 +1,7 @@
*SVN*

* Extract Firebird, FrontBase, and OpenBase adapters into gems. #9508, #9509, #9510 [Jeremy Kemper]

* RubyGem database adapters: expects a gem named activerecord-<database>-adapter with active_record/connection_adapters/<database>_adapter.rb in its load path. [Jeremy Kemper]

* Added block-acceptance to JavaScriptHelper#javascript_tag #7527 [BobSilva/tarmo/rmm5t]

@@ -1,728 +0,0 @@
# Author: Ken Kunz <kennethkunz@gmail.com>

require 'active_record/connection_adapters/abstract_adapter'

module FireRuby # :nodoc: all
NON_EXISTENT_DOMAIN_ERROR = "335544569"
class Database
def self.db_string_for(config)
unless config.has_key?(:database)
raise ArgumentError, "No database specified. Missing argument: database."
end
host_string = config.values_at(:host, :service, :port).compact.first(2).join("/") if config[:host]
[host_string, config[:database]].join(":")
end

def self.new_from_config(config)
db = new db_string_for(config)
db.character_set = config[:charset]
return db
end
end
end

module ActiveRecord
class << Base
def firebird_connection(config) # :nodoc:
require_library_or_gem 'fireruby'
unless defined? FireRuby::SQLType
raise AdapterNotFound,
'The Firebird adapter requires FireRuby version 0.4.0 or greater; you appear ' <<
'to be running an older version -- please update FireRuby (gem install fireruby).'
end
config.symbolize_keys!
db = FireRuby::Database.new_from_config(config)
connection_params = config.values_at(:username, :password)
connection = db.connect(*connection_params)
ConnectionAdapters::FirebirdAdapter.new(connection, logger, connection_params)
end
end

module ConnectionAdapters
|
||||
class FirebirdColumn < Column # :nodoc:
|
||||
VARCHAR_MAX_LENGTH = 32_765
|
||||
BLOB_MAX_LENGTH = 32_767
|
||||
|
||||
def initialize(name, domain, type, sub_type, length, precision, scale, default_source, null_flag)
|
||||
@firebird_type = FireRuby::SQLType.to_base_type(type, sub_type).to_s
|
||||
|
||||
super(name.downcase, nil, @firebird_type, !null_flag)
|
||||
|
||||
@default = parse_default(default_source) if default_source
|
||||
@limit = decide_limit(length)
|
||||
@domain, @sub_type, @precision, @scale = domain, sub_type, precision, scale.abs
|
||||
end
|
||||
|
||||
def type
|
||||
if @domain =~ /BOOLEAN/
|
||||
:boolean
|
||||
elsif @type == :binary and @sub_type == 1
|
||||
:text
|
||||
else
|
||||
@type
|
||||
end
|
||||
end
|
||||
|
||||
def default
|
||||
type_cast(decide_default) if @default
|
||||
end
|
||||
|
||||
def self.value_to_boolean(value)
|
||||
%W(#{FirebirdAdapter.boolean_domain[:true]} true t 1).include? value.to_s.downcase
|
||||
end
|
||||
|
||||
private
|
||||
def parse_default(default_source)
|
||||
default_source =~ /^\s*DEFAULT\s+(.*)\s*$/i
|
||||
return $1 unless $1.upcase == "NULL"
|
||||
end
|
||||
|
||||
def decide_default
|
||||
if @default =~ /^'?(\d*\.?\d+)'?$/ or
|
||||
@default =~ /^'(.*)'$/ && [:text, :string, :binary, :boolean].include?(type)
|
||||
$1
|
||||
else
|
||||
firebird_cast_default
|
||||
end
|
||||
end
|
||||
|
||||
# Submits a _CAST_ query to the database, casting the default value to the specified SQL type.
|
||||
# This enables Firebird to provide an actual value when context variables are used as column
|
||||
# defaults (such as CURRENT_TIMESTAMP).
|
||||
def firebird_cast_default
|
||||
sql = "SELECT CAST(#{@default} AS #{column_def}) FROM RDB$DATABASE"
|
||||
if connection = Base.active_connections.values.detect { |conn| conn && conn.adapter_name == 'Firebird' }
|
||||
connection.execute(sql).to_a.first['CAST']
|
||||
else
|
||||
raise ConnectionNotEstablished, "No Firebird connections established."
|
||||
end
|
||||
end
|
||||
|
||||
def decide_limit(length)
|
||||
if text? or number?
|
||||
length
|
||||
elsif @firebird_type == 'BLOB'
|
||||
BLOB_MAX_LENGTH
|
||||
end
|
||||
end
|
||||
|
||||
def column_def
|
||||
case @firebird_type
|
||||
when 'BLOB' then "VARCHAR(#{VARCHAR_MAX_LENGTH})"
|
||||
when 'CHAR', 'VARCHAR' then "#{@firebird_type}(#{@limit})"
|
||||
when 'NUMERIC', 'DECIMAL' then "#{@firebird_type}(#{@precision},#{@scale.abs})"
|
||||
when 'DOUBLE' then "DOUBLE PRECISION"
|
||||
else @firebird_type
|
||||
end
|
||||
end
|
||||
|
||||
def simplified_type(field_type)
|
||||
if field_type == 'TIMESTAMP'
|
||||
:datetime
|
||||
else
|
||||
super
|
||||
end
|
||||
end
|
||||
end

# The Firebird adapter relies on the FireRuby[http://rubyforge.org/projects/fireruby/]
# extension, version 0.4.0 or later (available as a gem or from
# RubyForge[http://rubyforge.org/projects/fireruby/]). FireRuby works with
# Firebird 1.5.x on Linux, OS X and Win32 platforms.
#
# == Usage Notes
#
# === Sequence (Generator) Names
# The Firebird adapter supports the same approach adopted for the Oracle
# adapter. See ActiveRecord::Base#set_sequence_name for more details.
#
# Note that in general there is no need to create a <tt>BEFORE INSERT</tt>
# trigger corresponding to a Firebird sequence generator when using
# ActiveRecord. In other words, you don't have to try to make Firebird
# simulate an <tt>AUTO_INCREMENT</tt> or +IDENTITY+ column. When saving a
# new record, ActiveRecord pre-fetches the next sequence value for the table
# and explicitly includes it in the +INSERT+ statement. (Pre-fetching the
# next primary key value is the only reliable method for the Firebird
# adapter to report back the +id+ after a successful insert.)
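#
# For illustration only (this example is not from the original source), a
# model whose table uses a non-default generator could declare it as
# follows, assuming a generator named <tt>accounts_seq</tt> exists in the
# database:
#
#   class Account < ActiveRecord::Base
#     set_sequence_name "accounts_seq"
#   end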
#
# === BOOLEAN Domain
# Firebird 1.5 does not provide a native +BOOLEAN+ type. But you can easily
# define a +BOOLEAN+ _domain_ for this purpose, e.g.:
#
#   CREATE DOMAIN D_BOOLEAN AS SMALLINT CHECK (VALUE IN (0, 1) OR VALUE IS NULL);
#
# When the Firebird adapter encounters a column that is based on a domain
# that includes "BOOLEAN" in the domain name, it will attempt to treat
# the column as a +BOOLEAN+.
#
# By default, the Firebird adapter will assume that the BOOLEAN domain is
# defined as above. This can be modified if needed. For example, if you
# have a legacy schema with the following +BOOLEAN+ domain defined:
#
#   CREATE DOMAIN BOOLEAN AS CHAR(1) CHECK (VALUE IN ('T', 'F'));
#
# ...you can add the following line to your <tt>environment.rb</tt> file:
#
#   ActiveRecord::ConnectionAdapters::FirebirdAdapter.boolean_domain = { :true => 'T', :false => 'F' }
#
# === BLOB Elements
# The Firebird adapter currently provides only limited support for +BLOB+
# columns. You cannot currently retrieve or insert a +BLOB+ as an IO stream.
# When selecting a +BLOB+, the entire element is converted into a String.
# When inserting or updating a +BLOB+, the entire value is included in-line
# in the SQL statement, limiting you to values <= 32KB in size.
#
# === Column Name Case Semantics
# Firebird and ActiveRecord have somewhat conflicting case semantics for
# column names.
#
# [*Firebird*]
#   The standard practice is to use unquoted column names, which can be
#   thought of as case-insensitive. (In fact, Firebird converts them to
#   uppercase.) Quoted column names (not typically used) are case-sensitive.
# [*ActiveRecord*]
#   Attribute accessors corresponding to column names are case-sensitive.
#   The defaults for primary key and inheritance columns are lowercase, and
#   in general, people use lowercase attribute names.
#
# In order to map between the differing semantics in a way that conforms
# to common usage for both Firebird and ActiveRecord, uppercase column names
# in Firebird are converted to lowercase attribute names in ActiveRecord,
# and vice-versa. Mixed-case column names retain their case in both
# directions. Lowercase (quoted) Firebird column names are not supported.
# This is similar to the solutions adopted by other adapters.
#
# In general, the best approach is to use unquoted (case-insensitive) column
# names in your Firebird DDL (or if you must quote, use uppercase column
# names). These will correspond to lowercase attributes in ActiveRecord.
#
# For example, a Firebird table based on the following DDL:
#
#   CREATE TABLE products (
#     id BIGINT NOT NULL PRIMARY KEY,
#     "TYPE" VARCHAR(50),
#     name VARCHAR(255) );
#
# ...will correspond to an ActiveRecord model class called +Product+ with
# the following attributes: +id+, +type+, +name+.
#
# ==== Quoting <tt>"TYPE"</tt> and other Firebird reserved words:
# In ActiveRecord, the default inheritance column name is +type+. The word
# _type_ is a Firebird reserved word, so it must be quoted in any Firebird
# SQL statements. Because of the case mapping described above, you should
# always reference this column using quoted-uppercase syntax
# (<tt>"TYPE"</tt>) within Firebird DDL or other SQL statements (as in the
# example above). This holds true for any other Firebird reserved words used
# as column names as well.
#
# === Migrations
# The Firebird Adapter now supports Migrations.
#
# ==== Create/Drop Table and Sequence Generators
# Creating or dropping a table will automatically create/drop a
# corresponding sequence generator, using the default naming convention.
# You can specify a different name using the <tt>:sequence</tt> option; no
# generator is created if <tt>:sequence</tt> is set to +false+.
#
# ==== Rename Table
# The Firebird #rename_table Migration should be used with caution.
# Firebird 1.5 lacks built-in support for this feature, so it is
# implemented by making a copy of the original table (including column
# definitions, indexes and data records), and then dropping the original
# table. Constraints and Triggers are _not_ properly copied, so avoid
# this method if your original table includes constraints (other than
# the primary key) or triggers. (Consider manually copying your table
# or using a view instead.)
#
# == Connection Options
# The following options are supported by the Firebird adapter. None of the
# options have default values.
#
# <tt>:database</tt>::
#   <i>Required option.</i> Specifies one of: (i) a Firebird database alias;
#   (ii) the full path of a database file; _or_ (iii) a full Firebird
#   connection string. <i>Do not specify <tt>:host</tt>, <tt>:service</tt>
#   or <tt>:port</tt> as separate options when using a full connection
#   string.</i>
# <tt>:host</tt>::
#   Set to <tt>"remote.host.name"</tt> for remote database connections.
#   May be omitted for local connections if a full database path is
#   specified for <tt>:database</tt>. Some platforms require a value of
#   <tt>"localhost"</tt> for local connections when using a Firebird
#   database _alias_.
# <tt>:service</tt>::
#   Specifies a service name for the connection. Only used if <tt>:host</tt>
#   is provided. Required when connecting to a non-standard service.
# <tt>:port</tt>::
#   Specifies the connection port. Only used if <tt>:host</tt> is provided
#   and <tt>:service</tt> is not. Required when connecting to a non-standard
#   port and <tt>:service</tt> is not defined.
# <tt>:username</tt>::
#   Specifies the database user. May be omitted or set to +nil+ (together
#   with <tt>:password</tt>) to use the underlying operating system user
#   credentials on supported platforms.
# <tt>:password</tt>::
#   Specifies the database password. Must be provided if <tt>:username</tt>
#   is explicitly specified; should be omitted if OS user credentials
#   are being used.
# <tt>:charset</tt>::
#   Specifies the character set to be used by the connection. Refer to
#   Firebird documentation for valid options.
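#
# For illustration only (this example is not part of the original
# documentation), a <tt>config/database.yml</tt> entry using these options
# might look like the following, assuming a local database file at the path
# shown and the default SYSDBA credentials:
#
#   development:
#     adapter:  firebird
#     database: /var/lib/firebird/data/app_development.fdb
#     username: sysdba
#     password: masterkey
#     charset:  UNICODE_FSS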
class FirebirdAdapter < AbstractAdapter
|
||||
TEMP_COLUMN_NAME = 'AR$TEMP_COLUMN'
|
||||
|
||||
@@boolean_domain = { :name => "d_boolean", :type => "smallint", :true => 1, :false => 0 }
|
||||
cattr_accessor :boolean_domain
|
||||
|
||||
def initialize(connection, logger, connection_params = nil)
|
||||
super(connection, logger)
|
||||
@connection_params = connection_params
|
||||
end
|
||||
|
||||
def adapter_name # :nodoc:
|
||||
'Firebird'
|
||||
end
|
||||
|
||||
def supports_migrations? # :nodoc:
|
||||
true
|
||||
end
|
||||
|
||||
def native_database_types # :nodoc:
|
||||
{
|
||||
:primary_key => "BIGINT NOT NULL PRIMARY KEY",
|
||||
:string => { :name => "varchar", :limit => 255 },
|
||||
:text => { :name => "blob sub_type text" },
|
||||
:integer => { :name => "bigint" },
|
||||
:decimal => { :name => "decimal" },
|
||||
:numeric => { :name => "numeric" },
|
||||
:float => { :name => "float" },
|
||||
:datetime => { :name => "timestamp" },
|
||||
:timestamp => { :name => "timestamp" },
|
||||
:time => { :name => "time" },
|
||||
:date => { :name => "date" },
|
||||
:binary => { :name => "blob sub_type 0" },
|
||||
:boolean => boolean_domain
|
||||
}
|
||||
end
|
||||
|
||||
# Returns true for Firebird adapter (since Firebird requires primary key
|
||||
# values to be pre-fetched before insert). See also #next_sequence_value.
|
||||
def prefetch_primary_key?(table_name = nil)
|
||||
true
|
||||
end
|
||||
|
||||
def default_sequence_name(table_name, primary_key = nil) # :nodoc:
|
||||
"#{table_name}_seq"
|
||||
end
|
||||
|
||||
|
||||
# QUOTING ==================================================
|
||||
|
||||
def quote(value, column = nil) # :nodoc:
|
||||
if [Time, DateTime].include?(value.class)
|
||||
"CAST('#{value.strftime("%Y-%m-%d %H:%M:%S")}' AS TIMESTAMP)"
|
||||
else
|
||||
super
|
||||
end
|
||||
end
|
||||
|
||||
def quote_string(string) # :nodoc:
|
||||
string.gsub(/'/, "''")
|
||||
end
|
||||
|
||||
def quote_column_name(column_name) # :nodoc:
|
||||
%Q("#{ar_to_fb_case(column_name.to_s)}")
|
||||
end
|
||||
|
||||
def quoted_true # :nodoc:
|
||||
quote(boolean_domain[:true])
|
||||
end
|
||||
|
||||
def quoted_false # :nodoc:
|
||||
quote(boolean_domain[:false])
|
||||
end
|
||||
|
||||
|
||||
# CONNECTION MANAGEMENT ====================================
|
||||
|
||||
def active? # :nodoc:
|
||||
not @connection.closed?
|
||||
end
|
||||
|
||||
def disconnect! # :nodoc:
|
||||
@connection.close rescue nil
|
||||
end
|
||||
|
||||
def reconnect! # :nodoc:
|
||||
disconnect!
|
||||
@connection = @connection.database.connect(*@connection_params)
|
||||
end
|
||||
|
||||
|
||||
# DATABASE STATEMENTS ======================================
|
||||
|
||||
def select_all(sql, name = nil) # :nodoc:
|
||||
select(sql, name)
|
||||
end
|
||||
|
||||
def select_one(sql, name = nil) # :nodoc:
|
||||
select(sql, name).first
|
||||
end
|
||||
|
||||
def execute(sql, name = nil, &block) # :nodoc:
|
||||
log(sql, name) do
|
||||
if @transaction
|
||||
@connection.execute(sql, @transaction, &block)
|
||||
else
|
||||
@connection.execute_immediate(sql, &block)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) # :nodoc:
|
||||
execute(sql, name)
|
||||
id_value
|
||||
end
|
||||
|
||||
alias_method :update, :execute
|
||||
alias_method :delete, :execute
|
||||
|
||||
def begin_db_transaction() # :nodoc:
|
||||
@transaction = @connection.start_transaction
|
||||
end
|
||||
|
||||
def commit_db_transaction() # :nodoc:
|
||||
@transaction.commit
|
||||
ensure
|
||||
@transaction = nil
|
||||
end
|
||||
|
||||
def rollback_db_transaction() # :nodoc:
|
||||
@transaction.rollback
|
||||
ensure
|
||||
@transaction = nil
|
||||
end
|
||||
|
||||
def add_limit_offset!(sql, options) # :nodoc:
|
||||
if options[:limit]
|
||||
limit_string = "FIRST #{options[:limit]}"
|
||||
limit_string << " SKIP #{options[:offset]}" if options[:offset]
|
||||
sql.sub!(/\A(\s*SELECT\s)/i, '\&' + limit_string + ' ')
|
||||
end
|
||||
end
|
||||
|
||||
# Returns the next sequence value from a sequence generator. Not generally
|
||||
# called directly; used by ActiveRecord to get the next primary key value
|
||||
# when inserting a new database record (see #prefetch_primary_key?).
|
||||
def next_sequence_value(sequence_name)
|
||||
FireRuby::Generator.new(sequence_name, @connection).next(1)
|
||||
end
|
||||
|
||||
|
||||
# SCHEMA STATEMENTS ========================================
|
||||
|
||||
def current_database # :nodoc:
|
||||
file = @connection.database.file.split(':').last
|
||||
File.basename(file, '.*')
|
||||
end
|
||||
|
||||
def recreate_database! # :nodoc:
|
||||
sql = "SELECT rdb$character_set_name FROM rdb$database"
|
||||
charset = execute(sql).to_a.first[0].rstrip
|
||||
disconnect!
|
||||
@connection.database.drop(*@connection_params)
|
||||
FireRuby::Database.create(@connection.database.file,
|
||||
@connection_params[0], @connection_params[1], 4096, charset)
|
||||
end
|
||||
|
||||
def tables(name = nil) # :nodoc:
|
||||
sql = "SELECT rdb$relation_name FROM rdb$relations WHERE rdb$system_flag = 0"
|
||||
execute(sql, name).collect { |row| row[0].rstrip.downcase }
|
||||
end
|
||||
|
||||
def indexes(table_name, name = nil) # :nodoc:
|
||||
index_metadata(table_name, false, name).inject([]) do |indexes, row|
|
||||
if indexes.empty? or indexes.last.name != row[0]
|
||||
indexes << IndexDefinition.new(table_name, row[0].rstrip.downcase, row[1] == 1, [])
|
||||
end
|
||||
indexes.last.columns << row[2].rstrip.downcase
|
||||
indexes
|
||||
end
|
||||
end
|
||||
|
||||
def columns(table_name, name = nil) # :nodoc:
|
||||
sql = <<-end_sql
|
||||
SELECT r.rdb$field_name, r.rdb$field_source, f.rdb$field_type, f.rdb$field_sub_type,
|
||||
f.rdb$field_length, f.rdb$field_precision, f.rdb$field_scale,
|
||||
COALESCE(r.rdb$default_source, f.rdb$default_source) rdb$default_source,
|
||||
COALESCE(r.rdb$null_flag, f.rdb$null_flag) rdb$null_flag
|
||||
FROM rdb$relation_fields r
|
||||
JOIN rdb$fields f ON r.rdb$field_source = f.rdb$field_name
|
||||
WHERE r.rdb$relation_name = '#{table_name.to_s.upcase}'
|
||||
ORDER BY r.rdb$field_position
|
||||
end_sql
|
||||
execute(sql, name).collect do |field|
|
||||
field_values = field.values.collect do |value|
|
||||
case value
|
||||
when String then value.rstrip
|
||||
when FireRuby::Blob then value.to_s
|
||||
else value
|
||||
end
|
||||
end
|
||||
FirebirdColumn.new(*field_values)
|
||||
end
|
||||
end
|
||||
|
||||
def create_table(name, options = {}) # :nodoc:
|
||||
begin
|
||||
super
|
||||
rescue StatementInvalid
|
||||
raise unless non_existent_domain_error?
|
||||
create_boolean_domain
|
||||
super
|
||||
end
|
||||
unless options[:id] == false or options[:sequence] == false
|
||||
sequence_name = options[:sequence] || default_sequence_name(name)
|
||||
create_sequence(sequence_name)
|
||||
end
|
||||
end
|
||||
|
||||
def drop_table(name, options = {}) # :nodoc:
|
||||
super(name)
|
||||
unless options[:sequence] == false
|
||||
sequence_name = options[:sequence] || default_sequence_name(name)
|
||||
drop_sequence(sequence_name) if sequence_exists?(sequence_name)
|
||||
end
|
||||
end
|
||||
|
||||
def add_column(table_name, column_name, type, options = {}) # :nodoc:
|
||||
super
|
||||
rescue StatementInvalid
|
||||
raise unless non_existent_domain_error?
|
||||
create_boolean_domain
|
||||
super
|
||||
end
|
||||
|
||||
def change_column(table_name, column_name, type, options = {}) # :nodoc:
|
||||
change_column_type(table_name, column_name, type, options)
|
||||
change_column_position(table_name, column_name, options[:position]) if options.include?(:position)
|
||||
change_column_default(table_name, column_name, options[:default]) if options_include_default?(options)
|
||||
end
|
||||
|
||||
def change_column_default(table_name, column_name, default) # :nodoc:
|
||||
table_name = table_name.to_s.upcase
|
||||
sql = <<-end_sql
|
||||
UPDATE rdb$relation_fields f1
|
||||
SET f1.rdb$default_source =
|
||||
(SELECT f2.rdb$default_source FROM rdb$relation_fields f2
|
||||
WHERE f2.rdb$relation_name = '#{table_name}'
|
||||
AND f2.rdb$field_name = '#{TEMP_COLUMN_NAME}'),
|
||||
f1.rdb$default_value =
|
||||
(SELECT f2.rdb$default_value FROM rdb$relation_fields f2
|
||||
WHERE f2.rdb$relation_name = '#{table_name}'
|
||||
AND f2.rdb$field_name = '#{TEMP_COLUMN_NAME}')
|
||||
WHERE f1.rdb$relation_name = '#{table_name}'
|
||||
AND f1.rdb$field_name = '#{ar_to_fb_case(column_name.to_s)}'
|
||||
end_sql
|
||||
transaction do
|
||||
add_column(table_name, TEMP_COLUMN_NAME, :string, :default => default)
|
||||
execute sql
|
||||
remove_column(table_name, TEMP_COLUMN_NAME)
|
||||
end
|
||||
end
|
||||
|
||||
def rename_column(table_name, column_name, new_column_name) # :nodoc:
|
||||
execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} TO #{new_column_name}"
|
||||
end
|
||||
|
||||
def remove_index(table_name, options) #:nodoc:
|
||||
execute "DROP INDEX #{quote_column_name(index_name(table_name, options))}"
|
||||
end
|
||||
|
||||
def rename_table(name, new_name) # :nodoc:
|
||||
if table_has_constraints_or_dependencies?(name)
|
||||
raise ActiveRecordError,
|
||||
"Table #{name} includes constraints or dependencies that are not supported by " <<
|
||||
"the Firebird rename_table migration. Try explicitly removing the constraints/" <<
|
||||
"dependencies first, or manually renaming the table."
|
||||
end
|
||||
|
||||
transaction do
|
||||
copy_table(name, new_name)
|
||||
copy_table_indexes(name, new_name)
|
||||
end
|
||||
begin
|
||||
copy_table_data(name, new_name)
|
||||
copy_sequence_value(name, new_name)
|
||||
rescue
|
||||
drop_table(new_name)
|
||||
raise
|
||||
end
|
||||
drop_table(name)
|
||||
end
|
||||
|
||||
def dump_schema_information # :nodoc:
|
||||
super << ";\n"
|
||||
end
|
||||
|
||||
def type_to_sql(type, limit = nil, precision = nil, scale = nil) # :nodoc:
|
||||
case type
|
||||
when :integer then integer_sql_type(limit)
|
||||
when :float then float_sql_type(limit)
|
||||
when :string then super(type, limit, precision, scale)
|
||||
else super(type, limit, precision, scale)
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
def integer_sql_type(limit)
|
||||
case limit
|
||||
when (1..2) then 'smallint'
|
||||
when (3..4) then 'integer'
|
||||
else 'bigint'
|
||||
end
|
||||
end
|
||||
|
||||
def float_sql_type(limit)
|
||||
limit.to_i <= 4 ? 'float' : 'double precision'
|
||||
end
|
||||
|
||||
def select(sql, name = nil)
|
||||
execute(sql, name).collect do |row|
|
||||
hashed_row = {}
|
||||
row.each do |column, value|
|
||||
value = value.to_s if FireRuby::Blob === value
|
||||
hashed_row[fb_to_ar_case(column)] = value
|
||||
end
|
||||
hashed_row
|
||||
end
|
||||
end
|
||||
|
||||
def primary_key(table_name)
|
||||
if pk_row = index_metadata(table_name, true).to_a.first
|
||||
pk_row[2].rstrip.downcase
|
||||
end
|
||||
end
|
||||
|
||||
def index_metadata(table_name, pk, name = nil)
|
||||
sql = <<-end_sql
|
||||
SELECT i.rdb$index_name, i.rdb$unique_flag, s.rdb$field_name
|
||||
FROM rdb$indices i
|
||||
JOIN rdb$index_segments s ON i.rdb$index_name = s.rdb$index_name
|
||||
LEFT JOIN rdb$relation_constraints c ON i.rdb$index_name = c.rdb$index_name
|
||||
WHERE i.rdb$relation_name = '#{table_name.to_s.upcase}'
|
||||
end_sql
|
||||
if pk
|
||||
sql << "AND c.rdb$constraint_type = 'PRIMARY KEY'\n"
|
||||
else
|
||||
sql << "AND (c.rdb$constraint_type IS NULL OR c.rdb$constraint_type != 'PRIMARY KEY')\n"
|
||||
end
|
||||
sql << "ORDER BY i.rdb$index_name, s.rdb$field_position\n"
|
||||
execute sql, name
|
||||
end
|
||||
|
||||
def change_column_type(table_name, column_name, type, options = {})
|
||||
sql = "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} TYPE #{type_to_sql(type, options[:limit])}"
|
||||
execute sql
|
||||
rescue StatementInvalid
|
||||
raise unless non_existent_domain_error?
|
||||
create_boolean_domain
|
||||
execute sql
|
||||
end
|
||||
|
||||
def change_column_position(table_name, column_name, position)
|
||||
execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} POSITION #{position}"
|
||||
end
|
||||
|
||||
def copy_table(from, to)
|
||||
table_opts = {}
|
||||
if pk = primary_key(from)
|
||||
table_opts[:primary_key] = pk
|
||||
else
|
||||
table_opts[:id] = false
|
||||
end
|
||||
create_table(to, table_opts) do |table|
|
||||
from_columns = columns(from).reject { |col| col.name == table_opts[:primary_key] }
|
||||
from_columns.each do |column|
|
||||
col_opts = [:limit, :default, :null].inject({}) { |opts, opt| opts.merge(opt => column.send(opt)) }
|
||||
table.column column.name, column.type, col_opts
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def copy_table_indexes(from, to)
|
||||
indexes(from).each do |index|
|
||||
unless index.name[from.to_s]
|
||||
raise ActiveRecordError,
|
||||
"Cannot rename index #{index.name}, because the index name does not include " <<
|
||||
"the original table name (#{from}). Try explicitly removing the index on the " <<
|
||||
"original table and re-adding it on the new (renamed) table."
|
||||
end
|
||||
options = {}
|
||||
options[:name] = index.name.gsub(from.to_s, to.to_s)
|
||||
options[:unique] = index.unique
|
||||
add_index(to, index.columns, options)
|
||||
end
|
||||
end
|
||||
|
||||
def copy_table_data(from, to)
|
||||
execute "INSERT INTO #{to} SELECT * FROM #{from}", "Copy #{from} data to #{to}"
|
||||
end
|
||||
|
||||
def copy_sequence_value(from, to)
|
||||
sequence_value = FireRuby::Generator.new(default_sequence_name(from), @connection).last
|
||||
execute "SET GENERATOR #{default_sequence_name(to)} TO #{sequence_value}"
|
||||
end
|
||||
|
||||
def sequence_exists?(sequence_name)
|
||||
FireRuby::Generator.exists?(sequence_name, @connection)
|
||||
end
|
||||
|
||||
def create_sequence(sequence_name)
|
||||
FireRuby::Generator.create(sequence_name.to_s, @connection)
|
||||
end
|
||||
|
||||
def drop_sequence(sequence_name)
|
||||
FireRuby::Generator.new(sequence_name.to_s, @connection).drop
|
||||
end
|
||||
|
||||
def create_boolean_domain
|
||||
sql = <<-end_sql
|
||||
CREATE DOMAIN #{boolean_domain[:name]} AS #{boolean_domain[:type]}
|
||||
CHECK (VALUE IN (#{quoted_true}, #{quoted_false}) OR VALUE IS NULL)
|
||||
end_sql
|
||||
execute sql rescue nil
|
||||
end
|
||||
|
||||
def table_has_constraints_or_dependencies?(table_name)
|
||||
table_name = table_name.to_s.upcase
|
||||
sql = <<-end_sql
|
||||
SELECT 1 FROM rdb$relation_constraints
|
||||
WHERE rdb$relation_name = '#{table_name}'
|
||||
AND rdb$constraint_type IN ('UNIQUE', 'FOREIGN KEY', 'CHECK')
|
||||
UNION
|
||||
SELECT 1 FROM rdb$dependencies
|
||||
WHERE rdb$depended_on_name = '#{table_name}'
|
||||
AND rdb$depended_on_type = 0
|
||||
end_sql
|
||||
!select(sql).empty?
|
||||
end
|
||||
|
||||
def non_existent_domain_error?
|
||||
$!.message.include? FireRuby::NON_EXISTENT_DOMAIN_ERROR
|
||||
end
|
||||
|
||||
# Maps uppercase Firebird column names to lowercase for ActiveRecord;
|
||||
# mixed-case columns retain their original case.
|
||||
def fb_to_ar_case(column_name)
|
||||
column_name =~ /[[:lower:]]/ ? column_name : column_name.downcase
|
||||
end
|
||||
|
||||
# Maps lowercase ActiveRecord column names to uppercase for Firebird;
# mixed-case columns retain their original case.
|
||||
def ar_to_fb_case(column_name)
|
||||
column_name =~ /[[:upper:]]/ ? column_name : column_name.upcase
|
||||
end
|
||||
end
|
||||
end
|
||||
end

@@ -1,863 +0,0 @@
# Requires FrontBase Ruby bindings (gem install ruby-frontbase)

require 'active_record/connection_adapters/abstract_adapter'

FB_TRACE = false

module ActiveRecord
|
||||
|
||||
class Base
|
||||
class << self
|
||||
# Establishes a connection to the database that's used by all Active Record objects.
|
||||
def frontbase_connection(config) # :nodoc:
|
||||
# FrontBase only supports one unnamed sequence per table
|
||||
define_attr_method(:set_sequence_name, :sequence_name, &Proc.new {|*args| nil})
|
||||
|
||||
config = config.symbolize_keys
|
||||
database = config[:database]
|
||||
port = config[:port]
|
||||
host = config[:host]
|
||||
username = config[:username]
|
||||
password = config[:password]
|
||||
dbpassword = config[:dbpassword]
|
||||
session_name = config[:session_name]
|
||||
|
||||
dbpassword = '' if dbpassword.nil?
|
||||
|
||||
# Turn off colorization since it makes tail/less output difficult
|
||||
self.colorize_logging = false
|
||||
|
||||
require_library_or_gem 'frontbase' unless self.class.const_defined? :FBSQL_Connect
|
||||
|
||||
# Check bindings version
|
||||
version = "0.0.0"
|
||||
version = FBSQL_Connect::FB_BINDINGS_VERSION if defined? FBSQL_Connect::FB_BINDINGS_VERSION
|
||||
|
||||
if ActiveRecord::ConnectionAdapters::FrontBaseAdapter.compare_versions(version,"1.0.0") == -1
|
||||
raise AdapterNotFound,
|
||||
'The FrontBase adapter requires ruby-frontbase version 1.0.0 or greater; you appear ' <<
|
||||
"to be running an older version (#{version}) -- please update ruby-frontbase (gem install ruby-frontbase)."
|
||||
end
|
||||
connection = FBSQL_Connect.connect(host, port, database, username, password, dbpassword, session_name)
|
||||
ConnectionAdapters::FrontBaseAdapter.new(connection, logger, [host, port, database, username, password, dbpassword, session_name], config)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
module ConnectionAdapters
|
||||

# From EOF Documentation....
# buffer should have space for EOUniqueBinaryKeyLength (12) bytes.
# Assigns a world-wide unique ID made up of:
# < Sequence [2], ProcessID [2], Time [4], IP Addr [4] >
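#
# For illustration only (not from the original source): instantiating the
# class with no argument generates a fresh key, and #inspect renders it as
# a 24-character hex string, e.g.
#
#   key = ActiveRecord::ConnectionAdapters::TwelveByteKey.new
#   key.inspect # => "00C312340F2A46D00A000001" (actual value will vary)
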
class TwelveByteKey < String #:nodoc:
|
||||
@@mutex = Mutex.new
|
||||
@@sequence_number = rand(65536)
|
||||
@@key_cached_pid_component = nil
|
||||
@@key_cached_ip_component = nil
|
||||
|
||||
def initialize(string = nil)
|
||||
# Generate a unique key
|
||||
if string.nil?
|
||||
new_key = replace('_' * 12)
|
||||
|
||||
new_key[0..1] = self.class.key_sequence_component
|
||||
new_key[2..3] = self.class.key_pid_component
|
||||
new_key[4..7] = self.class.key_time_component
|
||||
new_key[8..11] = self.class.key_ip_component
|
||||
new_key
|
||||
else
|
||||
if string.size == 24
|
||||
string.gsub!(/[[:xdigit:]]{2}/) { |x| x.hex.chr }
|
||||
end
|
||||
raise "string is not 12 bytes long" unless string.size == 12
|
||||
super(string)
|
||||
end
|
||||
end
|
||||
|
||||
def inspect
|
||||
unpack("H*").first.upcase
|
||||
end
|
||||
|
||||
alias_method :to_s, :inspect
|
||||
|
||||
private
|
||||
|
||||
class << self
|
||||
def key_sequence_component
|
||||
seq = nil
|
||||
@@mutex.synchronize do
|
||||
seq = @@sequence_number
|
||||
@@sequence_number = (@@sequence_number + 1) % 65536
|
||||
end
|
||||
|
||||
sequence_component = "__"
|
||||
sequence_component[0] = seq >> 8
|
||||
sequence_component[1] = seq
|
||||
sequence_component
|
||||
end
|
||||
|
||||
def key_pid_component
|
||||
if @@key_cached_pid_component.nil?
|
||||
@@mutex.synchronize do
|
||||
pid = $$
|
||||
pid_component = "__"
|
||||
pid_component[0] = pid >> 8
|
||||
pid_component[1] = pid
|
||||
@@key_cached_pid_component = pid_component
|
||||
end
|
||||
end
|
||||
@@key_cached_pid_component
|
||||
end
|
||||
|
||||
def key_time_component
|
||||
time = Time.new.to_i
|
||||
time_component = "____"
|
||||
time_component[0] = (time & 0xFF000000) >> 24
|
||||
time_component[1] = (time & 0x00FF0000) >> 16
|
||||
time_component[2] = (time & 0x0000FF00) >> 8
|
||||
time_component[3] = (time & 0x000000FF)
|
||||
time_component
|
||||
end
|
||||
|
||||
def key_ip_component
|
||||
if @@key_cached_ip_component.nil?
|
||||
@@mutex.synchronize do
|
||||
old_lookup_flag = BasicSocket.do_not_reverse_lookup
|
||||
BasicSocket.do_not_reverse_lookup = true
|
||||
udpsocket = UDPSocket.new
|
||||
udpsocket.connect("17.112.152.32",1)
|
||||
ip_string = udpsocket.addr[3]
|
||||
BasicSocket.do_not_reverse_lookup = old_lookup_flag
|
||||
packed = Socket.pack_sockaddr_in(0,ip_string)
|
||||
addr_subset = packed[4..7]
|
||||
ip = addr_subset[0] << 24 | addr_subset[1] << 16 | addr_subset[2] << 8 | addr_subset[3]
|
||||
ip_component = "____"
|
||||
ip_component[0] = (ip & 0xFF000000) >> 24
|
||||
ip_component[1] = (ip & 0x00FF0000) >> 16
|
||||
ip_component[2] = (ip & 0x0000FF00) >> 8
|
||||
ip_component[3] = (ip & 0x000000FF)
|
||||
@@key_cached_ip_component = ip_component
|
||||
end
|
||||
end
|
||||
@@key_cached_ip_component
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
class FrontBaseColumn < Column #:nodoc:
|
||||
attr_reader :fb_autogen
|
||||
|
||||
def initialize(base, name, type, typename, limit, precision, scale, default, nullable)
|
||||
|
||||
@base = base
|
||||
@name = name
|
||||
@type = simplified_type(type,typename,limit)
|
||||
@limit = limit
|
||||
@precision = precision
|
||||
@scale = scale
|
||||
@default = default
|
||||
@null = nullable == "YES"
|
||||
@text = [:string, :text].include? @type
|
||||
@number = [:float, :integer, :decimal].include? @type
|
||||
@fb_autogen = false
|
||||
|
||||
if @default
|
||||
@default.gsub!(/^'(.*)'$/,'\1') if @text
|
||||
@fb_autogen = @default.include?("SELECT UNIQUE FROM")
|
||||
case @type
|
||||
when :boolean
|
||||
@default = @default == "TRUE"
|
||||
when :binary
|
||||
if @default != "X''"
|
||||
buffer = ""
|
||||
@default.scan(/../) { |h| buffer << h.hex.chr }
|
||||
@default = buffer
|
||||
else
|
||||
@default = ""
|
||||
end
|
||||
else
|
||||
@default = type_cast(@default)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Casts value (which is a String) to an appropriate instance.
|
||||
def type_cast(value)
|
||||
if type == :twelvebytekey
|
||||
ActiveRecord::ConnectionAdapters::TwelveByteKey.new(value)
|
||||
else
|
||||
super(value)
|
||||
end
|
||||
end
|
||||
|
||||
def type_cast_code(var_name)
|
||||
if type == :twelvebytekey
|
||||
"ActiveRecord::ConnectionAdapters::TwelveByteKey.new(#{var_name})"
|
||||
else
|
||||
super(var_name)
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
def simplified_type(field_type, type_name,limit)
|
||||
ret_type = :string
|
||||
puts "typecode: [#{field_type}] [#{type_name}]" if FB_TRACE
|
||||
|
||||
# 12 byte primary keys are a special case that Apple's EOF
|
||||
# used heavily. Optimize for this case
|
||||
if field_type == 11 && limit == 96
|
||||
ret_type = :twelvebytekey # BIT(96)
|
||||
else
|
||||
ret_type = case field_type
|
||||
when 1 then :boolean # BOOLEAN
|
||||
when 2 then :integer # INTEGER
|
||||
when 4 then :float # FLOAT
|
||||
when 10 then :string # CHARACTER VARYING
|
||||
when 11 then :bitfield # BIT
|
||||
when 13 then :date # DATE
|
||||
when 14 then :time # TIME
|
||||
when 16 then :timestamp # TIMESTAMP
|
||||
when 20 then :text # CLOB
|
||||
when 21 then :binary # BLOB
|
||||
when 22 then :integer # TINYINT
|
||||
else
|
||||
puts "ERROR: Unknown typecode: [#{field_type}] [#{type_name}]"
|
||||
end
|
||||
end
|
||||
puts "ret_type: #{ret_type.inspect}" if FB_TRACE
|
||||
ret_type
|
||||
end
|
||||
end
|
||||
|
||||
class FrontBaseAdapter < AbstractAdapter
|
||||
|
||||
class << self
|
||||
def compare_versions(v1, v2)
|
||||
v1_seg = v1.split(".")
|
||||
v2_seg = v2.split(".")
|
||||
0.upto([v1_seg.length,v2_seg.length].min) do |i|
|
||||
step = (v1_seg[i].to_i <=> v2_seg[i].to_i)
|
||||
return step unless step == 0
|
||||
end
|
||||
return v1_seg.length <=> v2_seg.length
|
||||
end
|
||||
end
|
||||
|
||||
def initialize(connection, logger, connection_options, config)
|
||||
super(connection, logger)
|
||||
@connection_options, @config = connection_options, config
|
||||
@transaction_mode = :pessimistic
|
||||
|
||||
# Start out in auto-commit mode
|
||||
self.rollback_db_transaction
|
||||
|
||||
# threaded_connections_test.rb will fail unless we set the session
|
||||
# to optimistic locking mode
|
||||
# set_pessimistic_transactions
|
||||
# execute "SET TRANSACTION ISOLATION LEVEL REPEATABLE READ, READ WRITE, LOCKING OPTIMISTIC"
|
||||
end
|
||||
|
||||
# Returns the human-readable name of the adapter. Use mixed case - one
|
||||
# can always use downcase if needed.
|
||||
def adapter_name #:nodoc:
|
||||
'FrontBase'
|
||||
end
|
||||
|
||||
# Does this adapter support migrations? Backend specific, as the
|
||||
# abstract adapter always returns +false+.
|
||||
def supports_migrations? #:nodoc:
|
||||
true
|
||||
end
|
||||
|
||||
def native_database_types #:nodoc:
|
||||
{
|
||||
:primary_key => "INTEGER DEFAULT UNIQUE PRIMARY KEY",
|
||||
:string => { :name => "VARCHAR", :limit => 255 },
|
||||
:text => { :name => "CLOB" },
|
||||
:integer => { :name => "INTEGER" },
|
||||
:float => { :name => "FLOAT" },
|
||||
:decimal => { :name => "DECIMAL" },
|
||||
:datetime => { :name => "TIMESTAMP" },
|
||||
:timestamp => { :name => "TIMESTAMP" },
|
||||
:time => { :name => "TIME" },
|
||||
:date => { :name => "DATE" },
|
||||
:binary => { :name => "BLOB" },
|
||||
:boolean => { :name => "BOOLEAN" },
|
||||
:twelvebytekey => { :name => "BYTE", :limit => 12}
|
||||
}
|
||||
end
|
||||
|
||||
|
||||
# QUOTING ==================================================
|
||||
|
||||
# Quotes the column value to help prevent
|
||||
# {SQL injection attacks}[http://en.wikipedia.org/wiki/SQL_injection].
|
||||
def quote(value, column = nil)
|
||||
return value.quoted_id if value.respond_to?(:quoted_id)
|
||||
|
||||
retvalue = "<INVALID>"
|
||||
|
||||
puts "quote(#{value.inspect}(#{value.class}),#{column.type.inspect})" if FB_TRACE
|
||||
# If a column was passed in, use column type information
|
||||
unless value.nil?
|
||||
if column
|
||||
retvalue = case column.type
|
||||
when :string
|
||||
if value.kind_of?(String)
|
||||
"'#{quote_string(value.to_s)}'" # ' (for ruby-mode)
|
||||
else
|
||||
"'#{quote_string(value.to_yaml)}'"
|
||||
end
|
||||
when :integer
|
||||
if value.kind_of?(TrueClass)
|
||||
'1'
|
||||
elsif value.kind_of?(FalseClass)
|
||||
'0'
|
||||
else
|
||||
value.to_i.to_s
|
||||
end
|
||||
when :float
|
||||
value.to_f.to_s
|
||||
when :decimal
|
||||
value.to_d.to_s("F")
|
||||
when :datetime, :timestamp
|
||||
"TIMESTAMP '#{value.strftime("%Y-%m-%d %H:%M:%S")}'"
|
||||
when :time
|
||||
"TIME '#{value.strftime("%H:%M:%S")}'"
|
||||
when :date
|
||||
"DATE '#{value.strftime("%Y-%m-%d")}'"
|
||||
when :twelvebytekey
|
||||
value = value.to_s.unpack("H*").first unless value.kind_of?(TwelveByteKey)
|
||||
"X'#{value.to_s}'"
|
||||
when :boolean
|
||||
value = quoted_true if value.kind_of?(TrueClass)
|
||||
value = quoted_false if value.kind_of?(FalseClass)
|
||||
value
|
||||
when :binary
|
||||
blob_handle = @connection.create_blob(value.to_s)
|
||||
puts "SQL -> Insert #{value.to_s.length} byte blob as #{retvalue}" if FB_TRACE
|
||||
blob_handle.handle
|
||||
when :text
|
||||
if value.kind_of?(String)
|
||||
clobdata = value.to_s # ' (for ruby-mode)
|
||||
else
|
||||
clobdata = value.to_yaml
|
||||
end
|
||||
clob_handle = @connection.create_clob(clobdata)
|
||||
puts "SQL -> Insert #{value.to_s.length} byte clob as #{retvalue}" if FB_TRACE
|
||||
clob_handle.handle
|
||||
else
|
||||
raise "*** UNKNOWN TYPE: #{column.type.inspect}"
|
||||
end # case
|
||||
# Since we don't have column type info, make a best guess based
|
||||
# on the Ruby class of the value
|
||||
else
|
||||
retvalue = case value
|
||||
when ActiveRecord::ConnectionAdapters::TwelveByteKey
|
||||
s = value.unpack("H*").first
|
||||
"X'#{s}'"
|
||||
when String
|
||||
if column && column.type == :binary
|
||||
s = value.unpack("H*").first
|
||||
"X'#{s}'"
|
||||
elsif column && [:integer, :float, :decimal].include?(column.type)
|
||||
value.to_s
|
||||
else
|
||||
"'#{quote_string(value)}'" # ' (for ruby-mode)
|
||||
end
|
||||
when NilClass
|
||||
"NULL"
|
||||
when TrueClass
|
||||
(column && column.type == :integer ? '1' : quoted_true)
|
||||
when FalseClass
|
||||
(column && column.type == :integer ? '0' : quoted_false)
|
||||
when Float, Fixnum, Bignum, BigDecimal
|
||||
value.to_s
|
||||
else
|
||||
if value.acts_like?(:time) || value.acts_like?(:date)
|
||||
if column
|
||||
case column.type
|
||||
when :date
|
||||
"DATE '#{value.strftime("%Y-%m-%d")}'"
|
||||
when :time
|
||||
"TIME '#{value.strftime("%H:%M:%S")}'"
|
||||
when :timestamp
|
||||
"TIMESTAMP '#{value.strftime("%Y-%m-%d %H:%M:%S")}'"
|
||||
else
|
||||
raise NotImplementedError, "Unknown column type!"
|
||||
end # case
|
||||
else # Column wasn't passed in, so try to guess the right type
|
||||
if value.acts_like?(:date)
|
||||
"DATE '#{value.strftime("%Y-%m-%d")}'"
|
||||
else
|
||||
if [:hour, :min, :sec].all? { |part| value.send(part).zero? }
|
||||
"TIME '#{value.strftime("%H:%M:%S")}'"
|
||||
else
|
||||
"TIMESTAMP '#{quoted_date(value)}'"
|
||||
end
|
||||
end
|
||||
end #if column
|
||||
else
|
||||
"'#{quote_string(value.to_yaml)}'"
|
||||
end
|
||||
end #case
|
||||
end
|
||||
else
|
||||
retvalue = "NULL"
|
||||
end
|
||||
|
||||
retvalue
|
||||
end # def
|
||||
|
||||
# Quotes a string, escaping any ' (single quote) characters.
|
||||
def quote_string(s)
|
||||
s.gsub(/'/, "''") # ' (for ruby-mode)
|
||||
end
|
||||
|
||||
def quote_column_name(name) #:nodoc:
|
||||
%( "#{name}" )
|
||||
end
|
||||
|
||||
def quoted_true
|
||||
"true"
|
||||
end
|
||||
|
||||
def quoted_false
|
||||
"false"
|
||||
end
|
||||
|
||||
|
||||
# CONNECTION MANAGEMENT ====================================
|
||||
|
||||
def active?
|
||||
true if @connection.status == 1
|
||||
rescue => e
|
||||
false
|
||||
end
|
||||
|
||||
def reconnect!
|
||||
@connection.close rescue nil
|
||||
@connection = FBSQL_Connect.connect(*@connection_options.first(7))
|
||||
end
|
||||
|
||||
# Close this connection
|
||||
def disconnect!
|
||||
@connection.close rescue nil
|
||||
@active = false
|
||||
end
|
||||
|
||||
# DATABASE STATEMENTS ======================================
|
||||
|
||||
# Returns an array of record hashes with the column names as keys and
|
||||
# column values as values.
|
||||
def select_all(sql, name = nil) #:nodoc:
|
||||
fbsql = cleanup_fb_sql(sql)
|
||||
return_value = []
|
||||
fbresult = execute(sql, name)
|
||||
puts "select_all SQL -> #{fbsql}" if FB_TRACE
|
||||
columns = fbresult.columns
|
||||
|
||||
fbresult.each do |row|
|
||||
puts "SQL <- #{row.inspect}" if FB_TRACE
|
||||
hashed_row = {}
|
||||
colnum = 0
|
||||
row.each do |col|
|
||||
hashed_row[columns[colnum]] = col
|
||||
if col.kind_of?(FBSQL_LOB)
|
||||
hashed_row[columns[colnum]] = col.read
|
||||
end
|
||||
colnum += 1
|
||||
end
|
||||
puts "raw row: #{hashed_row.inspect}" if FB_TRACE
|
||||
return_value << hashed_row
|
||||
end
|
||||
return_value
|
||||
end
|
||||
|
||||
def select_one(sql, name = nil) #:nodoc:
|
||||
fbsql = cleanup_fb_sql(sql)
|
||||
return_value = []
|
||||
fbresult = execute(fbsql, name)
|
||||
puts "SQL -> #{fbsql}" if FB_TRACE
|
||||
columns = fbresult.columns
|
||||
|
||||
fbresult.each do |row|
|
||||
puts "SQL <- #{row.inspect}" if FB_TRACE
|
||||
hashed_row = {}
|
||||
colnum = 0
|
||||
row.each do |col|
|
||||
hashed_row[columns[colnum]] = col
|
||||
if col.kind_of?(FBSQL_LOB)
|
||||
hashed_row[columns[colnum]] = col.read
|
||||
end
|
||||
colnum += 1
|
||||
end
|
||||
return_value << hashed_row
|
||||
break
|
||||
end
|
||||
fbresult.clear
|
||||
return_value.first
|
||||
end
|
||||
|
||||
def query(sql, name = nil) #:nodoc:
|
||||
fbsql = cleanup_fb_sql(sql)
|
||||
puts "SQL(query) -> #{fbsql}" if FB_TRACE
|
||||
log(fbsql, name) { @connection.query(fbsql) }
|
||||
rescue => e
|
||||
puts "FB Exception: #{e.inspect}" if FB_TRACE
|
||||
raise e
|
||||
end
|
||||
|
||||
def execute(sql, name = nil) #:nodoc:
|
||||
fbsql = cleanup_fb_sql(sql)
|
||||
puts "SQL(execute) -> #{fbsql}" if FB_TRACE
|
||||
log(fbsql, name) { @connection.query(fbsql) }
|
||||
rescue ActiveRecord::StatementInvalid => e
|
||||
if e.message.scan(/Table name - \w* - exists/).empty?
|
||||
puts "FB Exception: #{e.inspect}" if FB_TRACE
|
||||
raise e
|
||||
end
|
||||
end
|
||||
|
||||
# Returns the last auto-generated ID from the affected table.
|
||||
def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:
|
||||
puts "SQL -> #{sql.inspect}" if FB_TRACE
|
||||
execute(sql, name)
|
||||
id_value || pk
|
||||
end
|
||||
|
||||
# Executes the update statement and returns the number of rows affected.
|
||||
def update(sql, name = nil) #:nodoc:
|
||||
puts "SQL -> #{sql.inspect}" if FB_TRACE
|
||||
execute(sql, name).num_rows
|
||||
end
|
||||
|
||||
alias_method :delete, :update #:nodoc:
|
||||
|
||||
def set_pessimistic_transactions
|
||||
if @transaction_mode == :optimistic
|
||||
execute "SET TRANSACTION ISOLATION LEVEL SERIALIZABLE, LOCKING PESSIMISTIC, READ WRITE"
|
||||
@transaction_mode = :pessimistic
|
||||
end
|
||||
end
|
||||
|
||||
def set_optimistic_transactions
|
||||
if @transaction_mode == :pessimistic
|
||||
execute "SET TRANSACTION ISOLATION LEVEL REPEATABLE READ, READ WRITE, LOCKING OPTIMISTIC"
|
||||
@transaction_mode = :optimistic
|
||||
end
|
||||
end
|
||||
|
||||
def begin_db_transaction #:nodoc:
|
||||
execute "SET COMMIT FALSE" rescue nil
|
||||
end
|
||||
|
||||
def commit_db_transaction #:nodoc:
|
||||
execute "COMMIT"
|
||||
ensure
|
||||
execute "SET COMMIT TRUE"
|
||||
end
|
||||
|
||||
def rollback_db_transaction #:nodoc:
|
||||
execute "ROLLBACK"
|
||||
ensure
|
||||
execute "SET COMMIT TRUE"
|
||||
end
|
||||
|
||||
def add_limit_offset!(sql, options) #:nodoc:
if limit = options[:limit]
offset = options[:offset] || 0

# Here is the full syntax FrontBase supports:
# (from gclem@frontbase.com)
#
# TOP <limit - unsigned integer>
# TOP ( <offset expr>, <limit expr>)

# "TOP 0" is not allowed, so we have
# to use a cheap trick.
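#
# For illustration only (not in the original source): with :offset => 20
# and :limit => 10, the non-zero branch below rewrites
#   SELECT * FROM users
# into
#   SELECT TOP(20,10) * FROM users
# (or SELECT TOP 10 * FROM users when no offset is given).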
if limit.zero?
|
||||
case sql
|
||||
when /WHERE/i
|
||||
sql.sub!(/WHERE/i, 'WHERE 0 = 1 AND ')
|
||||
when /ORDER\s+BY/i
|
||||
sql.sub!(/ORDER\s+BY/i, 'WHERE 0 = 1 ORDER BY')
|
||||
else
|
||||
sql << 'WHERE 0 = 1'
|
||||
end
|
||||
else
|
||||
if offset.zero?
|
||||
sql.replace sql.gsub("SELECT ","SELECT TOP #{limit} ")
|
||||
else
|
||||
sql.replace sql.gsub("SELECT ","SELECT TOP(#{offset},#{limit}) ")
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def prefetch_primary_key?(table_name = nil)
|
||||
true
|
||||
end
|
||||
|
||||
# Returns the next sequence value from a sequence generator. Not generally
|
||||
# called directly; used by ActiveRecord to get the next primary key value
|
||||
# when inserting a new database record (see #prefetch_primary_key?).
|
||||
def next_sequence_value(sequence_name)
|
||||
unique = select_value("SELECT UNIQUE FROM #{sequence_name}","Next Sequence Value")
|
||||
# The test cases cannot handle a zero primary key
|
||||
unique.zero? ? select_value("SELECT UNIQUE FROM #{sequence_name}","Next Sequence Value") : unique
|
||||
end
|
||||
|
||||
def default_sequence_name(table, column)
|
||||
table
|
||||
end
|
||||
|
||||
# Set the sequence to the max value of the table's column.
|
||||
def reset_sequence!(table, column, sequence = nil)
|
||||
klasses = classes_for_table_name(table)
|
||||
klass = klasses.nil? ? nil : klasses.first
|
||||
pk = klass.primary_key unless klass.nil?
|
||||
if pk && klass.columns_hash[pk].type == :integer
|
||||
execute("SET UNIQUE FOR #{klass.table_name}(#{pk})")
|
||||
end
|
||||
end
|
||||
|
||||
def classes_for_table_name(table)
|
||||
ActiveRecord::Base.send(:subclasses).select {|klass| klass.table_name == table}
|
||||
end
|
||||
|
||||
def reset_pk_sequence!(table, pk = nil, sequence = nil)
|
||||
klasses = classes_for_table_name(table)
|
||||
klass = klasses.nil? ? nil : klasses.first
|
||||
pk = klass.primary_key unless klass.nil?
|
||||
if pk && klass.columns_hash[pk].type == :integer
|
||||
mpk = select_value("SELECT MAX(#{pk}) FROM #{table}")
|
||||
execute("SET UNIQUE FOR #{klass.table_name}(#{pk})")
|
||||
end
|
||||
end
|
||||
|
||||
# SCHEMA STATEMENTS ========================================
|
||||
|
||||
def structure_dump #:nodoc:
|
||||
select_all("SHOW TABLES").inject('') do |structure, table|
|
||||
structure << select_one("SHOW CREATE TABLE #{table.to_a.first.last}")["Create Table"] << ";\n\n"
|
||||
end
|
||||
end
|
||||
|
||||
def recreate_database(name) #:nodoc:
|
||||
drop_database(name)
|
||||
create_database(name)
|
||||
end
|
||||
|
||||
def create_database(name) #:nodoc:
|
||||
execute "CREATE DATABASE #{name}"
|
||||
end
|
||||
|
||||
def drop_database(name) #:nodoc:
|
||||
execute "DROP DATABASE #{name}"
|
||||
end
|
||||
|
||||
def current_database
|
||||
select_value('SELECT "CATALOG_NAME" FROM INFORMATION_SCHEMA.CATALOGS').downcase
|
||||
end
|
||||
|
||||
def tables(name = nil) #:nodoc:
|
||||
select_values(<<-SQL, nil)
|
||||
SELECT "TABLE_NAME"
|
||||
FROM INFORMATION_SCHEMA.TABLES AS T0,
|
||||
INFORMATION_SCHEMA.SCHEMATA AS T1
|
||||
WHERE T0.SCHEMA_PK = T1.SCHEMA_PK
|
||||
AND "SCHEMA_NAME" = CURRENT_SCHEMA
|
||||
SQL
|
||||
end
|
||||
|
||||
def indexes(table_name, name = nil)#:nodoc:
|
||||
indexes = []
|
||||
current_index = nil
|
||||
sql = <<-SQL
|
||||
SELECT INDEX_NAME, T2.ORDINAL_POSITION, INDEX_COLUMN_COUNT, INDEX_TYPE,
|
||||
"COLUMN_NAME", IS_NULLABLE
|
||||
FROM INFORMATION_SCHEMA.TABLES AS T0,
|
||||
INFORMATION_SCHEMA.INDEXES AS T1,
|
||||
INFORMATION_SCHEMA.INDEX_COLUMN_USAGE AS T2,
|
||||
INFORMATION_SCHEMA.COLUMNS AS T3
|
||||
WHERE T0."TABLE_NAME" = '#{table_name}'
|
||||
AND INDEX_TYPE <> 0
|
||||
AND T0.TABLE_PK = T1.TABLE_PK
|
||||
AND T0.TABLE_PK = T2.TABLE_PK
|
||||
AND T0.TABLE_PK = T3.TABLE_PK
|
||||
AND T1.INDEXES_PK = T2.INDEX_PK
|
||||
AND T2.COLUMN_PK = T3.COLUMN_PK
|
||||
ORDER BY INDEX_NAME, T2.ORDINAL_POSITION
|
||||
SQL
|
||||
|
||||
columns = []
|
||||
query(sql).each do |row|
|
||||
index_name = row[0]
|
||||
ord_position = row[1]
|
||||
ndx_colcount = row[2]
|
||||
index_type = row[3]
|
||||
column_name = row[4]
|
||||
|
||||
is_unique = index_type == 1
|
||||
|
||||
columns << column_name
|
||||
if ord_position == ndx_colcount
|
||||
indexes << IndexDefinition.new(table_name, index_name, is_unique , columns)
|
||||
columns = []
|
||||
end
|
||||
end
|
||||
indexes
|
||||
end
|
||||
|
||||
def columns(table_name, name = nil)#:nodoc:
|
||||
sql = <<-SQL
|
||||
SELECT "TABLE_NAME", "COLUMN_NAME", ORDINAL_POSITION, IS_NULLABLE, COLUMN_DEFAULT,
|
||||
DATA_TYPE, DATA_TYPE_CODE, CHARACTER_MAXIMUM_LENGTH, NUMERIC_PRECISION,
|
||||
NUMERIC_PRECISION_RADIX, NUMERIC_SCALE, DATETIME_PRECISION, DATETIME_PRECISION_LEADING
|
||||
FROM INFORMATION_SCHEMA.TABLES T0,
|
||||
INFORMATION_SCHEMA.COLUMNS T1,
|
||||
INFORMATION_SCHEMA.DATA_TYPE_DESCRIPTOR T3
|
||||
WHERE "TABLE_NAME" = '#{table_name}'
|
||||
AND T0.TABLE_PK = T1.TABLE_PK
|
||||
AND T0.TABLE_PK = T3.TABLE_OR_DOMAIN_PK
|
||||
AND T1.COLUMN_PK = T3.COLUMN_NAME_PK
|
||||
ORDER BY T1.ORDINAL_POSITION
|
||||
SQL
|
||||
|
||||
rawresults = query(sql,name)
|
||||
columns = []
|
||||
rawresults.each do |field|
|
||||
args = [base = field[0],
|
||||
name = field[1],
|
||||
typecode = field[6],
|
||||
typestring = field[5],
|
||||
limit = field[7],
|
||||
precision = field[8],
|
||||
scale = field[9],
|
||||
default = field[4],
|
||||
nullable = field[3]]
|
||||
columns << FrontBaseColumn.new(*args)
|
||||
end
|
||||
columns
|
||||
end
|
||||
|
||||
def create_table(name, options = {})
|
||||
table_definition = TableDefinition.new(self)
|
||||
table_definition.primary_key(options[:primary_key] || "id") unless options[:id] == false
|
||||
|
||||
yield table_definition
|
||||
|
||||
if options[:force]
|
||||
drop_table(name) rescue nil
|
||||
end
|
||||
|
||||
create_sql = "CREATE#{' TEMPORARY' if options[:temporary]} TABLE "
|
||||
create_sql << "#{name} ("
|
||||
create_sql << table_definition.to_sql
|
||||
create_sql << ") #{options[:options]}"
|
||||
begin_db_transaction
|
||||
execute create_sql
|
||||
commit_db_transaction
|
||||
rescue ActiveRecord::StatementInvalid => e
|
||||
raise e unless e.message.match(/Table name - \w* - exists/)
|
||||
end
|
||||
|
||||
def rename_table(name, new_name)
|
||||
columns = columns(name)
|
||||
pkcol = columns.find {|c| c.fb_autogen}
|
||||
execute "ALTER TABLE NAME #{name} TO #{new_name}"
|
||||
if pkcol
|
||||
change_column_default(new_name,pkcol.name,"UNIQUE")
|
||||
begin_db_transaction
|
||||
mpk = select_value("SELECT MAX(#{pkcol.name}) FROM #{new_name}")
|
||||
mpk = 0 if mpk.nil?
|
||||
execute "SET UNIQUE=#{mpk} FOR #{new_name}"
|
||||
commit_db_transaction
|
||||
end
|
||||
end
|
||||
|
||||
# Drops a table from the database.
|
||||
def drop_table(name, options = {})
|
||||
execute "DROP TABLE #{name} RESTRICT"
|
||||
rescue ActiveRecord::StatementInvalid => e
|
||||
raise e unless e.message.match(/Referenced TABLE - \w* - does not exist/)
|
||||
end
|
||||
|
||||
# Adds a new column to the named table.
|
||||
# See TableDefinition#column for details of the options you can use.
|
||||
def add_column(table_name, column_name, type, options = {})
|
||||
add_column_sql = "ALTER TABLE #{table_name} ADD #{column_name} #{type_to_sql(type, options[:limit])}"
|
||||
options[:type] = type
|
||||
add_column_options!(add_column_sql, options)
|
||||
execute(add_column_sql)
|
||||
end
|
||||
|
||||
def add_column_options!(sql, options) #:nodoc:
|
||||
default_value = quote(options[:default], options[:column])
|
||||
if options_include_default?(options)
|
||||
if options[:type] == :boolean
|
||||
default_value = options[:default] == 0 ? quoted_false : quoted_true
|
||||
end
|
||||
sql << " DEFAULT #{default_value}"
|
||||
end
|
||||
sql << " NOT NULL" if options[:null] == false
|
||||
end
|
||||
|
||||
# Removes the column from the table definition.
|
||||
# ===== Examples
|
||||
# remove_column(:suppliers, :qualification)
|
||||
def remove_column(table_name, column_name)
|
||||
execute "ALTER TABLE #{table_name} DROP #{column_name} RESTRICT"
|
||||
end
|
||||
|
||||
def remove_index(table_name, options = {}) #:nodoc:
|
||||
if options[:unique]
|
||||
execute "ALTER TABLE #{table_name} DROP CONSTRAINT #{quote_column_name(index_name(table_name, options))} RESTRICT"
|
||||
else
|
||||
execute "DROP INDEX #{quote_column_name(index_name(table_name, options))}"
|
||||
end
|
||||
end
|
||||
|
||||
def change_column_default(table_name, column_name, default) #:nodoc:
|
||||
execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} SET DEFAULT #{default}" if default != "NULL"
|
||||
end
|
||||
|
||||
def change_column(table_name, column_name, type, options = {}) #:nodoc:
|
||||
change_column_sql = %( ALTER COLUMN "#{table_name}"."#{column_name}" TO #{type_to_sql(type, options[:limit])} )
|
||||
execute(change_column_sql)
|
||||
change_column_sql = %( ALTER TABLE "#{table_name}" ALTER COLUMN "#{column_name}" )
|
||||
|
||||
if options_include_default?(options)
|
||||
default_value = quote(options[:default], options[:column])
|
||||
if type == :boolean
|
||||
default_value = options[:default] == 0 ? quoted_false : quoted_true
|
||||
end
|
||||
change_column_sql << " SET DEFAULT #{default_value}"
|
||||
end
|
||||
|
||||
execute(change_column_sql)
|
||||
|
||||
# change_column_sql = "ALTER TABLE #{table_name} CHANGE #{column_name} #{column_name} #{type_to_sql(type, options[:limit])}"
|
||||
# add_column_options!(change_column_sql, options)
|
||||
# execute(change_column_sql)
|
||||
end
|
||||
|
||||
def rename_column(table_name, column_name, new_column_name) #:nodoc:
|
||||
execute %( ALTER COLUMN NAME "#{table_name}"."#{column_name}" TO "#{new_column_name}" )
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
# Clean up sql to make it something FrontBase can digest
|
||||
        def cleanup_fb_sql(sql) #:nodoc:
          # Turn non-standard != into standard <>
          cleansql = sql.gsub("!=", "<>")
          # Strip blank lines and comments
          cleansql.split("\n").reject { |line| line.match(/^(?:\s*|--.*)$/) } * "\n"
        end
    end
  end
end
@@ -1,459 +0,0 @@
require 'active_record/connection_adapters/abstract_adapter'

module ActiveRecord
  class Base
    # Establishes a connection to the database that's used by all Active Record objects
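    #
    # A minimal sketch of establishing a connection (illustrative only; the
    # host, credentials, and database name below are assumed):
    #
    #   ActiveRecord::Base.establish_connection(
    #     :adapter  => "openbase",
    #     :host     => "localhost",
    #     :username => "admin",
    #     :password => "secret",
    #     :database => "my_app_development"
    #   )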
    def self.openbase_connection(config) # :nodoc:
      require_library_or_gem 'openbase' unless self.class.const_defined?(:OpenBase)

      config = config.symbolize_keys
      host = config[:host]
      username = config[:username].to_s
      password = config[:password].to_s

      if config.has_key?(:database)
        database = config[:database]
      else
        raise ArgumentError, "No database specified. Missing argument: database."
      end

      oba = ConnectionAdapters::OpenBaseAdapter.new(
        OpenBase.new(database, host, username, password), logger
      )

      if oba.raw_connection.connected?
        unless oba.tables.include?(ConnectionAdapters::OpenBaseAdapter::COLUMN_SUPPORT_TABLE)
          oba.execute(<<-SQL,"Creating OpenBase Column Support Table")
            CREATE TABLE #{ConnectionAdapters::OpenBaseAdapter::COLUMN_SUPPORT_TABLE} (name char, type char, precision int, scale int)
          SQL
        end
        oba.select_all("SELECT * FROM #{ConnectionAdapters::OpenBaseAdapter::COLUMN_SUPPORT_TABLE}").each do |col|
          ConnectionAdapters::OpenBaseAdapter::DECIMAL_COLUMNS.store(col["name"],[col["precision"],col["scale"]])
        end
      end

      oba
    end
  end

  module ConnectionAdapters
    class OpenBaseColumn < Column #:nodoc:
      private
        def simplified_type(field_type)
          return :integer if field_type.downcase =~ /long/
          return :decimal if field_type.downcase == "money"
          return :binary if field_type.downcase == "object"
          super
        end
    end

    # The OpenBase adapter works with the Ruby/Openbase driver by Derrick Spell,
    # provided with the distribution of OpenBase 10.0.6 and later
    # http://www.openbase.com
    #
    # Options:
    #
    # * <tt>:host</tt> -- Defaults to localhost
    # * <tt>:username</tt> -- Defaults to nothing
    # * <tt>:password</tt> -- Defaults to nothing
    # * <tt>:database</tt> -- The name of the database. No default, must be provided.
    #
    # The OpenBase adapter will make use of OpenBase's ability to generate unique ids
    # for any column with a unique index applied. Thus, if the value of a primary
    # key is not specified at the time an INSERT is performed, the adapter will prefetch
    # a unique id for the primary key. This prefetching is also necessary in order
    # to return the id after an insert.
    #
    # Maintainer: derrick.spell@gmail.com
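    #
    # ==== Example configuration (illustrative)
    # The database.yml entry below is a sketch, not taken from the original
    # documentation; the host, credentials, and database name are assumed.
    #
    #   development:
    #     adapter:  openbase
    #     host:     localhost
    #     username: admin
    #     password: secret
    #     database: my_app_development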
    class OpenBaseAdapter < AbstractAdapter
      DECIMAL_COLUMNS = {}
      COLUMN_SUPPORT_TABLE = "rails_openbase_column_support"

      def adapter_name
        'OpenBase'
      end

      def native_database_types
        {
          :primary_key => "integer NOT NULL UNIQUE INDEX DEFAULT _rowid",
          :string => { :name => "char", :limit => 4096 },
          :text => { :name => "text" },
          :integer => { :name => "integer" },
          :float => { :name => "float" },
          :decimal => { :name => "decimal" },
          :datetime => { :name => "datetime" },
          :timestamp => { :name => "timestamp" },
          :time => { :name => "time" },
          :date => { :name => "date" },
          :binary => { :name => "object" },
          :boolean => { :name => "boolean" }
        }
      end

      def supports_migrations?
        true
      end

      def prefetch_primary_key?(table_name = nil)
        true
      end

      def default_sequence_name(table_name, primary_key) # :nodoc:
        "#{table_name} #{primary_key}"
      end

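      # Prefetching a primary key (illustrative sketch; the table and key names
      # are assumed). default_sequence_name packs the table and primary key
      # into one string, which next_sequence_value splits apart before asking
      # the driver for a fresh row id:
      #
      #   seq = default_sequence_name("orders", "id")  # => "orders id"
      #   next_sequence_value(seq)                     # calls @connection.unique_row_id("orders", "id")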
      def next_sequence_value(sequence_name)
        ary = sequence_name.split(' ')
        if (!ary[1]) then
          ary[0] =~ /(\w+)_nonstd_seq/
          ary[0] = $1
        end
        @connection.unique_row_id(ary[0], ary[1])
      end


      # QUOTING ==================================================

      def quote(value, column = nil)
        if value.kind_of?(String) && column && column.type == :binary
          "'#{@connection.insert_binary(value)}'"
        elsif value.kind_of?(BigDecimal)
          return "'#{value.to_s}'"
        elsif column && column.type == :integer && column.sql_type =~ /decimal/
          return "'#{value.to_s}'"
        elsif [Float,Fixnum,Bignum].include?(value.class) && column && column.type == :string
          return "'#{value.to_s}'"
        else
          super
        end
      end

      def quoted_true
        "1"
      end

      def quoted_false
        "0"
      end


      # DATABASE STATEMENTS ======================================

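      # Adds OpenBase's RETURN RESULTS clause for :limit/:offset, or rewrites
      # the WHERE clause when :limit is 0 so that no rows come back.
      # ===== Examples (illustrative sketch; the SQL and options are assumed)
      #
      #   sql = "SELECT * FROM orders"
      #   add_limit_offset!(sql, :limit => 10)                 # => "SELECT * FROM orders RETURN RESULTS 10"
      #
      #   sql = "SELECT * FROM orders"
      #   add_limit_offset!(sql, :limit => 10, :offset => 20)  # => "SELECT * FROM orders RETURN RESULTS 20 TO 30"
      #
      #   sql = "SELECT * FROM orders"
      #   add_limit_offset!(sql, :limit => 0)                  # => "SELECT * FROM orders WHERE 1 = 2"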
      def add_limit_offset!(sql, options) #:nodoc:
        return if options[:limit].nil?
        limit = options[:limit]
        offset = options[:offset]
        if limit == 0
          # Mess with the where clause to ensure we get no results
          if sql =~ /WHERE/i
            sql.sub!(/WHERE/i, 'WHERE 1 = 2 AND ')
          elsif sql =~ /ORDER\s+BY/i
            sql.sub!(/ORDER\s+BY/i, 'WHERE 1 = 2 ORDER BY')
          else
            sql << ' WHERE 1 = 2'
          end
        elsif offset.nil?
          sql << " RETURN RESULTS #{limit}"
        else
          sql << " RETURN RESULTS #{offset} TO #{limit + offset}"
        end
      end

      def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:
        execute(sql, name)
        update_nulls_after_insert(sql, name, pk, id_value, sequence_name)
        id_value
      end

      def execute(sql, name = nil) #:nodoc:
        log(sql, name) { @connection.execute(sql) }
      end

      def direct_execute(sql, name = nil) #:nodoc:
        log(sql, name) { @connection.execute(sql) }
      end

      def update(sql, name = nil) #:nodoc:
        execute(sql, name).rows_affected
      end

      alias_method :delete, :update #:nodoc:

      def begin_db_transaction #:nodoc:
        execute "START TRANSACTION"
      rescue Exception
        # Transactions aren't supported
      end

      def commit_db_transaction #:nodoc:
        execute "COMMIT"
      rescue Exception
        # Transactions aren't supported
      end

      def rollback_db_transaction #:nodoc:
        execute "ROLLBACK"
      rescue Exception
        # Transactions aren't supported
      end


      # SCHEMA STATEMENTS ========================================

      # Return the list of all tables in the schema search path.
      def tables(name = nil) #:nodoc:
        tables = @connection.tables
        tables.reject { |t| /\A_SYS_/ === t }
      end

      def columns(table_name, name = nil) #:nodoc:
        sql = "SELECT * FROM _sys_tables "
        sql << "WHERE tablename='#{table_name}' AND INDEXOF(fieldname,'_')<>0 "
        sql << "ORDER BY columnNumber"
        columns = []
        direct_execute(sql, name).each_hash do |row|
          columns << OpenBaseColumn.new(row["fieldname"],
                                        default_value(row["defaultvalue"],row["typename"]),
                                        sql_type_name(table_name,row["fieldname"],row["typename"],row["length"]),
                                        row["notnull"] == 1 ? false : true)
        end
        columns
      end

      def column_names(table_name) #:nodoc:
        sql = "SELECT fieldname FROM _sys_tables "
        sql << "WHERE tablename='#{table_name}' AND INDEXOF(fieldname,'_')<>0 "
        sql << "ORDER BY columnNumber"
        names = direct_execute(sql).fetch_all
        names.flatten! || names
      end

      def indexes(table_name, name = nil) #:nodoc:
        sql = "SELECT fieldname, notnull, searchindex, uniqueindex, clusteredindex FROM _sys_tables "
        sql << "WHERE tablename='#{table_name}' AND INDEXOF(fieldname,'_')<>0 "
        sql << "AND primarykey=0 "
        sql << "AND (searchindex=1 OR uniqueindex=1 OR clusteredindex=1) "
        sql << "ORDER BY columnNumber"
        indexes = []
        execute(sql, name).each do |row|
          indexes << IndexDefinition.new(table_name,ob_index_name(row),row[3]==1,[row[0]])
        end
        indexes
      end

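      # Creating a table with a decimal column (illustrative sketch; the table
      # and column names are assumed). The adapter records the declared
      # precision and scale in the rails_openbase_column_support table so the
      # values can be cast back to BigDecimal when they are read:
      #
      #   create_table(:invoices) do |t|
      #     t.column :total, :decimal, :precision => 10, :scale => 2
      #   end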
      def create_table(name, options = {}) #:nodoc:
        return_value = super

        # Get my own copy of TableDefinition so that I can detect decimal columns
        table_definition = TableDefinition.new(self)
        yield table_definition

        table_definition.columns.each do |col|
          if col.type == :decimal
            record_decimal(name, col.name, col.precision, col.scale)
          end
        end

        unless options[:id] == false
          primary_key = (options[:primary_key] || "id")
          direct_execute("CREATE PRIMARY KEY #{name} (#{primary_key})")
        end
        return_value
      end

      def rename_table(name, new_name)
        execute "RENAME #{name} #{new_name}"
      end

      def add_column(table_name, column_name, type, options = {})
        return_value = super(table_name, "COLUMN " + column_name.to_s, type, options)
        if type == :decimal
          record_decimal(table_name, column_name, options[:precision], options[:scale])
        end
      end

      def remove_column(table_name, column_name)
        execute "ALTER TABLE #{table_name} REMOVE COLUMN #{quote_column_name(column_name)}"
      end

      def rename_column(table_name, column_name, new_column_name)
        execute "ALTER TABLE #{table_name} RENAME #{quote_column_name(column_name)} TO #{quote_column_name(new_column_name)}"
      end

      def add_column_options!(sql, options) #:nodoc:
        sql << " NOT NULL" if options[:null] == false
        sql << " DEFAULT #{quote(options[:default], options[:column])}" if options_include_default?(options)
      end

      def change_column(table_name, column_name, type, options = {}) #:nodoc:
        unless options_include_default?(options)
          options[:default] = select_one("SELECT * FROM _sys_tables WHERE tablename='#{table_name}' AND fieldname='#{column_name}'")["defaultvalue"]
        end

        change_column_sql = "ALTER TABLE #{table_name} ADD COLUMN #{column_name} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
        add_column_options!(change_column_sql, options)
        execute(change_column_sql)
      end

      def change_column_default(table_name, column_name, default)
        execute "ALTER TABLE #{table_name} COLUMN #{column_name} SET DEFAULT #{quote(default)}"
      end

      def add_index(table_name, column_name, options = {})
        if Hash === options # legacy support, since this param was a string
          index_type = options[:unique] ? "UNIQUE" : ""
        else
          index_type = options
        end
        execute "CREATE #{index_type} INDEX #{table_name} #{column_name}"
      end

      def remove_index(table_name, options = {})
        execute "DROP INDEX #{table_name} #{Hash === options ? options[:column] : options}"
      end

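      # Decimal columns map onto OpenBase types by scale (illustrative sketch;
      # the precision and scale values below are assumed):
      #
      #   type_to_sql(:decimal, nil, 10, 2)  # => "money"     (scale 2)
      #   type_to_sql(:decimal, nil, 8, 0)   # => "longlong"  (scale 0)
      #   type_to_sql(:decimal, nil, 6, 3)   # => "char(7)"   (anything else)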
      def type_to_sql(type, limit = nil, precision = nil, scale = nil) #:nodoc:
        return super unless type.to_s == 'decimal'

        if (scale.to_i == 2)
          return 'money'
        elsif (scale.to_i == 0)
          return 'longlong'
        else
          return "char(#{precision.to_i + 1})"
        end
      end


      private
        def select(sql, name = nil)
          decimals = detect_decimals(sql) || []
          sql = add_order_by_rowid(sql)

          # OpenBase ignores the return results when there is a group by
          # so limit the result set that we return to rails if need be
          if (sql =~ /GROUP BY/i)
            sql.sub!(/RETURN RESULTS (\d+)( TO (\d+))?/i,"")

            results = execute(sql, name)
            if ($2)
              results.fetch_offset = $1.to_i
              results.fetch_limit = $3.to_i - $1.to_i
            elsif ($1)
              results.fetch_limit = $1.to_i
            end
          else
            results = execute(sql, name)
          end

          rows = []
          if ( results.rows_affected )
            results.each_hash do |row| # loop through result rows
              row.delete("_rowid") if row.key?("_rowid")
              decimals.each do |name, precision, scale|
                row[name] = BigDecimal.new(row[name]) if row[name].is_a?(String)
              end
              rows << row
            end
          end
          rows
        end

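        # Normalizing raw default values read from _sys_tables (illustrative
        # sketch; the inputs below are assumed):
        #
        #   default_value("false")     # => false
        #   default_value(" 'draft'")  # => "draft"
        #   default_value("now()")     # => the current time as a string
        #   default_value("")          # => nil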
        def default_value(value,type=nil)
          return value if value.nil?

          # Boolean type values
          return true if value =~ /true/
          return false if value =~ /false/
          # Alternative boolean default declarations
          return true if (value == 1 && type == "boolean")
          return false if (value == 0 && type == "boolean")

          # Date / Time magic values
          return Time.now.to_s if value =~ /^now\(\)/i

          # Empty strings should be set to nil
          return nil if value.empty?

          # Otherwise return what we got from OpenBase
          # and hope for the best...
          # Take off the leading space and unquote
          value.lstrip!
          value = value[1,value.length-2] if value.first.eql?("'") && value.last.eql?("'")
          return nil if value.eql?("NULL")
          return value
        end

        def sql_type_name(table_name, col_name, type, length)
          full_name = table_name.to_s + "." + col_name.to_s
          if DECIMAL_COLUMNS.include?(full_name) && type != "longlong"
            return "decimal(#{DECIMAL_COLUMNS[full_name][0]},#{DECIMAL_COLUMNS[full_name][1]})"
          end
          return "#{type}(#{length})" if ( type =~ /char/ )
          type
        end

        def ob_index_name(row = [])
          name = ""
          name << "UNIQUE " if row[3]
          name << "CLUSTERED " if row[4]
          name << "INDEX"
          name
        end

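        # Finding decimal columns referenced by a SELECT (illustrative sketch;
        # assumes invoices.total was previously recorded as decimal(10,2)):
        #
        #   detect_decimals("SELECT * FROM invoices")  # => [["total", 10, 2]]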
        def detect_decimals(sql)
          # Detect any decimal columns that will need to be cast when fetched
          decimals = []
          sql =~ /SELECT\s+(.*)\s+FROM\s+(\w+)/i
          select_clause = $1
          main_table = $2
          if select_clause == "*"
            column_names(main_table).each do |col|
              full_name = main_table + "." + col
              if DECIMAL_COLUMNS.include?(full_name)
                decimals << [col,DECIMAL_COLUMNS[full_name][0].to_i,DECIMAL_COLUMNS[full_name][1].to_i]
              end
            end
          end
          return decimals
        end

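        # Appending the implicit row ordering (illustrative sketch; the SQL is
        # assumed):
        #
        #   add_order_by_rowid("SELECT * FROM users RETURN RESULTS 1")
        #   # => "SELECT * FROM users ORDER BY _rowid RETURN RESULTS 1"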
        def add_order_by_rowid(sql)
          # ORDER BY _rowid if no explicit ORDER BY
          # This will ensure that find(:first) returns the first inserted row
          if (sql !~ /(ORDER BY)|(GROUP BY)/)
            if (sql =~ /RETURN RESULTS/)
              sql.sub!(/RETURN RESULTS/,"ORDER BY _rowid RETURN RESULTS")
            else
              sql << " ORDER BY _rowid"
            end
          end
          sql
        end

        def record_decimal(table_name, column_name, precision, scale)
          full_name = table_name.to_s + "." + column_name.to_s
          DECIMAL_COLUMNS.store(full_name, [precision.to_i,scale.to_i])
          direct_execute("INSERT INTO #{COLUMN_SUPPORT_TABLE} (name,type,precision,scale) VALUES ('#{full_name}','decimal',#{precision.to_i},#{scale.to_i})")
        end

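        # After an INSERT the adapter re-issues NULLs for any columns that were
        # given as NULL, via an UPDATE keyed on the prefetched primary key.
        # Illustrative sketch (table, columns, and id are assumed):
        #
        #   update_nulls_after_insert(
        #     "INSERT INTO orders (id, name, memo) VALUES (5, 'rush', NULL)",
        #     nil, "id", 5)
        #   # issues (roughly): UPDATE orders SET memo=NULL WHERE id=5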
        def update_nulls_after_insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)
          sql =~ /INSERT INTO (\w+) \((.*)\) VALUES\s*\((.*)\)/m
          table = $1
          cols = $2
          values = $3
          cols = cols.split(',')
          values.gsub!(/'[^']*'/,"''")
          values.gsub!(/"[^"]*"/,"\"\"")
          values = values.split(',')
          update_cols = []
          values.each_index { |index| update_cols << cols[index] if values[index] =~ /\s*NULL\s*/ }
          update_sql = "UPDATE #{table} SET"
          update_cols.each { |col| update_sql << " #{col}=NULL," unless col.empty? }
          update_sql.chop!()
          update_sql << " WHERE #{pk}=#{quote(id_value)}"
          direct_execute(update_sql,"Null Correction") if update_cols.size > 0
        end
    end
  end
end