mirror of
https://github.com/github/rails.git
synced 2026-01-30 00:38:00 -05:00
Extract Oracle, SQLServer, and Sybase adapters into gems.
git-svn-id: http://svn-commit.rubyonrails.org/rails/trunk@7689 5ecf4fe2-1ee6-0310-87b1-e25e094e27de
@@ -1,5 +1,7 @@

*2.0.0 [Preview Release]* (September 29th, 2007) [Includes duplicates of changes from 1.14.2 - 1.15.3]

* Extract Oracle, SQLServer, and Sybase adapters into gems. [Jeremy Kemper]

* Added fixture caching that'll speed up a normal fixture-powered test suite between 50% and 100% #9682 [frederick.cheung@gmail.com]

* Correctly quote id list for limited eager loading. #7482 [tmacedo]

@@ -1,721 +0,0 @@
# oracle_adapter.rb -- ActiveRecord adapter for Oracle 8i, 9i, 10g
#
# Original author: Graham Jenkins
#
# Current maintainer: Michael Schoen <schoenm@earthlink.net>
#
#########################################################################
#
# Implementation notes:
# 1. Redefines (safely) a method in ActiveRecord to make it possible to
#    implement an autonumbering solution for Oracle.
# 2. The OCI8 driver is patched to properly handle values for LONG and
#    TIMESTAMP columns. The driver author has indicated that a future
#    release of the driver will obviate this patch.
# 3. LOB support is implemented through an after_save callback.
# 4. Oracle does not offer native LIMIT and OFFSET options; this
#    functionality is mimicked through the use of nested selects.
#    See http://asktom.oracle.com/pls/ask/f?p=4950:8:::::F4950_P8_DISPLAYID:127412348064
#
# Do what you want with this code, at your own peril, but if any
# significant portion of my code remains then please acknowledge my
# contribution.
# portions Copyright 2005 Graham Jenkins

require 'active_record/connection_adapters/abstract_adapter'
require 'delegate'

begin
  require_library_or_gem 'oci8' unless self.class.const_defined? :OCI8

  module ActiveRecord
    class Base
      def self.oracle_connection(config) #:nodoc:
        # Use OCI8AutoRecover instead of normal OCI8 driver.
        ConnectionAdapters::OracleAdapter.new OCI8AutoRecover.new(config), logger
      end

      # for backwards-compatibility
      def self.oci_connection(config) #:nodoc:
        config[:database] = config[:host]
        self.oracle_connection(config)
      end

      # After setting large objects to empty, select the OCI8::LOB
      # and write back the data.
      after_save :write_lobs
      def write_lobs #:nodoc:
        if connection.is_a?(ConnectionAdapters::OracleAdapter)
          connection.write_lobs(self.class.table_name, self.class, attributes)
        end
      end

      private :write_lobs
    end

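    # For illustration only (model and column names here are hypothetical, not
    # taken from the adapter): with the after_save hook above, saving a record
    # whose table has a CLOB/BLOB column happens in two steps. The INSERT writes
    # an empty_clob()/empty_blob() placeholder (see OracleAdapter#quote below),
    # and write_lobs then selects the OCI8::LOB and streams the real data into it.
    #
    #   class Article < ActiveRecord::Base; end          # articles.body is a CLOB
    #
    #   Article.create!(:title => 'LOBs', :body => 'very long text...')
    #   # => INSERT INTO articles ... VALUES (..., empty_clob())
    #   # => after_save -> write_lobs -> lob.write('very long text...')
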
    module ConnectionAdapters #:nodoc:
      class OracleColumn < Column #:nodoc:

        def type_cast(value)
          return guess_date_or_time(value) if type == :datetime && OracleAdapter.emulate_dates
          super
        end

        private
        def simplified_type(field_type)
          return :boolean if OracleAdapter.emulate_booleans && field_type == 'NUMBER(1)'
          case field_type
            when /date|time/i then :datetime
            else super
          end
        end

        def guess_date_or_time(value)
          (value.hour == 0 and value.min == 0 and value.sec == 0) ?
            Date.new(value.year, value.month, value.day) : value
        end
      end

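      # For illustration only (column name and values here are hypothetical): the
      # emulate_dates flag drives guess_date_or_time above. A DATE value that
      # falls exactly on midnight comes back as a Ruby Date; anything else stays
      # a Time.
      #
      #   OracleAdapter.emulate_dates = true
      #   column = OracleColumn.new('created_on', nil, 'DATE')   # simplified_type => :datetime
      #   column.type_cast(Time.local(2007, 9, 29))              # => a Date
      #   column.type_cast(Time.local(2007, 9, 29, 14, 30, 5))   # => a Time
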
      # This is an Oracle/OCI adapter for the ActiveRecord persistence
      # framework. It relies upon the OCI8 driver, which works with Oracle 8i
      # and above. Most recent development has been on Debian Linux against
      # a 10g database, ActiveRecord 1.12.1 and OCI8 0.1.13.
      # See: http://rubyforge.org/projects/ruby-oci8/
      #
      # Usage notes:
      # * Key generation assumes a "${table_name}_seq" sequence is available
      #   for all tables; the sequence name can be changed using
      #   ActiveRecord::Base.set_sequence_name. When using Migrations, these
      #   sequences are created automatically.
      # * Oracle uses DATE or TIMESTAMP datatypes for both dates and times.
      #   Consequently some hacks are employed to map data back to Date or Time
      #   in Ruby. If the column_name ends in _time it's created as a Ruby Time.
      #   Else if the hours/minutes/seconds are 0, I make it a Ruby Date. Else
      #   it's a Ruby Time. This is a bit nasty - but if you use Duck Typing
      #   you'll probably not care very much. In 9i and up it's tempting to
      #   map DATE to Date and TIMESTAMP to Time, but too many databases use
      #   DATE for both. Timezones and sub-second precision on timestamps are
      #   not supported.
      # * Default values that are functions (such as "SYSDATE") are not
      #   supported. This is a restriction of the way ActiveRecord supports
      #   default values.
      # * Support for Oracle8 is limited by Rails' use of ANSI join syntax, which
      #   is supported in Oracle9i and later. You will need to use #finder_sql for
      #   has_and_belongs_to_many associations to run against Oracle8.
      #
      # Required parameters:
      #
      # * <tt>:username</tt>
      # * <tt>:password</tt>
      # * <tt>:database</tt>
      class OracleAdapter < AbstractAdapter

@@emulate_booleans = true
|
||||
cattr_accessor :emulate_booleans
|
||||
|
||||
@@emulate_dates = false
|
||||
cattr_accessor :emulate_dates
|
||||
|
||||
def adapter_name #:nodoc:
|
||||
'Oracle'
|
||||
end
|
||||
|
||||
def supports_migrations? #:nodoc:
|
||||
true
|
||||
end
|
||||
|
||||
def native_database_types #:nodoc:
|
||||
{
|
||||
:primary_key => "NUMBER(38) NOT NULL PRIMARY KEY",
|
||||
:string => { :name => "VARCHAR2", :limit => 255 },
|
||||
:text => { :name => "CLOB" },
|
||||
:integer => { :name => "NUMBER", :limit => 38 },
|
||||
:float => { :name => "NUMBER" },
|
||||
:decimal => { :name => "DECIMAL" },
|
||||
:datetime => { :name => "DATE" },
|
||||
:timestamp => { :name => "DATE" },
|
||||
:time => { :name => "DATE" },
|
||||
:date => { :name => "DATE" },
|
||||
:binary => { :name => "BLOB" },
|
||||
:boolean => { :name => "NUMBER", :limit => 1 }
|
||||
}
|
||||
end
|
||||
|
||||
def table_alias_length
|
||||
30
|
||||
end
|
||||
|
||||
# QUOTING ==================================================
|
||||
#
|
||||
# see: abstract/quoting.rb
|
||||
|
||||
# camelCase column names need to be quoted; not that anyone using Oracle
|
||||
# would really do this, but handling this case means we pass the test...
|
||||
def quote_column_name(name) #:nodoc:
|
||||
name.to_s =~ /[A-Z]/ ? "\"#{name}\"" : name
|
||||
end
|
||||
|
||||
def quote_string(s) #:nodoc:
|
||||
s.gsub(/'/, "''")
|
||||
end
|
||||
|
||||
def quote(value, column = nil) #:nodoc:
|
||||
if value && column && [:text, :binary].include?(column.type)
|
||||
%Q{empty_#{ column.sql_type.downcase rescue 'blob' }()}
|
||||
else
|
||||
super
|
||||
end
|
||||
end
|
||||
|
||||
def quoted_true
|
||||
"1"
|
||||
end
|
||||
|
||||
def quoted_false
|
||||
"0"
|
||||
end
|
||||
|
||||
|
||||
# CONNECTION MANAGEMENT ====================================
|
||||
#
|
||||
|
||||
# Returns true if the connection is active.
|
||||
def active?
|
||||
# Pings the connection to check if it's still good. Note that an
|
||||
# #active? method is also available, but that simply returns the
|
||||
# last known state, which isn't good enough if the connection has
|
||||
# gone stale since the last use.
|
||||
@connection.ping
|
||||
rescue OCIException
|
||||
false
|
||||
end
|
||||
|
||||
# Reconnects to the database.
|
||||
def reconnect!
|
||||
@connection.reset!
|
||||
rescue OCIException => e
|
||||
@logger.warn "#{adapter_name} automatic reconnection failed: #{e.message}"
|
||||
end
|
||||
|
||||
# Disconnects from the database.
|
||||
def disconnect!
|
||||
@connection.logoff rescue nil
|
||||
@connection.active = false
|
||||
end
|
||||
|
||||
|
||||
# DATABASE STATEMENTS ======================================
|
||||
#
|
||||
# see: abstract/database_statements.rb
|
||||
|
||||
def execute(sql, name = nil) #:nodoc:
|
||||
log(sql, name) { @connection.exec sql }
|
||||
end
|
||||
|
||||
# Returns the next sequence value from a sequence generator. Not generally
|
||||
# called directly; used by ActiveRecord to get the next primary key value
|
||||
# when inserting a new database record (see #prefetch_primary_key?).
|
||||
def next_sequence_value(sequence_name)
|
||||
id = 0
|
||||
@connection.exec("select #{sequence_name}.nextval id from dual") { |r| id = r[0].to_i }
|
||||
id
|
||||
end
|
||||
|
||||
def begin_db_transaction #:nodoc:
|
||||
@connection.autocommit = false
|
||||
end
|
||||
|
||||
def commit_db_transaction #:nodoc:
|
||||
@connection.commit
|
||||
ensure
|
||||
@connection.autocommit = true
|
||||
end
|
||||
|
||||
def rollback_db_transaction #:nodoc:
|
||||
@connection.rollback
|
||||
ensure
|
||||
@connection.autocommit = true
|
||||
end
|
||||
|
||||
def add_limit_offset!(sql, options) #:nodoc:
  offset = options[:offset] || 0

  if limit = options[:limit]
    sql.replace "select * from (select raw_sql_.*, rownum raw_rnum_ from (#{sql}) raw_sql_ where rownum <= #{offset+limit}) where raw_rnum_ > #{offset}"
  elsif offset > 0
    sql.replace "select * from (select raw_sql_.*, rownum raw_rnum_ from (#{sql}) raw_sql_) where raw_rnum_ > #{offset}"
  end
end

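# For illustration only (table name is hypothetical): the nested selects built
# above are how :limit/:offset are emulated. For a finder call such as
#
#   Post.find(:all, :limit => 10, :offset => 20)
#
# the generated statement is wrapped roughly as:
#
#   select * from (
#     select raw_sql_.*, rownum raw_rnum_
#     from (SELECT * FROM posts) raw_sql_
#     where rownum <= 30
#   ) where raw_rnum_ > 20
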
# Returns true for Oracle adapter (since Oracle requires primary key
# values to be pre-fetched before insert). See also #next_sequence_value.
def prefetch_primary_key?(table_name = nil)
  true
end

def default_sequence_name(table, column) #:nodoc:
  "#{table}_seq"
end

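# For illustration only (model, table and sequence names are hypothetical):
# because prefetch_primary_key? is true, every insert is preceded by a
# "select <sequence>.nextval id from dual" (see #next_sequence_value above).
# The sequence defaults to "<table>_seq", as returned by #default_sequence_name,
# but a model can point at a legacy sequence:
#
#   class LegacyOrder < ActiveRecord::Base
#     set_table_name    'orders'
#     set_sequence_name 'order_id_seq'   # instead of the default orders_seq
#   end
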
# Inserts the given fixture into the table. Overridden to properly handle lobs.
|
||||
def insert_fixture(fixture, table_name)
|
||||
super
|
||||
|
||||
klass = fixture.class_name.constantize rescue nil
|
||||
if klass.respond_to?(:ancestors) && klass.ancestors.include?(ActiveRecord::Base)
|
||||
write_lobs(table_name, klass, fixture)
|
||||
end
|
||||
end
|
||||
|
||||
# Writes LOB values from attributes, as indicated by the LOB columns of klass.
|
||||
def write_lobs(table_name, klass, attributes)
|
||||
id = quote(attributes[klass.primary_key])
|
||||
klass.columns.select { |col| col.sql_type =~ /LOB$/i }.each do |col|
|
||||
value = attributes[col.name]
|
||||
value = value.to_yaml if col.text? && klass.serialized_attributes[col.name]
|
||||
next if value.nil? || (value == '')
|
||||
lob = select_one("SELECT #{col.name} FROM #{table_name} WHERE #{klass.primary_key} = #{id}",
|
||||
'Writable Large Object')[col.name]
|
||||
lob.write value
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
# SCHEMA STATEMENTS ========================================
|
||||
#
|
||||
# see: abstract/schema_statements.rb
|
||||
|
||||
def current_database #:nodoc:
|
||||
select_one("select sys_context('userenv','db_name') db from dual")["db"]
|
||||
end
|
||||
|
||||
def tables(name = nil) #:nodoc:
|
||||
select_all("select lower(table_name) from user_tables").inject([]) do | tabs, t |
|
||||
tabs << t.to_a.first.last
|
||||
end
|
||||
end
|
||||
|
||||
def indexes(table_name, name = nil) #:nodoc:
|
||||
result = select_all(<<-SQL, name)
|
||||
SELECT lower(i.index_name) as index_name, i.uniqueness, lower(c.column_name) as column_name
|
||||
FROM user_indexes i, user_ind_columns c
|
||||
WHERE i.table_name = '#{table_name.to_s.upcase}'
|
||||
AND c.index_name = i.index_name
|
||||
AND i.index_name NOT IN (SELECT uc.index_name FROM user_constraints uc WHERE uc.constraint_type = 'P')
|
||||
ORDER BY i.index_name, c.column_position
|
||||
SQL
|
||||
|
||||
current_index = nil
|
||||
indexes = []
|
||||
|
||||
result.each do |row|
|
||||
if current_index != row['index_name']
|
||||
indexes << IndexDefinition.new(table_name, row['index_name'], row['uniqueness'] == "UNIQUE", [])
|
||||
current_index = row['index_name']
|
||||
end
|
||||
|
||||
indexes.last.columns << row['column_name']
|
||||
end
|
||||
|
||||
indexes
|
||||
end
|
||||
|
||||
def columns(table_name, name = nil) #:nodoc:
|
||||
(owner, table_name) = @connection.describe(table_name)
|
||||
|
||||
table_cols = <<-SQL
|
||||
select column_name as name, data_type as sql_type, data_default, nullable,
|
||||
decode(data_type, 'NUMBER', data_precision,
|
||||
'FLOAT', data_precision,
|
||||
'VARCHAR2', data_length,
|
||||
'CHAR', data_length,
|
||||
null) as limit,
|
||||
decode(data_type, 'NUMBER', data_scale, null) as scale
|
||||
from all_tab_columns
|
||||
where owner = '#{owner}'
|
||||
and table_name = '#{table_name}'
|
||||
order by column_id
|
||||
SQL
|
||||
|
||||
select_all(table_cols, name).map do |row|
|
||||
limit, scale = row['limit'], row['scale']
|
||||
if limit || scale
|
||||
row['sql_type'] << "(#{(limit || 38).to_i}" + ((scale = scale.to_i) > 0 ? ",#{scale})" : ")")
|
||||
end
|
||||
|
||||
# clean up odd default spacing from Oracle
|
||||
if row['data_default']
|
||||
row['data_default'].sub!(/^(.*?)\s*$/, '\1')
|
||||
row['data_default'].sub!(/^'(.*)'$/, '\1')
|
||||
row['data_default'] = nil if row['data_default'] =~ /^(null|empty_[bc]lob\(\))$/i
|
||||
end
|
||||
|
||||
OracleColumn.new(oracle_downcase(row['name']),
|
||||
row['data_default'],
|
||||
row['sql_type'],
|
||||
row['nullable'] == 'Y')
|
||||
end
|
||||
end
|
||||
|
||||
def create_table(name, options = {}) #:nodoc:
|
||||
super(name, options)
|
||||
seq_name = options[:sequence_name] || "#{name}_seq"
|
||||
execute "CREATE SEQUENCE #{seq_name} START WITH 10000" unless options[:id] == false
|
||||
end
|
||||
|
||||
def rename_table(name, new_name) #:nodoc:
|
||||
execute "RENAME #{name} TO #{new_name}"
|
||||
execute "RENAME #{name}_seq TO #{new_name}_seq" rescue nil
|
||||
end
|
||||
|
||||
def drop_table(name, options = {}) #:nodoc:
|
||||
super(name)
|
||||
seq_name = options[:sequence_name] || "#{name}_seq"
|
||||
execute "DROP SEQUENCE #{seq_name}" rescue nil
|
||||
end
|
||||
|
||||
def remove_index(table_name, options = {}) #:nodoc:
|
||||
execute "DROP INDEX #{index_name(table_name, options)}"
|
||||
end
|
||||
|
||||
def change_column_default(table_name, column_name, default) #:nodoc:
|
||||
execute "ALTER TABLE #{table_name} MODIFY #{quote_column_name(column_name)} DEFAULT #{quote(default)}"
|
||||
end
|
||||
|
||||
def change_column(table_name, column_name, type, options = {}) #:nodoc:
|
||||
change_column_sql = "ALTER TABLE #{table_name} MODIFY #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
|
||||
add_column_options!(change_column_sql, options)
|
||||
execute(change_column_sql)
|
||||
end
|
||||
|
||||
def rename_column(table_name, column_name, new_column_name) #:nodoc:
|
||||
execute "ALTER TABLE #{table_name} RENAME COLUMN #{quote_column_name(column_name)} to #{quote_column_name(new_column_name)}"
|
||||
end
|
||||
|
||||
def remove_column(table_name, column_name) #:nodoc:
|
||||
execute "ALTER TABLE #{table_name} DROP COLUMN #{quote_column_name(column_name)}"
|
||||
end
|
||||
|
||||
# Find a table's primary key and sequence.
|
||||
# *Note*: Only primary key is implemented - sequence will be nil.
|
||||
def pk_and_sequence_for(table_name)
|
||||
(owner, table_name) = @connection.describe(table_name)
|
||||
|
||||
pks = select_values(<<-SQL, 'Primary Key')
|
||||
select cc.column_name
|
||||
from all_constraints c, all_cons_columns cc
|
||||
where c.owner = '#{owner}'
|
||||
and c.table_name = '#{table_name}'
|
||||
and c.constraint_type = 'P'
|
||||
and cc.owner = c.owner
|
||||
and cc.constraint_name = c.constraint_name
|
||||
SQL
|
||||
|
||||
# only support single column keys
|
||||
pks.size == 1 ? [oracle_downcase(pks.first), nil] : nil
|
||||
end
|
||||
|
||||
def structure_dump #:nodoc:
|
||||
s = select_all("select sequence_name from user_sequences").inject("") do |structure, seq|
|
||||
structure << "create sequence #{seq.to_a.first.last};\n\n"
|
||||
end
|
||||
|
||||
select_all("select table_name from user_tables").inject(s) do |structure, table|
|
||||
ddl = "create table #{table.to_a.first.last} (\n "
|
||||
cols = select_all(%Q{
|
||||
select column_name, data_type, data_length, data_precision, data_scale, data_default, nullable
|
||||
from user_tab_columns
|
||||
where table_name = '#{table.to_a.first.last}'
|
||||
order by column_id
|
||||
}).map do |row|
|
||||
col = "#{row['column_name'].downcase} #{row['data_type'].downcase}"
|
||||
if row['data_type'] =='NUMBER' and !row['data_precision'].nil?
|
||||
col << "(#{row['data_precision'].to_i}"
|
||||
col << ",#{row['data_scale'].to_i}" if !row['data_scale'].nil?
|
||||
col << ')'
|
||||
elsif row['data_type'].include?('CHAR')
|
||||
col << "(#{row['data_length'].to_i})"
|
||||
end
|
||||
col << " default #{row['data_default']}" if !row['data_default'].nil?
|
||||
col << ' not null' if row['nullable'] == 'N'
|
||||
col
|
||||
end
|
||||
ddl << cols.join(",\n ")
|
||||
ddl << ");\n\n"
|
||||
structure << ddl
|
||||
end
|
||||
end
|
||||
|
||||
def structure_drop #:nodoc:
|
||||
s = select_all("select sequence_name from user_sequences").inject("") do |drop, seq|
|
||||
drop << "drop sequence #{seq.to_a.first.last};\n\n"
|
||||
end
|
||||
|
||||
select_all("select table_name from user_tables").inject(s) do |drop, table|
|
||||
drop << "drop table #{table.to_a.first.last} cascade constraints;\n\n"
|
||||
end
|
||||
end
|
||||
|
||||
def add_column_options!(sql, options) #:nodoc:
|
||||
# handle case of defaults for CLOB columns, which would otherwise get "quoted" incorrectly
|
||||
if options_include_default?(options) && (column = options[:column]) && column.type == :text
|
||||
sql << " DEFAULT #{quote(options.delete(:default))}"
|
||||
end
|
||||
super
|
||||
end
|
||||
|
||||
# SELECT DISTINCT clause for a given set of columns and a given ORDER BY clause.
#
# Oracle requires the ORDER BY columns to be in the SELECT list for DISTINCT
# queries. However, with those columns included in the SELECT DISTINCT list, you
# won't actually get a distinct list of the column you want (presuming the column
# has duplicates with multiple values for the ordered-by columns). So we use the
# FIRST_VALUE function to get a single (first) value for each column, effectively
# making every row the same.
#
#   distinct("posts.id", "posts.created_at desc")
def distinct(columns, order_by)
  return "DISTINCT #{columns}" if order_by.blank?

  # construct a valid DISTINCT clause, i.e. one that includes the ORDER BY columns, using
  # FIRST_VALUE such that the inclusion of these columns doesn't invalidate the DISTINCT
  order_columns = order_by.split(',').map { |s| s.strip }.reject(&:blank?)
  order_columns = order_columns.zip((0...order_columns.size).to_a).map do |c, i|
    "FIRST_VALUE(#{c.split.first}) OVER (PARTITION BY #{columns} ORDER BY #{c}) AS alias_#{i}__"
  end
  sql = "DISTINCT #{columns}, "
  sql << order_columns * ", "
end

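# For illustration only: for the documented call
#
#   distinct("posts.id", "posts.created_at desc")
#
# the method above returns a clause along the lines of
#
#   DISTINCT posts.id,
#     FIRST_VALUE(posts.created_at) OVER
#       (PARTITION BY posts.id ORDER BY posts.created_at desc) AS alias_0__
#
# so the ordered-by column stays selectable without breaking the DISTINCT.
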
# ORDER BY clause for the passed order option.
|
||||
#
|
||||
# Uses column aliases as defined by #distinct.
|
||||
def add_order_by_for_association_limiting!(sql, options)
|
||||
return sql if options[:order].blank?
|
||||
|
||||
order = options[:order].split(',').collect { |s| s.strip }.reject(&:blank?)
|
||||
order.map! {|s| $1 if s =~ / (.*)/}
|
||||
order = order.zip((0...order.size).to_a).map { |s,i| "alias_#{i}__ #{s}" }.join(', ')
|
||||
|
||||
sql << " ORDER BY #{order}"
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def select(sql, name = nil)
|
||||
cursor = execute(sql, name)
|
||||
cols = cursor.get_col_names.map { |x| oracle_downcase(x) }
|
||||
rows = []
|
||||
|
||||
while row = cursor.fetch
|
||||
hash = Hash.new
|
||||
|
||||
cols.each_with_index do |col, i|
|
||||
hash[col] =
|
||||
case row[i]
|
||||
when OCI8::LOB
|
||||
name == 'Writable Large Object' ? row[i]: row[i].read
|
||||
when OraDate
|
||||
d = row[i]
|
||||
if emulate_dates && (d.hour == 0 && d.minute == 0 && d.second == 0)
|
||||
d.to_date
|
||||
else
|
||||
# see string_to_time; Time overflowing to DateTime, respecting the default timezone
|
||||
time_array = [d.year, d.month, d.day, d.hour, d.minute, d.second]
|
||||
begin
|
||||
Time.send(Base.default_timezone, *time_array)
|
||||
rescue
|
||||
zone_offset = if Base.default_timezone == :local then DateTime.now.offset else 0 end
|
||||
# Append zero calendar reform start to account for dates skipped by calendar reform
|
||||
DateTime.new(*time_array[0..5] << zone_offset << 0) rescue nil
|
||||
end
|
||||
end
|
||||
else row[i]
|
||||
end unless col == 'raw_rnum_'
|
||||
end
|
||||
|
||||
rows << hash
|
||||
end
|
||||
|
||||
rows
|
||||
ensure
|
||||
cursor.close if cursor
|
||||
end
|
||||
|
||||
# Oracle column names by default are case-insensitive, but treated as upcase;
# for neatness, we'll downcase within Rails. EXCEPT that folks CAN quote
# their column names when creating Oracle tables, which makes them case-sensitive.
# I don't know anybody who does this, but we'll handle the theoretical case of a
# camelCase column name. I imagine other dbs handle this differently, since there's a
# unit test that's currently failing (test_oci).
|
||||
def oracle_downcase(column_name)
|
||||
column_name =~ /[a-z]/ ? column_name : column_name.downcase
|
||||
end
|
||||
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
class OCI8 #:nodoc:
|
||||
|
||||
# This OCI8 patch may no longer be required with the upcoming
|
||||
# release of version 0.2.
|
||||
class Cursor #:nodoc:
|
||||
alias :define_a_column_pre_ar :define_a_column
|
||||
def define_a_column(i)
|
||||
case do_ocicall(@ctx) { @parms[i - 1].attrGet(OCI_ATTR_DATA_TYPE) }
|
||||
when 8; @stmt.defineByPos(i, String, 65535) # Read LONG values
|
||||
when 187; @stmt.defineByPos(i, OraDate) # Read TIMESTAMP values
|
||||
when 108
|
||||
if @parms[i - 1].attrGet(OCI_ATTR_TYPE_NAME) == 'XMLTYPE'
|
||||
@stmt.defineByPos(i, String, 65535)
|
||||
else
|
||||
raise 'unsupported datatype'
|
||||
end
|
||||
else define_a_column_pre_ar i
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# missing constant from oci8 < 0.1.14
|
||||
OCI_PTYPE_UNK = 0 unless defined?(OCI_PTYPE_UNK)
|
||||
|
||||
# Uses the describeAny OCI call to find the target owner and table_name
|
||||
# indicated by +name+, parsing through synonynms as necessary. Returns
|
||||
# an array of [owner, table_name].
|
||||
def describe(name)
|
||||
@desc ||= @@env.alloc(OCIDescribe)
|
||||
@desc.attrSet(OCI_ATTR_DESC_PUBLIC, -1) if VERSION >= '0.1.14'
|
||||
@desc.describeAny(@svc, name.to_s, OCI_PTYPE_UNK) rescue raise %Q{"DESC #{name}" failed; does it exist?}
|
||||
info = @desc.attrGet(OCI_ATTR_PARAM)
|
||||
|
||||
case info.attrGet(OCI_ATTR_PTYPE)
|
||||
when OCI_PTYPE_TABLE, OCI_PTYPE_VIEW
|
||||
owner = info.attrGet(OCI_ATTR_OBJ_SCHEMA)
|
||||
table_name = info.attrGet(OCI_ATTR_OBJ_NAME)
|
||||
[owner, table_name]
|
||||
when OCI_PTYPE_SYN
|
||||
schema = info.attrGet(OCI_ATTR_SCHEMA_NAME)
|
||||
name = info.attrGet(OCI_ATTR_NAME)
|
||||
describe(schema + '.' + name)
|
||||
else raise %Q{"DESC #{name}" failed; not a table or view.}
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
|
||||
# The OracleConnectionFactory factors out the code necessary to connect and
|
||||
# configure an Oracle/OCI connection.
|
||||
class OracleConnectionFactory #:nodoc:
|
||||
def new_connection(username, password, database, async, prefetch_rows, cursor_sharing)
|
||||
conn = OCI8.new username, password, database
|
||||
conn.exec %q{alter session set nls_date_format = 'YYYY-MM-DD HH24:MI:SS'}
|
||||
conn.exec %q{alter session set nls_timestamp_format = 'YYYY-MM-DD HH24:MI:SS'} rescue nil
|
||||
conn.autocommit = true
|
||||
conn.non_blocking = true if async
|
||||
conn.prefetch_rows = prefetch_rows
|
||||
conn.exec "alter session set cursor_sharing = #{cursor_sharing}" rescue nil
|
||||
conn
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
# The OCI8AutoRecover class enhances the OCI8 driver with auto-recover and
# reset functionality. If a call to #exec fails, and autocommit is turned on
# (i.e., we're not in the middle of a longer transaction), it will
# automatically reconnect and try again. If autocommit is turned off,
# this would be dangerous (as the earlier part of the implied transaction
# may have failed silently if the connection died) -- so instead the
# connection is marked as dead, to be reconnected on its next use.
class OCI8AutoRecover < DelegateClass(OCI8) #:nodoc:
  attr_accessor :active
  alias :active? :active

  cattr_accessor :auto_retry
  class << self
    alias :auto_retry? :auto_retry
  end
  @@auto_retry = false
|
||||
|
||||
def initialize(config, factory = OracleConnectionFactory.new)
|
||||
@active = true
|
||||
@username, @password, @database, = config[:username], config[:password], config[:database]
|
||||
@async = config[:allow_concurrency]
|
||||
@prefetch_rows = config[:prefetch_rows] || 100
|
||||
@cursor_sharing = config[:cursor_sharing] || 'similar'
|
||||
@factory = factory
|
||||
@connection = @factory.new_connection @username, @password, @database, @async, @prefetch_rows, @cursor_sharing
|
||||
super @connection
|
||||
end
|
||||
|
||||
# Checks connection, returns true if active. Note that ping actively
|
||||
# checks the connection, while #active? simply returns the last
|
||||
# known state.
|
||||
def ping
|
||||
@connection.exec("select 1 from dual") { |r| nil }
|
||||
@active = true
|
||||
rescue
|
||||
@active = false
|
||||
raise
|
||||
end
|
||||
|
||||
# Resets connection, by logging off and creating a new connection.
|
||||
def reset!
|
||||
logoff rescue nil
|
||||
begin
|
||||
@connection = @factory.new_connection @username, @password, @database, @async, @prefetch_rows, @cursor_sharing
|
||||
__setobj__ @connection
|
||||
@active = true
|
||||
rescue
|
||||
@active = false
|
||||
raise
|
||||
end
|
||||
end
|
||||
|
||||
# ORA-00028: your session has been killed
|
||||
# ORA-01012: not logged on
|
||||
# ORA-03113: end-of-file on communication channel
|
||||
# ORA-03114: not connected to ORACLE
|
||||
LOST_CONNECTION_ERROR_CODES = [ 28, 1012, 3113, 3114 ]
|
||||
|
||||
# Adds auto-recovery functionality.
|
||||
#
|
||||
# See: http://www.jiubao.org/ruby-oci8/api.en.html#label-11
|
||||
def exec(sql, *bindvars, &block)
|
||||
should_retry = self.class.auto_retry? && autocommit?
|
||||
|
||||
begin
|
||||
@connection.exec(sql, *bindvars, &block)
|
||||
rescue OCIException => e
|
||||
raise unless LOST_CONNECTION_ERROR_CODES.include?(e.code)
|
||||
@active = false
|
||||
raise unless should_retry
|
||||
should_retry = false
|
||||
reset! rescue nil
|
||||
retry
|
||||
end
|
||||
end
|
||||
|
||||
end
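# For illustration only: automatic retry is off by default; an application
# could opt in with
#
#   OCI8AutoRecover.auto_retry = true
#
# With autocommit on, a statement that fails with one of the
# LOST_CONNECTION_ERROR_CODES above is retried once after a reset!; inside an
# explicit transaction the connection is only marked inactive and the error is
# re-raised.
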
|
||||
|
||||
rescue LoadError
|
||||
# OCI8 driver is unavailable.
|
||||
module ActiveRecord # :nodoc:
|
||||
class Base
|
||||
@@oracle_error_message = "Oracle/OCI libraries could not be loaded: #{$!.to_s}"
|
||||
def self.oracle_connection(config) # :nodoc:
|
||||
# Set up a reasonable error message
|
||||
raise LoadError, @@oracle_error_message
|
||||
end
|
||||
def self.oci_connection(config) # :nodoc:
|
||||
# Set up a reasonable error message
|
||||
raise LoadError, @@oracle_error_message
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,622 +0,0 @@
require 'active_record/connection_adapters/abstract_adapter'

require 'bigdecimal'
require 'bigdecimal/util'

# sqlserver_adapter.rb -- ActiveRecord adapter for Microsoft SQL Server
#
# Author: Joey Gibson <joey@joeygibson.com>
# Date: 10/14/2004
#
# Modifications: DeLynn Berry <delynnb@megastarfinancial.com>
# Date: 3/22/2005
#
# Modifications (ODBC): Mark Imbriaco <mark.imbriaco@pobox.com>
# Date: 6/26/2005

# Modifications (Migrations): Tom Ward <tom@popdog.net>
# Date: 27/10/2005
#
# Modifications (Numerous fixes as maintainer): Ryan Tomayko <rtomayko@gmail.com>
# Date: Up to July 2006

# Current maintainer: Tom Ward <tom@popdog.net>

module ActiveRecord
|
||||
class Base
|
||||
def self.sqlserver_connection(config) #:nodoc:
|
||||
require_library_or_gem 'dbi' unless self.class.const_defined?(:DBI)
|
||||
|
||||
config = config.symbolize_keys
|
||||
|
||||
mode = config[:mode] ? config[:mode].to_s.upcase : 'ADO'
|
||||
username = config[:username] ? config[:username].to_s : 'sa'
|
||||
password = config[:password] ? config[:password].to_s : ''
|
||||
autocommit = config.key?(:autocommit) ? config[:autocommit] : true
|
||||
if mode == "ODBC"
|
||||
raise ArgumentError, "Missing DSN. Argument ':dsn' must be set in order for this adapter to work." unless config.has_key?(:dsn)
|
||||
dsn = config[:dsn]
|
||||
driver_url = "DBI:ODBC:#{dsn}"
|
||||
else
|
||||
raise ArgumentError, "Missing Database. Argument ':database' must be set in order for this adapter to work." unless config.has_key?(:database)
|
||||
database = config[:database]
|
||||
host = config[:host] ? config[:host].to_s : 'localhost'
|
||||
driver_url = "DBI:ADO:Provider=SQLOLEDB;Data Source=#{host};Initial Catalog=#{database};User ID=#{username};Password=#{password};"
|
||||
end
|
||||
conn = DBI.connect(driver_url, username, password)
|
||||
conn["AutoCommit"] = autocommit
|
||||
ConnectionAdapters::SQLServerAdapter.new(conn, logger, [driver_url, username, password])
|
||||
end
|
||||
end # class Base
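# For illustration only (host, database, DSN and credentials are placeholders):
# the keys read by sqlserver_connection above, written as an explicit
# establish_connection call.
#
#   # ADO mode (the default; Windows only):
#   ActiveRecord::Base.establish_connection(
#     :adapter  => 'sqlserver',
#     :host     => 'localhost',
#     :database => 'myapp_development',
#     :username => 'sa',
#     :password => ''
#   )
#
#   # ODBC mode needs a :dsn instead of :host/:database:
#   ActiveRecord::Base.establish_connection(
#     :adapter  => 'sqlserver',
#     :mode     => 'odbc',
#     :dsn      => 'myapp_dsn',
#     :username => 'sa',
#     :password => ''
#   )
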
|
||||
|
||||
module ConnectionAdapters
|
||||
class SQLServerColumn < Column# :nodoc:
|
||||
attr_reader :identity, :is_special
|
||||
|
||||
def initialize(name, default, sql_type = nil, identity = false, null = true) # TODO: check ok to remove scale_value = 0
|
||||
super(name, default, sql_type, null)
|
||||
@identity = identity
|
||||
@is_special = sql_type =~ /text|ntext|image/i
|
||||
# TODO: check ok to remove @scale = scale_value
|
||||
# SQL Server only supports limits on *char and float types
|
||||
@limit = nil unless @type == :float or @type == :string
|
||||
end
|
||||
|
||||
def simplified_type(field_type)
|
||||
case field_type
|
||||
when /real/i then :float
|
||||
when /money/i then :decimal
|
||||
when /image/i then :binary
|
||||
when /bit/i then :boolean
|
||||
when /uniqueidentifier/i then :string
|
||||
else super
|
||||
end
|
||||
end
|
||||
|
||||
def type_cast(value)
|
||||
return nil if value.nil?
|
||||
case type
|
||||
when :datetime then cast_to_datetime(value)
|
||||
when :timestamp then cast_to_time(value)
|
||||
when :time then cast_to_time(value)
|
||||
when :date then cast_to_datetime(value)
|
||||
when :boolean then value == true or (value =~ /^t(rue)?$/i) == 0 or value.to_s == '1'
|
||||
else super
|
||||
end
|
||||
end
|
||||
|
||||
def cast_to_time(value)
|
||||
return value if value.is_a?(Time)
|
||||
time_array = ParseDate.parsedate(value)
|
||||
Time.send(Base.default_timezone, *time_array) rescue nil
|
||||
end
|
||||
|
||||
def cast_to_datetime(value)
|
||||
return value.to_time if value.is_a?(DBI::Timestamp)
|
||||
|
||||
if value.is_a?(Time)
|
||||
if value.year != 0 and value.month != 0 and value.day != 0
|
||||
return value
|
||||
else
|
||||
return Time.mktime(2000, 1, 1, value.hour, value.min, value.sec) rescue nil
|
||||
end
|
||||
end
|
||||
|
||||
if value.is_a?(DateTime)
|
||||
return Time.mktime(value.year, value.mon, value.day, value.hour, value.min, value.sec)
|
||||
end
|
||||
|
||||
return cast_to_time(value) if value.is_a?(Date) or value.is_a?(String) rescue nil
|
||||
value
|
||||
end
|
||||
|
||||
# TODO: Find less hack way to convert DateTime objects into Times
|
||||
|
||||
def self.string_to_time(value)
|
||||
if value.is_a?(DateTime)
|
||||
return Time.mktime(value.year, value.mon, value.day, value.hour, value.min, value.sec)
|
||||
else
|
||||
super
|
||||
end
|
||||
end
|
||||
|
||||
# These methods will only allow the adapter to insert binary data with a length of 7K or less
|
||||
# because of a SQL Server statement length policy.
|
||||
def self.string_to_binary(value)
|
||||
value.gsub(/(\r|\n|\0|\x1a)/) do
|
||||
case $1
|
||||
when "\r" then "%00"
|
||||
when "\n" then "%01"
|
||||
when "\0" then "%02"
|
||||
when "\x1a" then "%03"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def self.binary_to_string(value)
|
||||
value.gsub(/(%00|%01|%02|%03)/) do
|
||||
case $1
|
||||
when "%00" then "\r"
|
||||
when "%01" then "\n"
|
||||
when "%02\0" then "\0"
|
||||
when "%03" then "\x1a"
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# In ADO mode, this adapter will ONLY work on Windows systems,
# since it relies on Win32OLE, which, to my knowledge, is only
# available on Windows.
#
# This mode also relies on the ADO support in the DBI module. If you are using the
# one-click installer of Ruby, then you already have DBI installed, but
# the ADO module is *NOT* installed. You will need to get the latest
# source distribution of Ruby-DBI from http://ruby-dbi.sourceforge.net/
# unzip it, and copy the file
# <tt>src/lib/dbd_ado/ADO.rb</tt>
# to
# <tt>X:/Ruby/lib/ruby/site_ruby/1.8/DBD/ADO/ADO.rb</tt>
# (you will more than likely need to create the ADO directory).
# Once you've installed that file, you are ready to go.
#
# In ODBC mode, the adapter requires the ODBC support in the DBI module which requires
# the Ruby ODBC module. Ruby ODBC 0.996 was used in development and testing,
# and it is available at http://www.ch-werner.de/rubyodbc/
#
# Options:
#
# * <tt>:mode</tt>         -- ADO or ODBC. Defaults to ADO.
# * <tt>:username</tt>     -- Defaults to sa.
# * <tt>:password</tt>     -- Defaults to empty string.
# * <tt>:windows_auth</tt> -- Defaults to "User ID=#{username};Password=#{password}"
#
# ADO specific options:
#
# * <tt>:host</tt>         -- Defaults to localhost.
# * <tt>:database</tt>     -- The name of the database. No default, must be provided.
# * <tt>:windows_auth</tt> -- Use windows authentication instead of username/password.
#
# ODBC specific options:
#
# * <tt>:dsn</tt>          -- Defaults to nothing.
#
# ADO code tested on Windows 2000 and higher systems,
# running ruby 1.8.2 (2004-07-29) [i386-mswin32], and SQL Server 2000 SP3.
#
# ODBC code tested on a Fedora Core 4 system, running FreeTDS 0.63,
# unixODBC 2.2.11, Ruby ODBC 0.996, Ruby DBI 0.0.23 and Ruby 1.8.2.
# [Linux strongmad 2.6.11-1.1369_FC4 #1 Thu Jun 2 22:55:56 EDT 2005 i686 i686 i386 GNU/Linux]
class SQLServerAdapter < AbstractAdapter

def initialize(connection, logger, connection_options=nil)
|
||||
super(connection, logger)
|
||||
@connection_options = connection_options
|
||||
end
|
||||
|
||||
def native_database_types
|
||||
{
|
||||
:primary_key => "int NOT NULL IDENTITY(1, 1) PRIMARY KEY",
|
||||
:string => { :name => "varchar", :limit => 255 },
|
||||
:text => { :name => "text" },
|
||||
:integer => { :name => "int" },
|
||||
:float => { :name => "float", :limit => 8 },
|
||||
:decimal => { :name => "decimal" },
|
||||
:datetime => { :name => "datetime" },
|
||||
:timestamp => { :name => "datetime" },
|
||||
:time => { :name => "datetime" },
|
||||
:date => { :name => "datetime" },
|
||||
:binary => { :name => "image"},
|
||||
:boolean => { :name => "bit"}
|
||||
}
|
||||
end
|
||||
|
||||
def adapter_name
|
||||
'SQLServer'
|
||||
end
|
||||
|
||||
def supports_migrations? #:nodoc:
|
||||
true
|
||||
end
|
||||
|
||||
def type_to_sql(type, limit = nil, precision = nil, scale = nil) #:nodoc:
|
||||
return super unless type.to_s == 'integer'
|
||||
|
||||
if limit.nil? || limit == 4
|
||||
'integer'
|
||||
elsif limit < 4
|
||||
'smallint'
|
||||
else
|
||||
'bigint'
|
||||
end
|
||||
end
|
||||
|
||||
# CONNECTION MANAGEMENT ====================================#
|
||||
|
||||
# Returns true if the connection is active.
|
||||
def active?
|
||||
@connection.execute("SELECT 1").finish
|
||||
true
|
||||
rescue DBI::DatabaseError, DBI::InterfaceError
|
||||
false
|
||||
end
|
||||
|
||||
# Reconnects to the database, returns false if no connection could be made.
|
||||
def reconnect!
|
||||
disconnect!
|
||||
@connection = DBI.connect(*@connection_options)
|
||||
rescue DBI::DatabaseError => e
|
||||
@logger.warn "#{adapter_name} reconnection failed: #{e.message}" if @logger
|
||||
false
|
||||
end
|
||||
|
||||
# Disconnects from the database
|
||||
|
||||
def disconnect!
|
||||
@connection.disconnect rescue nil
|
||||
end
|
||||
|
||||
def select_rows(sql, name = nil)
|
||||
rows = []
|
||||
repair_special_columns(sql)
|
||||
log(sql, name) do
|
||||
@connection.select_all(sql) do |row|
|
||||
record = []
|
||||
row.each do |col|
|
||||
if col.is_a? DBI::Timestamp
|
||||
record << col.to_time
|
||||
else
|
||||
record << col
|
||||
end
|
||||
end
|
||||
rows << record
|
||||
end
|
||||
end
|
||||
rows
|
||||
end
|
||||
|
||||
def columns(table_name, name = nil)
|
||||
return [] if table_name.blank?
|
||||
table_name = table_name.to_s if table_name.is_a?(Symbol)
|
||||
table_name = table_name.split('.')[-1] unless table_name.nil?
|
||||
table_name = table_name.gsub(/[\[\]]/, '')
|
||||
sql = %Q{
|
||||
SELECT
|
||||
cols.COLUMN_NAME as ColName,
|
||||
cols.COLUMN_DEFAULT as DefaultValue,
|
||||
cols.NUMERIC_SCALE as numeric_scale,
|
||||
cols.NUMERIC_PRECISION as numeric_precision,
|
||||
cols.DATA_TYPE as ColType,
|
||||
cols.IS_NULLABLE As IsNullable,
|
||||
COL_LENGTH(cols.TABLE_NAME, cols.COLUMN_NAME) as Length,
|
||||
COLUMNPROPERTY(OBJECT_ID(cols.TABLE_NAME), cols.COLUMN_NAME, 'IsIdentity') as IsIdentity,
|
||||
cols.NUMERIC_SCALE as Scale
|
||||
FROM INFORMATION_SCHEMA.COLUMNS cols
|
||||
WHERE cols.TABLE_NAME = '#{table_name}'
|
||||
}
|
||||
# Comment out if you want to have the Columns select statement logged.
# Personally, I think it adds unnecessary bloat to the log.
# If you do comment it out, make sure to un-comment the "result" line that follows
|
||||
result = log(sql, name) { @connection.select_all(sql) }
|
||||
#result = @connection.select_all(sql)
|
||||
columns = []
|
||||
result.each do |field|
|
||||
default = field[:DefaultValue].to_s.gsub!(/[()\']/,"") =~ /null/ ? nil : field[:DefaultValue]
|
||||
if field[:ColType] =~ /numeric|decimal/i
|
||||
type = "#{field[:ColType]}(#{field[:numeric_precision]},#{field[:numeric_scale]})"
|
||||
else
|
||||
type = "#{field[:ColType]}(#{field[:Length]})"
|
||||
end
|
||||
is_identity = field[:IsIdentity] == 1
|
||||
is_nullable = field[:IsNullable] == 'YES'
|
||||
columns << SQLServerColumn.new(field[:ColName], default, type, is_identity, is_nullable)
|
||||
end
|
||||
columns
|
||||
end
|
||||
|
||||
def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)
|
||||
super || select_value("SELECT @@IDENTITY AS Ident")
|
||||
end
|
||||
|
||||
def update_sql(sql, name = nil)
|
||||
execute(sql, name) do |handle|
|
||||
handle.rows
|
||||
end || select_value("SELECT @@ROWCOUNT AS AffectedRows")
|
||||
end
|
||||
|
||||
def execute(sql, name = nil)
|
||||
if sql =~ /^\s*INSERT/i && (table_name = query_requires_identity_insert?(sql))
|
||||
log(sql, name) do
|
||||
with_identity_insert_enabled(table_name) do
|
||||
@connection.execute(sql) do |handle|
|
||||
yield(handle) if block_given?
|
||||
end
|
||||
end
|
||||
end
|
||||
else
|
||||
log(sql, name) do
|
||||
@connection.execute(sql) do |handle|
|
||||
yield(handle) if block_given?
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def begin_db_transaction
|
||||
@connection["AutoCommit"] = false
|
||||
rescue Exception => e
|
||||
@connection["AutoCommit"] = true
|
||||
end
|
||||
|
||||
def commit_db_transaction
|
||||
@connection.commit
|
||||
ensure
|
||||
@connection["AutoCommit"] = true
|
||||
end
|
||||
|
||||
def rollback_db_transaction
|
||||
@connection.rollback
|
||||
ensure
|
||||
@connection["AutoCommit"] = true
|
||||
end
|
||||
|
||||
def quote(value, column = nil)
|
||||
return value.quoted_id if value.respond_to?(:quoted_id)
|
||||
|
||||
case value
|
||||
when TrueClass then '1'
|
||||
when FalseClass then '0'
|
||||
else
|
||||
if value.acts_like?(:time)
|
||||
"'#{value.strftime("%Y%m%d %H:%M:%S")}'"
|
||||
elsif value.acts_like?(:date)
|
||||
"'#{value.strftime("%Y%m%d")}'"
|
||||
else
|
||||
super
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def quote_string(string)
|
||||
string.gsub(/\'/, "''")
|
||||
end
|
||||
|
||||
def quote_column_name(name)
|
||||
"[#{name}]"
|
||||
end
|
||||
|
||||
def add_limit_offset!(sql, options)
|
||||
if options[:limit] and options[:offset]
|
||||
total_rows = @connection.select_all("SELECT count(*) as TotalRows from (#{sql.gsub(/\bSELECT(\s+DISTINCT)?\b/i, "SELECT#{$1} TOP 1000000000")}) tally")[0][:TotalRows].to_i
|
||||
if (options[:limit] + options[:offset]) >= total_rows
|
||||
options[:limit] = (total_rows - options[:offset] >= 0) ? (total_rows - options[:offset]) : 0
|
||||
end
|
||||
sql.sub!(/^\s*SELECT(\s+DISTINCT)?/i, "SELECT * FROM (SELECT TOP #{options[:limit]} * FROM (SELECT#{$1} TOP #{options[:limit] + options[:offset]} ")
|
||||
sql << ") AS tmp1"
|
||||
if options[:order]
|
||||
options[:order] = options[:order].split(',').map do |field|
|
||||
parts = field.split(" ")
|
||||
tc = parts[0]
|
||||
if sql =~ /\.\[/ and tc =~ /\./ # if column quoting used in query
|
||||
tc.gsub!(/\./, '\\.\\[')
|
||||
tc << '\\]'
|
||||
end
|
||||
if sql =~ /#{tc} AS (t\d_r\d\d?)/
|
||||
parts[0] = $1
|
||||
elsif parts[0] =~ /\w+\.(\w+)/
|
||||
parts[0] = $1
|
||||
end
|
||||
parts.join(' ')
|
||||
end.join(', ')
|
||||
sql << " ORDER BY #{change_order_direction(options[:order])}) AS tmp2 ORDER BY #{options[:order]}"
|
||||
else
|
||||
sql << " ) AS tmp2"
|
||||
end
|
||||
elsif sql !~ /^\s*SELECT (@@|COUNT\()/i
|
||||
sql.sub!(/^\s*SELECT(\s+DISTINCT)?/i) do
|
||||
"SELECT#{$1} TOP #{options[:limit]}"
|
||||
end unless options[:limit].nil?
|
||||
end
|
||||
end
|
||||
|
||||
def recreate_database(name)
|
||||
drop_database(name)
|
||||
create_database(name)
|
||||
end
|
||||
|
||||
def drop_database(name)
|
||||
execute "DROP DATABASE #{name}"
|
||||
end
|
||||
|
||||
def create_database(name)
|
||||
execute "CREATE DATABASE #{name}"
|
||||
end
|
||||
|
||||
def current_database
|
||||
@connection.select_one("select DB_NAME()")[0]
|
||||
end
|
||||
|
||||
def tables(name = nil)
|
||||
execute("SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE = 'BASE TABLE'", name) do |sth|
|
||||
result = sth.inject([]) do |tables, field|
|
||||
table_name = field[0]
|
||||
tables << table_name unless table_name == 'dtproperties'
|
||||
tables
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def indexes(table_name, name = nil)
|
||||
ActiveRecord::Base.connection.instance_variable_get("@connection")["AutoCommit"] = false
|
||||
indexes = []
|
||||
execute("EXEC sp_helpindex '#{table_name}'", name) do |handle|
|
||||
if handle.column_info.any?
|
||||
handle.each do |index|
|
||||
unique = index[1] =~ /unique/
|
||||
primary = index[1] =~ /primary key/
|
||||
if !primary
|
||||
indexes << IndexDefinition.new(table_name, index[0], unique, index[2].split(", ").map {|e| e.gsub('(-)','')})
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
indexes
|
||||
ensure
|
||||
ActiveRecord::Base.connection.instance_variable_get("@connection")["AutoCommit"] = true
|
||||
end
|
||||
|
||||
def rename_table(name, new_name)
|
||||
execute "EXEC sp_rename '#{name}', '#{new_name}'"
|
||||
end
|
||||
|
||||
# Adds a new column to the named table.
|
||||
# See TableDefinition#column for details of the options you can use.
|
||||
def add_column(table_name, column_name, type, options = {})
|
||||
add_column_sql = "ALTER TABLE #{table_name} ADD #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
|
||||
add_column_options!(add_column_sql, options)
|
||||
# TODO: Add support to mimic date columns, using constraints to mark them as such in the database
|
||||
# add_column_sql << " CONSTRAINT ck__#{table_name}__#{column_name}__date_only CHECK ( CONVERT(CHAR(12), #{quote_column_name(column_name)}, 14)='00:00:00:000' )" if type == :date
|
||||
execute(add_column_sql)
|
||||
end
|
||||
|
||||
def rename_column(table, column, new_column_name)
|
||||
execute "EXEC sp_rename '#{table}.#{column}', '#{new_column_name}'"
|
||||
end
|
||||
|
||||
def change_column(table_name, column_name, type, options = {}) #:nodoc:
|
||||
sql_commands = ["ALTER TABLE #{table_name} ALTER COLUMN #{column_name} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"]
|
||||
if options_include_default?(options)
|
||||
remove_default_constraint(table_name, column_name)
|
||||
sql_commands << "ALTER TABLE #{table_name} ADD CONSTRAINT DF_#{table_name}_#{column_name} DEFAULT #{quote(options[:default], options[:column])} FOR #{column_name}"
|
||||
end
|
||||
sql_commands.each {|c|
|
||||
execute(c)
|
||||
}
|
||||
end
|
||||
|
||||
def change_column_default(table_name, column_name, default)
|
||||
remove_default_constraint(table_name, column_name)
|
||||
execute "ALTER TABLE #{table_name} ADD CONSTRAINT DF_#{table_name}_#{column_name} DEFAULT #{quote(default, column_name)} FOR #{column_name}"
|
||||
end
|
||||
|
||||
def remove_column(table_name, column_name)
|
||||
remove_check_constraints(table_name, column_name)
|
||||
remove_default_constraint(table_name, column_name)
|
||||
execute "ALTER TABLE [#{table_name}] DROP COLUMN [#{column_name}]"
|
||||
end
|
||||
|
||||
def remove_default_constraint(table_name, column_name)
|
||||
constraints = select "select def.name from sysobjects def, syscolumns col, sysobjects tab where col.cdefault = def.id and col.name = '#{column_name}' and tab.name = '#{table_name}' and col.id = tab.id"
|
||||
|
||||
constraints.each do |constraint|
|
||||
execute "ALTER TABLE #{table_name} DROP CONSTRAINT #{constraint["name"]}"
|
||||
end
|
||||
end
|
||||
|
||||
def remove_check_constraints(table_name, column_name)
|
||||
# TODO remove all constraints in single method
|
||||
constraints = select "SELECT CONSTRAINT_NAME FROM INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE where TABLE_NAME = '#{table_name}' and COLUMN_NAME = '#{column_name}'"
|
||||
constraints.each do |constraint|
|
||||
execute "ALTER TABLE #{table_name} DROP CONSTRAINT #{constraint["CONSTRAINT_NAME"]}"
|
||||
end
|
||||
end
|
||||
|
||||
def remove_index(table_name, options = {})
|
||||
execute "DROP INDEX #{table_name}.#{quote_column_name(index_name(table_name, options))}"
|
||||
end
|
||||
|
||||
private
|
||||
def select(sql, name = nil)
|
||||
repair_special_columns(sql)
|
||||
|
||||
result = []
|
||||
execute(sql) do |handle|
|
||||
handle.each do |row|
|
||||
row_hash = {}
|
||||
row.each_with_index do |value, i|
|
||||
if value.is_a? DBI::Timestamp
|
||||
value = DateTime.new(value.year, value.month, value.day, value.hour, value.minute, value.sec)
|
||||
end
|
||||
row_hash[handle.column_names[i]] = value
|
||||
end
|
||||
result << row_hash
|
||||
end
|
||||
end
|
||||
result
|
||||
end
|
||||
|
||||
# Turns IDENTITY_INSERT ON for table during execution of the block
# N.B. This sets the state of IDENTITY_INSERT to OFF after the
# block has been executed without regard to its previous state

def with_identity_insert_enabled(table_name, &block)
  set_identity_insert(table_name, true)
  yield
ensure
  set_identity_insert(table_name, false)
end

def set_identity_insert(table_name, enable = true)
  execute "SET IDENTITY_INSERT #{table_name} #{enable ? 'ON' : 'OFF'}"
rescue Exception => e
  raise ActiveRecordError, "IDENTITY_INSERT could not be turned #{enable ? 'ON' : 'OFF'} for table #{table_name}"
end

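# For illustration only (table and values are hypothetical): an INSERT that
# supplies an explicit value for the identity column (as fixtures do) is
# detected by query_requires_identity_insert? below and run by #execute inside
# with_identity_insert_enabled, so the statements sent to the server look like:
#
#   SET IDENTITY_INSERT customers ON
#   INSERT INTO customers ([id], [name]) VALUES (42, 'Frodo')
#   SET IDENTITY_INSERT customers OFF
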
def get_table_name(sql)
|
||||
if sql =~ /^\s*insert\s+into\s+([^\(\s]+)\s*|^\s*update\s+([^\(\s]+)\s*/i
|
||||
$1
|
||||
elsif sql =~ /from\s+([^\(\s]+)\s*/i
|
||||
$1
|
||||
else
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
def identity_column(table_name)
|
||||
@table_columns = {} unless @table_columns
|
||||
@table_columns[table_name] = columns(table_name) if @table_columns[table_name] == nil
|
||||
@table_columns[table_name].each do |col|
|
||||
return col.name if col.identity
|
||||
end
|
||||
|
||||
return nil
|
||||
end
|
||||
|
||||
def query_requires_identity_insert?(sql)
|
||||
table_name = get_table_name(sql)
|
||||
id_column = identity_column(table_name)
|
||||
sql =~ /\[#{id_column}\]/ ? table_name : nil
|
||||
end
|
||||
|
||||
def change_order_direction(order)
|
||||
order.split(",").collect {|fragment|
|
||||
case fragment
|
||||
when /\bDESC\b/i then fragment.gsub(/\bDESC\b/i, "ASC")
|
||||
when /\bASC\b/i then fragment.gsub(/\bASC\b/i, "DESC")
|
||||
else String.new(fragment).split(',').join(' DESC,') + ' DESC'
|
||||
end
|
||||
}.join(",")
|
||||
end
|
||||
|
||||
def get_special_columns(table_name)
|
||||
special = []
|
||||
@table_columns ||= {}
|
||||
@table_columns[table_name] ||= columns(table_name)
|
||||
@table_columns[table_name].each do |col|
|
||||
special << col.name if col.is_special
|
||||
end
|
||||
special
|
||||
end
|
||||
|
||||
def repair_special_columns(sql)
|
||||
special_cols = get_special_columns(get_table_name(sql))
|
||||
for col in special_cols.to_a
|
||||
sql.gsub!(Regexp.new(" #{col.to_s} = "), " #{col.to_s} LIKE ")
|
||||
sql.gsub!(/ORDER BY #{col.to_s}/i, '')
|
||||
end
|
||||
sql
|
||||
end
|
||||
|
||||
end #class SQLServerAdapter < AbstractAdapter
|
||||
end #module ConnectionAdapters
|
||||
end #module ActiveRecord
|
||||
@@ -1,664 +0,0 @@
# sybase_adapter.rb
# Author: John R. Sheets
#
# 01 Mar 2006: Initial version. Based on code from Will Sobel
#              (http://dev.rubyonrails.org/ticket/2030)
#
# 17 Mar 2006: Added support for migrations; fixed issues with :boolean columns.
#
# 13 Apr 2006: Improved column type support to properly handle dates and user-defined
#              types; fixed quoting of integer columns.
#
# 05 Jan 2007: Updated for Rails 1.2 release:
#              restricted Fixtures#insert_fixtures monkeypatch to Sybase adapter;
#              removed SQL type precision from TEXT type to fix broken
#              ActiveRecordStore (jburks, #6878); refactored select() to use execute();
#              fixed leaked exception for no-op change_column(); removed verbose SQL dump
#              from columns(); added missing scale parameter in normalize_type().

require 'active_record/connection_adapters/abstract_adapter'

begin
  require 'sybsql'

module ActiveRecord
|
||||
class Base
|
||||
# Establishes a connection to the database that's used by all Active Record objects
|
||||
def self.sybase_connection(config) # :nodoc:
|
||||
config = config.symbolize_keys
|
||||
|
||||
username = config[:username] ? config[:username].to_s : 'sa'
|
||||
password = config[:password] ? config[:password].to_s : ''
|
||||
|
||||
if config.has_key?(:host)
|
||||
host = config[:host]
|
||||
else
|
||||
raise ArgumentError, "No database server name specified. Missing argument: host."
|
||||
end
|
||||
|
||||
if config.has_key?(:database)
|
||||
database = config[:database]
|
||||
else
|
||||
raise ArgumentError, "No database specified. Missing argument: database."
|
||||
end
|
||||
|
||||
ConnectionAdapters::SybaseAdapter.new(
|
||||
SybSQL.new({'S' => host, 'U' => username, 'P' => password},
|
||||
ConnectionAdapters::SybaseAdapterContext), database, config, logger)
|
||||
end
|
||||
end # class Base
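# For illustration only (server, database and credentials are placeholders):
# the arguments required by sybase_connection above, written as an explicit
# establish_connection call.
#
#   ActiveRecord::Base.establish_connection(
#     :adapter  => 'sybase',
#     :host     => 'SYBASE_SERVER',        # Open Client server name, required
#     :database => 'myapp_development',    # required
#     :username => 'sa',
#     :password => ''
#   )
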
|
||||
|
||||
module ConnectionAdapters
|
||||
|
||||
# ActiveRecord connection adapter for Sybase Open Client bindings
# (see http://raa.ruby-lang.org/project/sybase-ctlib).
#
# Options:
#
# * <tt>:host</tt> -- The name of the database server. No default, must be provided.
# * <tt>:database</tt> -- The name of the database. No default, must be provided.
# * <tt>:username</tt> -- Defaults to "sa".
# * <tt>:password</tt> -- Defaults to empty string.
#
# Usage Notes:
#
# * The sybase-ctlib bindings do not support the DATE SQL column type; use DATETIME instead.
# * Table and column names are limited to 30 chars in Sybase 12.5
# * :binary columns not yet supported
# * :boolean columns use the BIT SQL type, which does not allow nulls or
#   indexes. If a DEFAULT is not specified for ALTER TABLE commands, the
#   column will be declared with DEFAULT 0 (false).
#
# Migrations:
#
# The Sybase adapter supports migrations, but for ALTER TABLE commands to
# work, the database must have the database option 'select into' set to
# 'true' with sp_dboption (see below). The sp_helpdb command lists the current
# options for all databases.
#
#   1> use mydb
#   2> go
#   1> master..sp_dboption mydb, "select into", true
#   2> go
#   1> checkpoint
#   2> go
class SybaseAdapter < AbstractAdapter # :nodoc:
class ColumnWithIdentity < Column
|
||||
attr_reader :identity
|
||||
|
||||
def initialize(name, default, sql_type = nil, nullable = nil, identity = nil, primary = nil)
|
||||
super(name, default, sql_type, nullable)
|
||||
@default, @identity, @primary = type_cast(default), identity, primary
|
||||
end
|
||||
|
||||
def simplified_type(field_type)
|
||||
case field_type
|
||||
when /int|bigint|smallint|tinyint/i then :integer
|
||||
when /float|double|real/i then :float
|
||||
when /decimal|money|numeric|smallmoney/i then :decimal
|
||||
when /text|ntext/i then :text
|
||||
when /binary|image|varbinary/i then :binary
|
||||
when /char|nchar|nvarchar|string|varchar/i then :string
|
||||
when /bit/i then :boolean
|
||||
when /datetime|smalldatetime/i then :datetime
|
||||
else super
|
||||
end
|
||||
end
|
||||
|
||||
def self.string_to_binary(value)
|
||||
"0x#{value.unpack("H*")[0]}"
|
||||
end
|
||||
|
||||
def self.binary_to_string(value)
|
||||
# FIXME: sybase-ctlib uses separate sql method for binary columns.
|
||||
value
|
||||
end
|
||||
end # class ColumnWithIdentity
|
||||
|
||||
# Sybase adapter
|
||||
def initialize(connection, database, config = {}, logger = nil)
|
||||
super(connection, logger)
|
||||
context = connection.context
|
||||
context.init(logger)
|
||||
@config = config
|
||||
@numconvert = config.has_key?(:numconvert) ? config[:numconvert] : true
|
||||
@limit = @offset = 0
|
||||
unless connection.sql_norow("USE #{database}")
|
||||
raise "Cannot USE #{database}"
|
||||
end
|
||||
end
|
||||
|
||||
def native_database_types
|
||||
{
|
||||
:primary_key => "numeric(9,0) IDENTITY PRIMARY KEY",
|
||||
:string => { :name => "varchar", :limit => 255 },
|
||||
:text => { :name => "text" },
|
||||
:integer => { :name => "int" },
|
||||
:float => { :name => "float", :limit => 8 },
|
||||
:decimal => { :name => "decimal" },
|
||||
:datetime => { :name => "datetime" },
|
||||
:timestamp => { :name => "timestamp" },
|
||||
:time => { :name => "time" },
|
||||
:date => { :name => "datetime" },
|
||||
:binary => { :name => "image"},
|
||||
:boolean => { :name => "bit" }
|
||||
}
|
||||
end
|
||||
|
||||
def type_to_sql(type, limit = nil, precision = nil, scale = nil) #:nodoc:
|
||||
return super unless type.to_s == 'integer'
|
||||
if !limit.nil? && limit < 4
|
||||
'smallint'
|
||||
else
|
||||
'integer'
|
||||
end
|
||||
end
|
||||
|
||||
      def adapter_name
        'Sybase'
      end

      def active?
        !(@connection.connection.nil? || @connection.connection_dead?)
      end

      def disconnect!
        @connection.close rescue nil
      end

      def reconnect!
        raise "Sybase Connection Adapter does not yet support reconnect!"
        # disconnect!
        # connect! # Not yet implemented
      end

      def table_alias_length
        30
      end

      def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)
        begin
          table_name = get_table_name(sql)
          col = get_identity_column(table_name)
          ii_enabled = false

          if col != nil
            if query_contains_identity_column(sql, col)
              begin
                enable_identity_insert(table_name, true)
                ii_enabled = true
              rescue Exception => e
                raise ActiveRecordError, "IDENTITY_INSERT could not be turned ON"
              end
            end
          end

          log(sql, name) do
            super || select_value("SELECT @@IDENTITY AS last_id")
          end
        ensure
          if ii_enabled
            begin
              enable_identity_insert(table_name, false)
            rescue Exception => e
              raise ActiveRecordError, "IDENTITY_INSERT could not be turned OFF"
            end
          end
        end
      end

      def execute(sql, name = nil)
        raw_execute(sql, name)
        @connection.results[0].row_count
      end

      def begin_db_transaction()    raw_execute "BEGIN TRAN"    end
      def commit_db_transaction()   raw_execute "COMMIT TRAN"   end
      def rollback_db_transaction() raw_execute "ROLLBACK TRAN" end

      def current_database
        select_value("select DB_NAME() as name")
      end

      def tables(name = nil)
        select("select name from sysobjects where type='U'", name).map { |row| row['name'] }
      end

      def indexes(table_name, name = nil)
        select("exec sp_helpindex #{table_name}", name).map do |index|
          unique  = index["index_description"] =~ /unique/
          primary = index["index_description"] =~ /^clustered/
          if !primary
            cols = index["index_keys"].split(", ").each { |col| col.strip! }
            IndexDefinition.new(table_name, index["index_name"], unique, cols)
          end
        end.compact
      end

      def columns(table_name, name = nil)
        sql = <<SQLTEXT
SELECT col.name AS name, type.name AS type, col.prec, col.scale,
col.length, col.status, obj.sysstat2, def.text
FROM sysobjects obj, syscolumns col, systypes type, syscomments def
WHERE obj.id = col.id AND col.usertype = type.usertype AND type.name != 'timestamp'
AND col.cdefault *= def.id AND obj.type = 'U' AND obj.name = '#{table_name}' ORDER BY col.colid
SQLTEXT
        @logger.debug "Get Column Info for table '#{table_name}'" if @logger
        @connection.set_rowcount(0)
        @connection.sql(sql)

        raise "SQL Command for table_structure for #{table_name} failed\nMessage: #{@connection.context.message}" if @connection.context.failed?
        return nil if @connection.cmd_fail?

        @connection.top_row_result.rows.map do |row|
          name, type, prec, scale, length, status, sysstat2, default = row
          name.sub!(/_$/o, '')
          type = normalize_type(type, prec, scale, length)
          default_value = nil
          if default =~ /DEFAULT\s+(.+)/o
            default_value = $1.strip
            default_value = default_value[1...-1] if default_value =~ /^['"]/o
          end
          nullable = (status & 8) == 8
          identity = status >= 128
          primary  = (sysstat2 & 8) == 8
          ColumnWithIdentity.new(name, default_value, type, nullable, identity, primary)
        end
      end

      def quoted_true
        "1"
      end

      def quoted_false
        "0"
      end

      def quote(value, column = nil)
        return value.quoted_id if value.respond_to?(:quoted_id)

        case value
          when String
            if column && column.type == :binary && column.class.respond_to?(:string_to_binary)
              "#{quote_string(column.class.string_to_binary(value))}"
            elsif @numconvert && force_numeric?(column) && value =~ /^[+-]?[0-9]+$/o
              value
            else
              "'#{quote_string(value)}'"
            end
          when NilClass              then (column && column.type == :boolean) ? '0' : "NULL"
          when TrueClass             then '1'
          when FalseClass            then '0'
          when Float, Fixnum, Bignum then force_numeric?(column) ? value.to_s : "'#{value.to_s}'"
          else
            if value.acts_like?(:time)
              "'#{value.strftime("%Y-%m-%d %H:%M:%S")}'"
            else
              super
            end
        end
      end

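      # A few illustrative results of #quote (hypothetical calls, shown only
      # as a sketch; the column objects are assumed to respond to #type):
      #
      #   quote("O'Brien")                      # => "'O''Brien'"
      #   quote("42", integer_column)           # => "42"  (passed through when :numconvert is on)
      #   quote(nil, boolean_column)            # => "0"
      #   quote(true)                           # => "1"
      #   quote(Time.local(2007, 9, 29, 12, 0)) # => "'2007-09-29 12:00:00'"
      #
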
      # True if the column is declared with a numeric type (integer, float,
      # or decimal), or if no column is given at all (not specified).
      def force_numeric?(column)
        (column.nil? || [:integer, :float, :decimal].include?(column.type))
      end

      def quote_string(s)
        s.gsub(/'/, "''") # ' (for ruby-mode)
      end

      def quote_column_name(name)
        # If the column name is close to max length, skip the quotes, since
        # they seem to count as part of the length.
        ((name.to_s.length + 2) <= table_alias_length) ? "[#{name}]" : name.to_s
      end

      def add_limit_offset!(sql, options) # :nodoc:
        @limit = options[:limit]
        @offset = options[:offset]
        if use_temp_table?
          # Use temp table to hack offset with Sybase
          sql.sub!(/ FROM /i, ' INTO #artemp FROM ')
        elsif zero_limit?
          # "SET ROWCOUNT 0" turns off limits, so we have
          # to use a cheap trick.
          if sql =~ /WHERE/i
            sql.sub!(/WHERE/i, 'WHERE 1 = 2 AND ')
          elsif sql =~ /ORDER\s+BY/i
            sql.sub!(/ORDER\s+BY/i, 'WHERE 1 = 2 ORDER BY')
          else
            sql << 'WHERE 1 = 2'
          end
        end
      end

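      # Sketch of the rewriting done above (the table and query are made up):
      #
      #   add_limit_offset!("SELECT * FROM people", :limit => 10, :offset => 20)
      #   # rewrites sql to: SELECT * INTO #artemp FROM people
      #   # (rows are pruned later in #select)
      #
      #   add_limit_offset!("SELECT * FROM people WHERE id > 3", :limit => 0)
      #   # rewrites sql to roughly: SELECT * FROM people WHERE 1 = 2 AND id > 3
      #   # (returns no rows)
      #
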
      def add_lock!(sql, options) #:nodoc:
        @logger.info "Warning: Sybase :lock option '#{options[:lock].inspect}' not supported" if @logger && options.has_key?(:lock)
        sql
      end

      def supports_migrations? #:nodoc:
        true
      end

      def rename_table(name, new_name)
        execute "EXEC sp_rename '#{name}', '#{new_name}'"
      end

      def rename_column(table, column, new_column_name)
        execute "EXEC sp_rename '#{table}.#{column}', '#{new_column_name}'"
      end

      def change_column(table_name, column_name, type, options = {}) #:nodoc:
        begin
          execute "ALTER TABLE #{table_name} MODIFY #{column_name} #{type_to_sql(type, options[:limit])}"
        rescue StatementInvalid => e
          # Swallow exception and reset context if no-op.
          raise e unless e.message =~ /no columns to drop, add or modify/
          @connection.context.reset
        end

        if options.has_key?(:default)
          remove_default_constraint(table_name, column_name)
          execute "ALTER TABLE #{table_name} REPLACE #{column_name} DEFAULT #{quote options[:default]}"
        end
      end

      def remove_column(table_name, column_name)
        remove_default_constraint(table_name, column_name)
        execute "ALTER TABLE #{table_name} DROP #{column_name}"
      end

      def remove_default_constraint(table_name, column_name)
        sql = "select def.name from sysobjects def, syscolumns col, sysobjects tab where col.cdefault = def.id and col.name = '#{column_name}' and tab.name = '#{table_name}' and col.id = tab.id"
        select(sql).each do |constraint|
          execute "ALTER TABLE #{table_name} DROP CONSTRAINT #{constraint["name"]}"
        end
      end

      def remove_index(table_name, options = {})
        execute "DROP INDEX #{table_name}.#{index_name(table_name, options)}"
      end

      def add_column_options!(sql, options) #:nodoc:
        sql << " DEFAULT #{quote(options[:default], options[:column])}" if options_include_default?(options)

        if check_null_for_column?(options[:column], sql)
          sql << (options[:null] == false ? " NOT NULL" : " NULL")
        end
        sql
      end

      def enable_identity_insert(table_name, enable = true)
        if has_identity_column(table_name)
          execute "SET IDENTITY_INSERT #{table_name} #{enable ? 'ON' : 'OFF'}"
        end
      end

      private
        def check_null_for_column?(col, sql)
          # Sybase columns are NOT NULL by default, so explicitly set NULL
          # if the :null option is omitted. Disallow NULLs for boolean.
          type = col.nil? ? "" : col[:type]

          # Ignore :null if a primary key
          return false if type =~ /PRIMARY KEY/i

          # Ignore :null if a :boolean or BIT column
          if (sql =~ /\s+bit(\s+DEFAULT)?/i) || type == :boolean
            # If no default clause found on a boolean column, add one.
            sql << " DEFAULT 0" if $1.nil?
            return false
          end
          true
        end

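        # Sketch of how the NULL handling above plays out when adding columns
        # (the table and columns are hypothetical, and the generated SQL is
        # only approximate):
        #
        #   add_column :people, :nickname, :string
        #   # => ALTER TABLE people ADD [nickname] varchar(255) NULL
        #
        #   add_column :people, :admin, :boolean
        #   # => ALTER TABLE people ADD [admin] bit DEFAULT 0   (no NULL clause for BIT)
        #
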
        # Return the last value of the @@IDENTITY global variable.
        def last_insert_id
          @connection.sql("SELECT @@IDENTITY")
          unless @connection.cmd_fail?
            id = @connection.top_row_result.rows.first.first
            if id
              id = id.to_i
              id = nil if id == 0
            end
          else
            id = nil
          end
          id
        end

        def affected_rows(name = nil)
          @connection.sql("SELECT @@ROWCOUNT")
          unless @connection.cmd_fail?
            count = @connection.top_row_result.rows.first.first
            count = count.to_i if count
          else
            0
          end
        end

        # If limit is not set at all, we can ignore offset;
        # if limit *is* set but offset is zero, use a normal select
        # with a simple SET ROWCOUNT. Thus, only use the temp table
        # if limit is set and offset > 0.
        def use_temp_table?
          !@limit.nil? && !@offset.nil? && @offset > 0
        end

        def zero_limit?
          !@limit.nil? && @limit == 0
        end

        def raw_execute(sql, name = nil)
          log(sql, name) do
            @connection.context.reset
            @logger.debug "Setting row count to (#{@limit})" if @logger && @limit
            @connection.set_rowcount(@limit || 0)
            if sql =~ /^\s*SELECT/i
              @connection.sql(sql)
            else
              @connection.sql_norow(sql)
            end
            @limit = @offset = nil
            if @connection.cmd_fail? or @connection.context.failed?
              raise "SQL Command Failed for #{name}: #{sql}\nMessage: #{@connection.context.message}"
            end
          end
        end

        # Select +limit+ rows starting at the optional +offset+.
        def select(sql, name = nil)
          if !use_temp_table?
            execute(sql, name)
          else
            log(sql, name) do
              # Select into a temp table and prune the results
              @logger.debug "Selecting #{@limit + (@offset || 0)} or fewer rows into #artemp" if @logger
              @connection.context.reset
              @connection.set_rowcount(@limit + (@offset || 0))
              @connection.sql_norow(sql) # Select into temp table
              @logger.debug "Deleting #{@offset || 0} or fewer rows from #artemp" if @logger
              @connection.set_rowcount(@offset || 0)
              @connection.sql_norow("delete from #artemp") # Delete leading rows
              @connection.set_rowcount(0)
              @connection.sql("select * from #artemp") # Return the rest
            end
          end

          raise StatementInvalid, "SQL Command Failed for #{name}: #{sql}\nMessage: #{@connection.context.message}" if @connection.context.failed? or @connection.cmd_fail?

          rows = []
          results = @connection.top_row_result
          if results && results.rows.length > 0
            fields = results.columns.map { |column| column.sub(/_$/, '') }
            results.rows.each do |row|
              hashed_row = {}
              row.zip(fields) { |cell, column| hashed_row[column] = cell }
              rows << hashed_row
            end
          end
          @connection.sql_norow("drop table #artemp") if use_temp_table?
          @limit = @offset = nil
          rows
        end

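        # Worked example of the offset emulation above (the numbers are only
        # illustrative): with :limit => 10 and :offset => 20, the adapter
        #
        #   1. runs SET ROWCOUNT 30 and selects into #artemp (the first 30 rows),
        #   2. runs SET ROWCOUNT 20 and deletes from #artemp (drops rows 1-20),
        #   3. resets ROWCOUNT to 0 and returns the remaining 10 rows.
        #
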
        def get_table_name(sql)
          if sql =~ /^\s*insert\s+into\s+([^\(\s]+)\s*|^\s*update\s+([^\(\s]+)\s*/i
            $1
          elsif sql =~ /from\s+([^\(\s]+)\s*/i
            $1
          else
            nil
          end
        end

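        # For instance (hypothetical statements):
        #
        #   get_table_name("INSERT INTO people (name) VALUES ('x')")  # => "people"
        #   get_table_name("SELECT * FROM people WHERE id = 1")       # => "people"
        #
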
        def has_identity_column(table_name)
          !get_identity_column(table_name).nil?
        end

        def get_identity_column(table_name)
          @id_columns ||= {}
          if !@id_columns.has_key?(table_name)
            @logger.debug "Looking up identity column for table '#{table_name}'" if @logger
            col = columns(table_name).detect { |col| col.identity }
            @id_columns[table_name] = col.nil? ? nil : col.name
          end
          @id_columns[table_name]
        end

        def query_contains_identity_column(sql, col)
          sql =~ /\[#{col}\]/
        end

        # Resolve all user-defined types (udt) to their fundamental types.
        def resolve_type(field_type)
          (@udts ||= {})[field_type] ||= select_one("sp_help #{field_type}")["Storage_type"].strip
        end

        def normalize_type(field_type, prec, scale, length)
          has_scale = (!scale.nil? && scale > 0)
          type = if field_type =~ /numeric/i and !has_scale
            'int'
          elsif field_type =~ /money/i
            'numeric'
          else
            resolve_type(field_type.strip)
          end

          spec = if prec
            has_scale ? "(#{prec},#{scale})" : "(#{prec})"
          elsif length && !(type =~ /date|time|text/)
            "(#{length})"
          else
            ''
          end
          "#{type}#{spec}"
        end
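
        # A few illustrative results (assuming sp_help resolves base types to
        # themselves; the calls are hypothetical):
        #
        #   normalize_type('numeric', 9, 0, nil)    # => "int(9)"
        #   normalize_type('money',   10, 4, nil)   # => "numeric(10,4)"
        #   normalize_type('varchar', nil, nil, 40) # => "varchar(40)"
        #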
    end # class SybaseAdapter

    class SybaseAdapterContext < SybSQLContext
      DEADLOCK = 1205
      attr_reader :message

      def init(logger = nil)
        @deadlocked = false
        @failed = false
        @logger = logger
        @message = nil
      end

      def srvmsgCB(con, msg)
        # Do not log change-of-context messages.
        if msg['severity'] == 10 or msg['severity'] == 0
          return true
        end

        if msg['msgnumber'] == DEADLOCK
          @deadlocked = true
        else
          @logger.info "SQL Command failed!" if @logger
          @failed = true
        end

        if @logger
          @logger.error "** SybSQLContext Server Message: **"
          @logger.error "  Message number #{msg['msgnumber']} Severity #{msg['severity']} State #{msg['state']} Line #{msg['line']}"
          @logger.error "  Server #{msg['srvname']}"
          @logger.error "  Procedure #{msg['proc']}"
          @logger.error "  Message String: #{msg['text']}"
        end

        @message = msg['text']

        true
      end

      def deadlocked?
        @deadlocked
      end

      def failed?
        @failed
      end

      def reset
        @deadlocked = false
        @failed = false
        @message = nil
      end

      def cltmsgCB(con, msg)
        return true unless msg.kind_of?(Hash)
        return true unless msg["severity"]

        if @logger
          @logger.error "** SybSQLContext Client-Message: **"
          @logger.error "  Message number: LAYER=#{msg['layer']} ORIGIN=#{msg['origin']} SEVERITY=#{msg['severity']} NUMBER=#{msg['number']}"
          @logger.error "  Message String: #{msg['msgstring']}"
          @logger.error "  OS Error: #{msg['osstring']}"

          @message = msg['msgstring']
        end

        @failed = true

        # Do not retry; CS_CV_RETRY_FAIL (probably a timeout).
        if msg['severity'] == "RETRY_FAIL"
          @timeout_p = true
          return false
        end

        return true
      end
    end # class SybaseAdapterContext

  end # module ConnectionAdapters
end # module ActiveRecord


# Allow identity inserts for fixtures.
require "active_record/fixtures"
class Fixtures
  alias :original_insert_fixtures :insert_fixtures

  def insert_fixtures
    if @connection.instance_of?(ActiveRecord::ConnectionAdapters::SybaseAdapter)
      values.each do |fixture|
        @connection.enable_identity_insert(table_name, true)
        @connection.execute "INSERT INTO #{@table_name} (#{fixture.key_list}) VALUES (#{fixture.value_list})", 'Fixture Insert'
        @connection.enable_identity_insert(table_name, false)
      end
    else
      original_insert_fixtures
    end
  end
end

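# For a fixtures table with an IDENTITY primary key, the loop above issues
# roughly the following sequence (table and values are illustrative only):
#
#   SET IDENTITY_INSERT people ON
#   INSERT INTO people (id, name) VALUES (1, 'David')
#   SET IDENTITY_INSERT people OFF
#
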
rescue LoadError => cannot_require_sybase
  # Couldn't load sybase adapter
end