55require "active_storage/service/db_service_rails70"
66
77module ActiveStorage
8- # Wraps a DB table as an Active Storage service. See ActiveStorage::Service
9- # for the generic API documentation that applies to all services.
108 class Service ::DBService < Service
11- # :nocov:
12- if Rails ::VERSION ::MAJOR >= 7
13- include ActiveStorage ::DBServiceRails70
14- elsif Rails ::VERSION ::MAJOR == 6 && Rails ::VERSION ::MINOR == 1
15- include ActiveStorage ::DBServiceRails61
16- else
17- include ActiveStorage ::DBServiceRails60
18- end
19- # :nocov:
9+ include ActiveStorage ::DBServiceRails70
10+
  # Baseline configuration for retry_on_failure: up to 3 attempts with capped
  # exponential backoff, retrying only transient connection/timeout errors.
  # NOTE(review): retry_options builds the effective options at runtime —
  # presumably it should stay in sync with (or derive from) this constant;
  # verify the two don't drift.
  DEFAULT_RETRY_OPTIONS = {
    max_attempts: 3,
    base_delay: 0.1,
    max_delay: 2.0,
    retryable_errors: [
      ActiveRecord::ConnectionFailed,
      ActiveRecord::StatementTimeout
    ].freeze
  }.freeze

  # Lower bound (bytes) for @chunk_size; guards against a zero or negative
  # ASDB_CHUNK_SIZE / chunk_size value.
  MINIMUM_CHUNK_SIZE = 1
2222
23- def initialize ( public : false , **)
24- @chunk_size = [ ENV . fetch ( "ASDB_CHUNK_SIZE" ) { 1 . megabytes } . to_i , MINIMUM_CHUNK_SIZE ] . max
23+ def initialize ( public : false , chunk_size : nil , **)
24+ @chunk_size = [ chunk_size || ENV . fetch ( "ASDB_CHUNK_SIZE" ) { 1 . megabyte } . to_i , MINIMUM_CHUNK_SIZE ] . max
2525 @max_size = ENV . fetch ( "ASDB_MAX_FILE_SIZE" , nil ) &.to_i
2626 @public = public
2727 end
@@ -37,7 +37,7 @@ def upload(key, io, checksum: nil, **)
3737 digest = Digest ::MD5 . base64digest ( data )
3838 raise ActiveStorage ::IntegrityError unless digest == checksum
3939 end
40- ::ActiveStorageDB ::File . create! ( ref : key , data : data )
40+ retry_on_failure { ::ActiveStorageDB ::File . create! ( ref : key , data : data ) }
4141 end
4242 end
4343
@@ -55,33 +55,33 @@ def download(key, &block)
5555
5656 def download_chunk ( key , range )
5757 instrument :download_chunk , key : key , range : range do
58- # NOTE: from/size are derived from Range#begin and Range#size (always integers),
59- # so string interpolation into SQL is safe here.
60- from = range . begin + 1
61- size = range . size
62- args = adapter_sqlserver? || adapter_sqlite? ? "data, #{ from } , #{ size } " : "data FROM #{ from } FOR #{ size } "
63- record = object_for ( key , fields : "SUBSTRING(#{ args } ) AS chunk" )
64- raise ActiveStorage ::FileNotFoundError unless record
65-
66- record . chunk
58+ chunk = if adapter_postgresql? && @chunk_size >= 1 . megabyte
59+ pg_read_binary ( key , range )
60+ else
61+ sql_chunk ( key , range )
62+ end
63+ raise ActiveStorage ::FileNotFoundError unless chunk
64+
65+ chunk
6766 end
6867 end
6968
7069 def delete ( key )
7170 instrument :delete , key : key do
7271 comment = "DBService#delete"
73- record = ::ActiveStorageDB ::File . annotate ( comment ) . find_by ( ref : key )
74- record &.destroy
75- # NOTE: Ignore files already deleted
76- !record . nil?
72+ retry_on_failure do
73+ ::ActiveStorageDB ::File . annotate ( comment ) . where ( ref : key ) . delete > 0
74+ end
7775 end
7876 end
7977
8078 def delete_prefixed ( prefix )
8179 instrument :delete_prefixed , prefix : prefix do
8280 comment = "DBService#delete_prefixed"
8381 sanitized_prefix = "#{ ActiveRecord ::Base . sanitize_sql_like ( prefix ) } %"
84- ::ActiveStorageDB ::File . annotate ( comment ) . where ( "ref LIKE ?" , sanitized_prefix ) . destroy_all
82+ retry_on_failure do
83+ ::ActiveStorageDB ::File . annotate ( comment ) . where ( "ref LIKE ?" , sanitized_prefix ) . delete_all
84+ end
8585 end
8686 end
8787
@@ -120,20 +120,61 @@ def headers_for_direct_upload(_key, content_type:, **)
120120
121121 private
122122
123+ def retry_options
124+ @retry_options ||= {
125+ max_attempts : 3 ,
126+ base_delay : 0.1 ,
127+ max_delay : 2.0 ,
128+ retryable_errors : default_retryable_errors
129+ }
130+ end
131+
132+ def retry_on_failure
133+ attempts = 0
134+ max_attempts = retry_options [ :max_attempts ]
135+ base_delay = retry_options [ :base_delay ]
136+ max_delay = retry_options [ :max_delay ]
137+ retryable_errors = retry_options [ :retryable_errors ]
138+
139+ begin
140+ yield
141+ rescue *retryable_errors
142+ attempts += 1
143+ raise if attempts >= max_attempts
144+
145+ delay = [ base_delay * ( 2 **attempts ) , max_delay ] . min
146+ sleep ( delay )
147+ retry
148+ end
149+ end
150+
151+ def default_retryable_errors
152+ errors = [
153+ ActiveRecord ::ConnectionFailed ,
154+ ActiveRecord ::StatementTimeout
155+ ]
156+ errors << PG ::ConnectionBad if defined? ( PG ::ConnectionBad )
157+ errors
158+ end
159+
123160 def service_name_for_token
124161 name . presence || "db"
125162 end
126163
127164 def adapter_sqlite?
128- return @adapter_sqlite if defined? ( @adapter_sqlite )
129-
130- @adapter_sqlite = active_storage_db_adapter_name == "SQLite"
165+ @adapter_sqlite ||= active_storage_db_adapter_name == "SQLite"
131166 end
132167
133168 def adapter_sqlserver?
134- return @adapter_sqlserver if defined? ( @adapter_sqlserver )
169+ @adapter_sqlserver ||= active_storage_db_adapter_name == "SQLServer"
170+ end
135171
136- @adapter_sqlserver = active_storage_db_adapter_name == "SQLServer"
172+ def adapter_postgresql?
173+ @adapter_postgresql ||= active_storage_db_adapter_name == "PostgreSQL"
174+ end
175+
176+ def adapter_mysql?
177+ @adapter_mysql ||= active_storage_db_adapter_name == "Mysql2"
137178 end
138179
139180 def active_storage_db_adapter_name
@@ -191,6 +232,23 @@ def stream(key)
191232 end
192233 end
193234
235+ def sql_chunk ( key , range )
236+ from = range . begin + 1
237+ size = range . size
238+ args = adapter_sqlserver? || adapter_sqlite? ? "data, #{ from } , #{ size } " : "data FROM #{ from } FOR #{ size } "
239+ record = object_for ( key , fields : "SUBSTRING(#{ args } ) AS chunk" )
240+ record &.chunk
241+ end
242+
243+ def pg_read_binary ( key , range )
244+ from = range . begin + 1
245+ size = range . size
246+ comment = "DBService#pg_read_binary"
247+ ::ActiveStorageDB ::File . annotate ( comment ) . where ( ref : key ) . pick ( "get_byte(data, (#{ from } - 1) + generate_series(0, #{ size } - 1))" )
248+ rescue ActiveRecord ::StatementInvalid
249+ sql_chunk ( key , range )
250+ end
251+
194252 def data_size
195253 if adapter_sqlserver?
196254 "DATALENGTH(data) AS size"
0 commit comments