diff --git a/app/assets/stylesheets/components.css b/app/assets/stylesheets/components.css index 7deccd8..bcd33b6 100644 --- a/app/assets/stylesheets/components.css +++ b/app/assets/stylesheets/components.css @@ -274,7 +274,7 @@ } /* ═══════════════════════════════════════════════════════════════════════════ - CONNECTED NOTES + Connected Documents Card grid replacing the old backlinks lists. Each card shows a linked document title + plain-text excerpt. Clicking a card opens a popup with the full rich-text preview. diff --git a/app/jobs/storage_migration_job.rb b/app/jobs/storage_migration_job.rb index bc382bf..0421dbf 100644 --- a/app/jobs/storage_migration_job.rb +++ b/app/jobs/storage_migration_job.rb @@ -28,11 +28,7 @@ def perform(migration_id) end migration.reload - if migration.cancelled? - # Already marked cancelled - elsif migration.failed_items.positive? - migration.update!(status: "completed", completed_at: Time.current) - else + unless migration.cancelled? migration.update!(status: "completed", completed_at: Time.current) end rescue => e @@ -82,8 +78,7 @@ def migrate_backup(record, from_adapter, to_adapter, migration) end def build_adapter(provider) - case provider - when "local" + if provider == "local" StorageAdapter::Local.new else setting = StorageSetting.active_setting @@ -92,18 +87,7 @@ def build_adapter(provider) oauth = OAuthManager.new setting = oauth.ensure_fresh_token!(setting) if oauth.oauth_provider?(setting.provider) - case provider - when "s3" - StorageAdapter::S3.new(config: setting.config_data) - when "dropbox" - StorageAdapter::Dropbox.new(config: setting.config_data) - when "google_drive" - StorageAdapter::GoogleDrive.new(config: setting.config_data) - when "onedrive" - StorageAdapter::OneDrive.new(config: setting.config_data) - else - StorageAdapter::Local.new - end + StorageAdapter.build(provider, setting.config_data) end end diff --git a/app/services/oauth_manager.rb b/app/services/oauth_manager.rb index 124b101..981dbc8 100644 --- 
a/app/services/oauth_manager.rb +++ b/app/services/oauth_manager.rb @@ -34,7 +34,7 @@ def authorize_url(provider, redirect_uri:) config = provider_config!(provider) params = { - client_id: client_id(config), + client_id: env_credential(config[:client_id_env]), redirect_uri: redirect_uri, response_type: "code" } @@ -66,8 +66,8 @@ def handle_callback(provider, code:, redirect_uri:) grant_type: "authorization_code", code: code, redirect_uri: redirect_uri, - client_id: client_id(config), - client_secret: client_secret(config) + client_id: env_credential(config[:client_id_env]), + client_secret: env_credential(config[:client_secret_env]) } ) @@ -89,8 +89,8 @@ def refresh_access_token(provider, refresh_token:) form: { grant_type: "refresh_token", refresh_token: refresh_token, - client_id: client_id(config), - client_secret: client_secret(config) + client_id: env_credential(config[:client_id_env]), + client_secret: env_credential(config[:client_secret_env]) } ) @@ -152,16 +152,10 @@ def provider_config!(provider) PROVIDERS[key] end - def client_id(config) - ENV.fetch(config[:client_id_env]) + def env_credential(env_key) + ENV.fetch(env_key) rescue KeyError - raise ConfigurationError, "Missing ENV variable #{config[:client_id_env]}. Set it in your .env file or Docker environment." - end - - def client_secret(config) - ENV.fetch(config[:client_secret_env]) - rescue KeyError - raise ConfigurationError, "Missing ENV variable #{config[:client_secret_env]}. Set it in your .env file or Docker environment." + raise ConfigurationError, "Missing ENV variable #{env_key}. Set it in your .env file or Docker environment." 
end def http_client diff --git a/app/services/storage_adapter/base.rb b/app/services/storage_adapter/base.rb index d31c5c6..dddb310 100644 --- a/app/services/storage_adapter/base.rb +++ b/app/services/storage_adapter/base.rb @@ -1,4 +1,11 @@ module StorageAdapter + ADAPTER_CLASSES = { + "s3" => "S3", + "dropbox" => "Dropbox", + "google_drive" => "GoogleDrive", + "onedrive" => "OneDrive" + }.freeze + def self.resolve setting = StorageSetting.active_setting rescue nil @@ -13,22 +20,19 @@ def self.resolve # Refresh OAuth tokens if expired oauth = OAuthManager.new setting = oauth.ensure_fresh_token!(setting) if oauth.oauth_provider?(setting.provider) - - case setting.provider - when "s3" - S3.new(config: setting.config_data) - when "dropbox" - Dropbox.new(config: setting.config_data) - when "google_drive" - GoogleDrive.new(config: setting.config_data) - when "onedrive" - OneDrive.new(config: setting.config_data) - else - Local.new - end + build(setting.provider, setting.config_data) end end + def self.build(provider, config_data = {}) + klass_name = ADAPTER_CLASSES[provider] + return Local.new unless klass_name + + const_get(klass_name).new(config: config_data) + end + + class ApiError < StandardError; end + class Base def upload(file_path, key, namespace: :files) raise NotImplementedError, "#{self.class}#upload must be implemented" @@ -46,6 +50,10 @@ def list(namespace: :files) raise NotImplementedError, "#{self.class}#list must be implemented" end + def exist?(key, namespace: :files) + list(namespace: namespace).include?(key) + end + def url(key, namespace: :files, expires_in: 1.hour) raise NotImplementedError, "#{self.class}#url must be implemented" end @@ -53,5 +61,11 @@ def url(key, namespace: :files, expires_in: 1.hour) def test_connection raise NotImplementedError, "#{self.class}#test_connection must be implemented" end + + private + + def auth_client(timeout: 30) + HTTP.timeout(timeout).auth("Bearer #{@access_token}") + end end end diff --git 
a/app/services/storage_adapter/dropbox.rb b/app/services/storage_adapter/dropbox.rb index 739e987..b909339 100644 --- a/app/services/storage_adapter/dropbox.rb +++ b/app/services/storage_adapter/dropbox.rb @@ -5,7 +5,8 @@ class Dropbox < Base ROOT_FOLDER = "/Apps/Inbox".freeze def initialize(config: {}) - @access_token = config["access_token"] || config[:access_token] + config = config.with_indifferent_access + @access_token = config[:access_token] end def upload(file_path, key, namespace: :files) @@ -59,6 +60,15 @@ def list(namespace: :files) raise end + def exist?(key, namespace: :files) + path = dropbox_path(key, namespace) + api_request("/files/get_metadata", path: path) + true + rescue ApiError => e + return false if e.message.include?("path/not_found") + raise + end + def url(key, namespace: :files, expires_in: 1.hour) path = dropbox_path(key, namespace) body = api_request("/files/get_temporary_link", path: path) @@ -85,8 +95,6 @@ def test_connection { ok: false, error: e.message } end - class ApiError < StandardError; end - private def dropbox_path(key, namespace) @@ -94,15 +102,18 @@ def ensure_folder_exists!(namespace) + return if @created_folders&.include?(namespace) + path = "#{ROOT_FOLDER}/#{namespace}" api_request("/files/create_folder_v2", path: path, autorename: false) + (@created_folders ||= Set.new) << namespace rescue ApiError => e raise unless e.message.include?("path/conflict") + (@created_folders ||= Set.new) << namespace end def api_request(endpoint, **params) - response = HTTP.timeout(30) - .auth("Bearer #{@access_token}") + response = auth_client .headers("Content-Type" => "application/json") .post("#{BASE_URL}#{endpoint}", body: params.to_json) @@ -117,8 +128,7 @@ def api_request(endpoint, **params) end def content_request(endpoint, body:, api_arg:) - response = HTTP.timeout(60) - .auth("Bearer #{@access_token}") + response = auth_client(timeout: 60) .headers( "Content-Type" => "application/octet-stream", "Dropbox-API-Arg" => 
api_arg.to_json @@ -135,8 +145,7 @@ def content_request(endpoint, body:, api_arg:) end def content_download(endpoint, api_arg:) - response = HTTP.timeout(60) - .auth("Bearer #{@access_token}") + response = auth_client(timeout: 60) .headers("Dropbox-API-Arg" => api_arg.to_json) .post("#{CONTENT_URL}#{endpoint}") diff --git a/app/services/storage_adapter/google_drive.rb b/app/services/storage_adapter/google_drive.rb index 062172e..168baea 100644 --- a/app/services/storage_adapter/google_drive.rb +++ b/app/services/storage_adapter/google_drive.rb @@ -6,8 +6,9 @@ class GoogleDrive < Base FOLDER_MIME = "application/vnd.google-apps.folder".freeze def initialize(config: {}) - @access_token = config["access_token"] || config[:access_token] - @folder_ids = config["folder_ids"] || config[:folder_ids] || {} + config = config.with_indifferent_access + @access_token = config[:access_token] + @folder_ids = (config[:folder_ids] || {}).stringify_keys end def upload(file_path, key, namespace: :files) @@ -44,8 +45,7 @@ def delete(key, namespace: :files) file = find_file(key, folder_id) return unless file - HTTP.timeout(30) - .auth("Bearer #{@access_token}") + auth_client .delete("#{API_URL}/files/#{file["id"]}") end @@ -72,6 +72,12 @@ def list(namespace: :files) results end + def exist?(key, namespace: :files) + folder_id = namespace_folder_id(namespace) + return false unless folder_id + !!find_file(key, folder_id) + end + def url(key, namespace: :files, expires_in: 1.hour) folder_id = namespace_folder_id(namespace) file = find_file(key, folder_id) @@ -95,8 +101,7 @@ def test_connection raise ApiError, "Test file content mismatch" unless response.body.to_s == "ok" # Delete it - HTTP.timeout(30) - .auth("Bearer #{@access_token}") + auth_client .delete("#{API_URL}/files/#{file["id"]}") { ok: true } @@ -109,8 +114,6 @@ def folder_ids @folder_ids.dup end - class ApiError < StandardError; end - private def namespace_folder_id(namespace) @@ -130,15 +133,15 @@ def 
ensure_namespace_folder!(namespace) def ensure_folder!(name, parent_id) # Search for existing folder - query = "name = '#{name}' and '#{parent_id}' in parents and mimeType = '#{FOLDER_MIME}' and trashed = false" + escaped_name = name.gsub("'", "\\\\'") + query = "name = '#{escaped_name}' and '#{parent_id}' in parents and mimeType = '#{FOLDER_MIME}' and trashed = false" body = api_json("/files", params: { q: query, fields: "files(id,name)", pageSize: 1 }) files = body["files"] || [] return files.first["id"] if files.any? # Create folder metadata = { name: name, mimeType: FOLDER_MIME, parents: [ parent_id ] } - response = HTTP.timeout(30) - .auth("Bearer #{@access_token}") + response = auth_client .headers("Content-Type" => "application/json") .post("#{API_URL}/files", body: metadata.to_json) @@ -149,7 +152,8 @@ def ensure_folder!(name, parent_id) def find_file(name, folder_id) return nil unless folder_id - query = "name = '#{name}' and '#{folder_id}' in parents and trashed = false" + escaped_name = name.gsub("'", "\\\\'") + query = "name = '#{escaped_name}' and '#{folder_id}' in parents and trashed = false" body = api_json("/files", params: { q: query, fields: "files(id,name)", pageSize: 1 }) (body["files"] || []).first end @@ -161,8 +165,7 @@ def create_file(name, file_path, folder_id, content: nil) boundary = "StorageAdapter#{SecureRandom.hex(8)}" multipart = build_multipart(boundary, metadata, body) - response = HTTP.timeout(60) - .auth("Bearer #{@access_token}") + response = auth_client(timeout: 60) .headers("Content-Type" => "multipart/related; boundary=#{boundary}") .post("#{UPLOAD_URL}/files?uploadType=multipart", body: multipart) @@ -172,8 +175,7 @@ def create_file(name, file_path, folder_id, content: nil) def update_file(file_id, file_path) body = File.binread(file_path) - response = HTTP.timeout(60) - .auth("Bearer #{@access_token}") + response = auth_client(timeout: 60) .headers("Content-Type" => "application/octet-stream") 
.patch("#{UPLOAD_URL}/files/#{file_id}?uploadType=media", body: body) @@ -197,16 +199,14 @@ def api_json(path, params: {}) url = "#{API_URL}#{path}" url += "?#{query_string}" unless query_string.empty? - response = HTTP.timeout(30) - .auth("Bearer #{@access_token}") + response = auth_client .get(url) parse_response!(response) end def api_get(path) - response = HTTP.timeout(30) - .auth("Bearer #{@access_token}") + response = auth_client .get("#{API_URL}#{path}") if response.status >= 400 diff --git a/app/services/storage_adapter/local.rb b/app/services/storage_adapter/local.rb index 352ed24..feefa69 100644 --- a/app/services/storage_adapter/local.rb +++ b/app/services/storage_adapter/local.rb @@ -37,6 +37,10 @@ def list(namespace: :files) .sort end + def exist?(key, namespace: :files) + namespace_path(namespace).join(key).exist? + end + def url(key, namespace: :files, expires_in: 1.hour) namespace_path(namespace).join(key).to_s end diff --git a/app/services/storage_adapter/one_drive.rb b/app/services/storage_adapter/one_drive.rb index 48b3243..cd26e61 100644 --- a/app/services/storage_adapter/one_drive.rb +++ b/app/services/storage_adapter/one_drive.rb @@ -4,15 +4,15 @@ class OneDrive < Base ROOT_FOLDER = "Apps/Inbox".freeze def initialize(config: {}) - @access_token = config["access_token"] || config[:access_token] + config = config.with_indifferent_access + @access_token = config[:access_token] end def upload(file_path, key, namespace: :files) path = onedrive_path(key, namespace) body = File.binread(file_path) - response = HTTP.timeout(60) - .auth("Bearer #{@access_token}") + response = auth_client(timeout: 60) .headers("Content-Type" => "application/octet-stream") .put("#{GRAPH_URL}/me/drive/root:/#{path}:/content", body: body) @@ -24,8 +24,7 @@ def download(key, namespace: :files) path = onedrive_path(key, namespace) # Get download URL (302 redirect) — use manual redirect handling - response = HTTP.timeout(30) - .auth("Bearer #{@access_token}") + response = 
auth_client .get("#{GRAPH_URL}/me/drive/root:/#{path}:/content") if response.status == 302 @@ -33,11 +32,7 @@ def download(key, namespace: :files) response = HTTP.timeout(60).get(download_url) end - if response.status >= 400 - body = JSON.parse(response.body.to_s) rescue {} - error_msg = body.dig("error", "message") || "HTTP #{response.status}" - raise ApiError, error_msg - end + parse_response!(response) if response.status >= 400 tempfile = Tempfile.new([ "onedrive_download", File.extname(key) ]) tempfile.binmode @@ -49,8 +44,7 @@ def download(key, namespace: :files) def delete(key, namespace: :files) path = onedrive_path(key, namespace) - response = HTTP.timeout(30) - .auth("Bearer #{@access_token}") + response = auth_client .delete("#{GRAPH_URL}/me/drive/root:/#{path}:") # 204 No Content = success, 404 = already gone @@ -65,8 +59,7 @@ def list(namespace: :files) url = "#{GRAPH_URL}/me/drive/root:/#{path}:/children?$select=name,file" loop do - response = HTTP.timeout(30) - .auth("Bearer #{@access_token}") + response = auth_client .get(url) body = parse_response!(response) @@ -86,11 +79,17 @@ def list(namespace: :files) raise end + def exist?(key, namespace: :files) + path = onedrive_path(key, namespace) + response = auth_client(timeout: 10) + .get("#{GRAPH_URL}/me/drive/root:/#{path}") + response.status < 400 + end + def url(key, namespace: :files, expires_in: 1.hour) path = onedrive_path(key, namespace) - response = HTTP.timeout(30) - .auth("Bearer #{@access_token}") + response = auth_client .post("#{GRAPH_URL}/me/drive/root:/#{path}:/createLink", json: { type: "view", scope: "anonymous" }) @@ -103,16 +102,14 @@ def test_connection path = onedrive_path(test_key, :files) # Upload test file - response = HTTP.timeout(30) - .auth("Bearer #{@access_token}") + response = auth_client .headers("Content-Type" => "application/octet-stream") .put("#{GRAPH_URL}/me/drive/root:/#{path}:/content", body: "ok") parse_response!(response) # Download it - response = 
HTTP.timeout(30) - .auth("Bearer #{@access_token}") + response = auth_client .get("#{GRAPH_URL}/me/drive/root:/#{path}:/content") if response.status == 302 @@ -121,8 +118,7 @@ def test_connection end # Delete it - HTTP.timeout(30) - .auth("Bearer #{@access_token}") + auth_client .delete("#{GRAPH_URL}/me/drive/root:/#{path}:") { ok: true } @@ -130,8 +126,6 @@ def test_connection { ok: false, error: e.message } end - class ApiError < StandardError; end - private def onedrive_path(key, namespace) diff --git a/app/services/storage_adapter/s3.rb b/app/services/storage_adapter/s3.rb index 8ad2e60..8224640 100644 --- a/app/services/storage_adapter/s3.rb +++ b/app/services/storage_adapter/s3.rb @@ -1,11 +1,12 @@ module StorageAdapter class S3 < Base def initialize(config: {}) - @bucket = config["bucket"] || config[:bucket] - @region = config["region"] || config[:region] || "us-east-1" - @access_key_id = config["access_key_id"] || config[:access_key_id] - @secret_access_key = config["secret_access_key"] || config[:secret_access_key] - @endpoint = config["endpoint"] || config[:endpoint] + config = config.with_indifferent_access + @bucket = config[:bucket] + @region = config[:region] || "us-east-1" + @access_key_id = config[:access_key_id] + @secret_access_key = config[:secret_access_key] + @endpoint = config[:endpoint] end def self.from_legacy_env @@ -19,40 +20,39 @@ def self.from_legacy_env end def upload(file_path, key, namespace: :files) - prefix = namespace.to_s - object = s3_resource.bucket(@bucket).object("#{prefix}/#{key}") - object.upload_file(file_path) - "s3://#{@bucket}/#{prefix}/#{key}" + s3_object(key, namespace).upload_file(file_path) + "s3://#{@bucket}/#{namespace}/#{key}" end def download(key, namespace: :files) - prefix = namespace.to_s tempfile = Tempfile.new([ "s3_download", File.extname(key) ]) - s3_resource.bucket(@bucket).object("#{prefix}/#{key}").get(response_target: tempfile.path) + s3_object(key, namespace).get(response_target: tempfile.path) 
tempfile.rewind tempfile end def delete(key, namespace: :files) - prefix = namespace.to_s - s3_resource.bucket(@bucket).object("#{prefix}/#{key}").delete + s3_object(key, namespace).delete end def list(namespace: :files) prefix = namespace.to_s - s3_resource.bucket(@bucket).objects(prefix: "#{prefix}/").map do |obj| + bucket.objects(prefix: "#{prefix}/").map do |obj| obj.key.sub("#{prefix}/", "") end end + def exist?(key, namespace: :files) + s3_object(key, namespace).exists? + end + def url(key, namespace: :files, expires_in: 1.hour) - prefix = namespace.to_s - s3_resource.bucket(@bucket).object("#{prefix}/#{key}").presigned_url(:get, expires_in: expires_in.to_i) + s3_object(key, namespace).presigned_url(:get, expires_in: expires_in.to_i) end def test_connection - test_key = "files/.storage_test_#{SecureRandom.hex(4)}" - obj = s3_resource.bucket(@bucket).object(test_key) + test_key = ".storage_test_#{SecureRandom.hex(4)}" + obj = s3_object(test_key, :files) obj.put(body: "ok") obj.get obj.delete @@ -61,6 +61,14 @@ def test_connection private + def bucket + s3_resource.bucket(@bucket) + end + + def s3_object(key, namespace) + bucket.object("#{namespace}/#{key}") + end + def s3_resource require "aws-sdk-s3" @s3_resource ||= begin diff --git a/app/views/documents/_connected_notes.html.erb b/app/views/documents/_connected_notes.html.erb index cfa1fe0..c3135b6 100644 --- a/app/views/documents/_connected_notes.html.erb +++ b/app/views/documents/_connected_notes.html.erb @@ -1,7 +1,7 @@ <% connected = (links + backlinks).uniq %> <% if connected.any? %> diff --git a/app/views/documents/edit.html.erb b/app/views/documents/edit.html.erb index bcf81bb..ae1b4c1 100644 --- a/app/views/documents/edit.html.erb +++ b/app/views/documents/edit.html.erb @@ -48,9 +48,9 @@
-
- - diff --git a/app/views/layouts/_sidebar.html.erb b/app/views/layouts/_sidebar.html.erb index 99fa3b7..e07d05e 100644 --- a/app/views/layouts/_sidebar.html.erb +++ b/app/views/layouts/_sidebar.html.erb @@ -19,6 +19,12 @@ <% end %> + + <%# ── Navigation ── %> @@ -71,10 +77,7 @@ Settings <% end %> - <%= link_to new_note_path, class: "sidebar-new-note", data: { turbo: false } do %> - <%= heroicon(:plus, style: 'width: 12px; height: 12px;') %> - New Note - <% end %> + @@ -223,9 +226,13 @@ /* Bottom section */ .sidebar-bottom { margin-top: auto; - padding: 14px; border-top: 1px solid var(--color-border-soft); } + +.sidebar-new-note-container { +padding: 10px 12px; +} + .sidebar-new-note { display: flex; align-items: center; diff --git a/lib/active_storage/service/unified_storage_service.rb b/lib/active_storage/service/unified_storage_service.rb index cc94e00..5c4b789 100644 --- a/lib/active_storage/service/unified_storage_service.rb +++ b/lib/active_storage/service/unified_storage_service.rb @@ -8,14 +8,14 @@ def initialize(**config) end def upload(key, io, checksum: nil, **) - if cloud_mode? + if (adapter = cloud_adapter_if_active) instrument :upload, key: key, checksum: checksum do temp = Tempfile.new([ "as_upload", File.extname(key) ]) temp.binmode IO.copy_stream(io, temp) temp.close - cloud_adapter.upload(temp.path, key, namespace: @namespace) + adapter.upload(temp.path, key, namespace: @namespace) ensure temp&.close! end @@ -25,9 +25,9 @@ def upload(key, io, checksum: nil, **) end def download(key, &block) - if cloud_mode? + if (adapter = cloud_adapter_if_active) begin - download_from_cloud(key, &block) + download_from_cloud(adapter, key, &block) rescue => e # Fallback to disk for files not yet migrated raise unless disk_service.exist?(key) @@ -39,10 +39,10 @@ def download(key, &block) end def download_chunk(key, range) - if cloud_mode? 
+ if (adapter = cloud_adapter_if_active) begin instrument :download_chunk, key: key, range: range do - tempfile = cloud_adapter.download(key, namespace: @namespace) + tempfile = adapter.download(key, namespace: @namespace) begin tempfile.seek(range.begin) tempfile.read(range.size) @@ -60,9 +60,9 @@ def download_chunk(key, range) end def delete(key) - if cloud_mode? + if (adapter = cloud_adapter_if_active) instrument :delete, key: key do - cloud_adapter.delete(key, namespace: @namespace) + adapter.delete(key, namespace: @namespace) end # Also remove from disk if it exists (cleanup after migration) disk_service.delete(key) if disk_service.exist?(key) @@ -72,10 +72,10 @@ def delete(key) end def delete_prefixed(prefix) - if cloud_mode? + if (adapter = cloud_adapter_if_active) instrument :delete_prefixed, prefix: prefix do - cloud_adapter.list(namespace: @namespace).each do |file_key| - cloud_adapter.delete(file_key, namespace: @namespace) if file_key.start_with?(prefix) + adapter.list(namespace: @namespace).each do |file_key| + adapter.delete(file_key, namespace: @namespace) if file_key.start_with?(prefix) end end else @@ -84,9 +84,9 @@ def delete_prefixed(prefix) end def exist?(key) - if cloud_mode? + if (adapter = cloud_adapter_if_active) instrument :exist, key: key do |payload| - answer = cloud_adapter.list(namespace: @namespace).include?(key) || disk_service.exist?(key) + answer = adapter.exist?(key, namespace: @namespace) || disk_service.exist?(key) payload[:exist] = answer answer end @@ -96,9 +96,9 @@ def exist?(key) end def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:, custom_metadata: {}) - if cloud_mode? 
+ if (adapter = cloud_adapter_if_active) instrument :url, key: key do |payload| - url = cloud_adapter.url(key, namespace: @namespace, expires_in: expires_in) + url = adapter.url(key, namespace: @namespace, expires_in: expires_in) payload[:url] = url url end @@ -142,10 +142,10 @@ def generate_url(key, expires_in:, filename:, content_type:, disposition:) url_helpers.rails_disk_service_url(verified_key_with_expiration, filename: filename, **url_options) end - def download_from_cloud(key, &block) + def download_from_cloud(adapter, key, &block) if block_given? instrument :streaming_download, key: key do - tempfile = cloud_adapter.download(key, namespace: @namespace) + tempfile = adapter.download(key, namespace: @namespace) begin while (chunk = tempfile.read(5.megabytes)) yield chunk @@ -156,7 +156,7 @@ def download_from_cloud(key, &block) end else instrument :download, key: key do - tempfile = cloud_adapter.download(key, namespace: @namespace) + tempfile = adapter.download(key, namespace: @namespace) begin tempfile.read ensure @@ -166,15 +166,17 @@ def download_from_cloud(key, &block) end end - def cloud_mode? + # Returns cloud adapter if a cloud provider is configured, nil otherwise. + # Single DB query per call (replaces separate cloud_mode? + cloud_adapter). + def cloud_adapter_if_active setting = StorageSetting.active_setting - setting && setting.provider != "local" - rescue - false - end + return nil if setting.nil? 
|| setting.provider == "local" - def cloud_adapter - StorageAdapter.resolve + oauth = OAuthManager.new + setting = oauth.ensure_fresh_token!(setting) if oauth.oauth_provider?(setting.provider) + StorageAdapter.build(setting.provider, setting.config_data) + rescue ActiveRecord::ActiveRecordError + nil end def disk_service diff --git a/spec/requests/documents_web_spec.rb b/spec/requests/documents_web_spec.rb index 8a1e2c5..b7cffe7 100644 --- a/spec/requests/documents_web_spec.rb +++ b/spec/requests/documents_web_spec.rb @@ -103,7 +103,7 @@ end context "backlinks" do - it "displays Connected Notes section when backlinks exist" do + it "displays Connected Documents section when backlinks exist" do source = create(:document, title: "Linking Doc") target = create(:document, title: "Target Doc") create(:document_link, source_document: source, target_document: target) @@ -111,17 +111,17 @@ get document_path(target) expect(response).to have_http_status(:ok) - expect(response.body).to include("Connected Notes") + expect(response.body).to include("Connected Documents") expect(response.body).to include("Linking Doc") end - it "hides Connected Notes section when no backlinks" do + it "hides Connected Documents section when no backlinks" do doc = create(:document, title: "Lonely Doc") get document_path(doc) expect(response).to have_http_status(:ok) - expect(response.body).not_to include("Connected Notes") + expect(response.body).not_to include("Connected Documents") end end end diff --git a/spec/services/storage_adapter_google_drive_spec.rb b/spec/services/storage_adapter_google_drive_spec.rb index fdc87fe..602f14a 100644 --- a/spec/services/storage_adapter_google_drive_spec.rb +++ b/spec/services/storage_adapter_google_drive_spec.rb @@ -80,7 +80,7 @@ headers: { "Content-Type" => "application/json" }) expect { adapter.download("missing.txt", namespace: :files) } - .to raise_error(StorageAdapter::GoogleDrive::ApiError, /File not found/) + .to raise_error(StorageAdapter::ApiError, 
/File not found/) end end