loading
Generated 2025-09-05T16:20:08+00:00

All Files ( 81.76% covered at 22.57 hits/line )

72 files in total.
2539 relevant lines, 2076 lines covered and 463 lines missed. ( 81.76% )
File % covered Lines Relevant Lines Lines covered Lines missed Avg. Hits / Line
lib/log_struct.rb 100.00 % 37 21 21 0 1.00
lib/log_struct/concerns/configuration.rb 92.86 % 93 42 39 3 35.83
lib/log_struct/concerns/error_handling.rb 59.46 % 94 37 22 15 0.65
lib/log_struct/concerns/logging.rb 100.00 % 45 21 21 0 1.57
lib/log_struct/config_struct/error_handling_modes.rb 100.00 % 25 8 8 0 1.00
lib/log_struct/config_struct/filters.rb 100.00 % 80 17 17 0 5.82
lib/log_struct/config_struct/integrations.rb 100.00 % 89 24 24 0 2.79
lib/log_struct/configuration.rb 100.00 % 59 24 24 0 66.71
lib/log_struct/enums.rb 100.00 % 9 5 5 0 1.00
lib/log_struct/enums/error_handling_mode.rb 100.00 % 22 9 9 0 1.00
lib/log_struct/enums/error_reporter.rb 100.00 % 14 8 8 0 1.00
lib/log_struct/enums/event.rb 100.00 % 48 24 24 0 1.00
lib/log_struct/enums/level.rb 100.00 % 66 43 43 0 9.30
lib/log_struct/enums/source.rb 100.00 % 26 14 14 0 1.00
lib/log_struct/formatter.rb 90.43 % 224 94 85 9 33.56
lib/log_struct/handlers.rb 100.00 % 27 7 7 0 1.14
lib/log_struct/hash_utils.rb 100.00 % 21 10 10 0 5.60
lib/log_struct/integrations.rb 58.06 % 41 31 18 13 0.58
lib/log_struct/integrations/action_mailer.rb 100.00 % 50 25 25 0 3.92
lib/log_struct/integrations/action_mailer/callbacks.rb 88.89 % 100 45 40 5 1.47
lib/log_struct/integrations/action_mailer/error_handling.rb 87.10 % 173 62 54 8 1.16
lib/log_struct/integrations/action_mailer/event_logging.rb 100.00 % 90 40 40 0 1.45
lib/log_struct/integrations/action_mailer/metadata_collection.rb 88.89 % 78 36 32 4 2.22
lib/log_struct/integrations/active_job.rb 58.82 % 38 17 10 7 0.59
lib/log_struct/integrations/active_job/log_subscriber.rb 55.81 % 104 43 24 19 0.56
lib/log_struct/integrations/active_record.rb 95.45 % 258 110 105 5 11.94
lib/log_struct/integrations/active_storage.rb 34.21 % 94 38 13 25 0.34
lib/log_struct/integrations/carrierwave.rb 44.44 % 111 36 16 20 0.44
lib/log_struct/integrations/good_job.rb 53.13 % 111 32 17 15 0.63
lib/log_struct/integrations/good_job/log_subscriber.rb 98.68 % 228 76 75 1 2.91
lib/log_struct/integrations/good_job/logger.rb 100.00 % 73 23 23 0 3.43
lib/log_struct/integrations/host_authorization.rb 52.17 % 81 23 12 11 0.52
lib/log_struct/integrations/integration_interface.rb 100.00 % 21 8 8 0 1.13
lib/log_struct/integrations/lograge.rb 41.86 % 114 43 18 25 0.42
lib/log_struct/integrations/rack_error_handler.rb 71.43 % 32 14 10 4 0.71
lib/log_struct/integrations/rack_error_handler/middleware.rb 44.74 % 146 38 17 21 0.45
lib/log_struct/integrations/shrine.rb 33.33 % 75 24 8 16 0.33
lib/log_struct/integrations/sidekiq.rb 52.94 % 39 17 9 8 0.53
lib/log_struct/integrations/sorbet.rb 42.11 % 49 19 8 11 0.42
lib/log_struct/log.rb 94.44 % 43 18 17 1 0.94
lib/log_struct/log/action_mailer.rb 100.00 % 55 34 34 0 1.26
lib/log_struct/log/active_job.rb 78.95 % 64 38 30 8 0.79
lib/log_struct/log/active_storage.rb 68.09 % 78 47 32 15 0.68
lib/log_struct/log/carrierwave.rb 72.00 % 82 50 36 14 0.72
lib/log_struct/log/error.rb 97.50 % 76 40 39 1 1.35
lib/log_struct/log/good_job.rb 100.00 % 151 70 70 0 8.71
lib/log_struct/log/interfaces/additional_data_field.rb 100.00 % 20 9 9 0 1.00
lib/log_struct/log/interfaces/common_fields.rb 100.00 % 42 20 20 0 1.05
lib/log_struct/log/interfaces/message_field.rb 100.00 % 20 9 9 0 1.00
lib/log_struct/log/interfaces/request_fields.rb 100.00 % 36 19 19 0 1.00
lib/log_struct/log/plain.rb 100.00 % 53 30 30 0 3.60
lib/log_struct/log/request.rb 69.09 % 76 55 38 17 0.71
lib/log_struct/log/security.rb 82.35 % 80 51 42 9 0.82
lib/log_struct/log/shared/add_request_fields.rb 62.50 % 29 16 10 6 0.63
lib/log_struct/log/shared/merge_additional_data_fields.rb 100.00 % 28 15 15 0 7.67
lib/log_struct/log/shared/serialize_common.rb 100.00 % 36 14 14 0 5.29
lib/log_struct/log/shrine.rb 76.19 % 70 42 32 10 0.76
lib/log_struct/log/sidekiq.rb 79.31 % 50 29 23 6 0.83
lib/log_struct/log/sql.rb 100.00 % 126 48 48 0 4.52
lib/log_struct/log_keys.rb 100.00 % 102 2 2 0 1.00
lib/log_struct/monkey_patches/active_support/tagged_logging/formatter.rb 100.00 % 36 14 14 0 21.93
lib/log_struct/multi_error_reporter.rb 81.16 % 149 69 56 13 1.41
lib/log_struct/param_filters.rb 100.00 % 89 37 37 0 11.95
lib/log_struct/railtie.rb 61.54 % 31 13 8 5 0.62
lib/log_struct/semantic_logger/color_formatter.rb 73.81 % 209 84 62 22 2.93
lib/log_struct/semantic_logger/formatter.rb 92.31 % 94 26 24 2 5.58
lib/log_struct/semantic_logger/logger.rb 77.78 % 129 36 28 8 4.81
lib/log_struct/semantic_logger/setup.rb 34.48 % 219 58 20 38 0.34
lib/log_struct/sorbet.rb 100.00 % 13 2 2 0 1.00
lib/log_struct/sorbet/serialize_symbol_keys.rb 83.33 % 23 12 10 2 0.83
lib/log_struct/string_scrubber.rb 100.00 % 84 39 39 0 118.38
tools/log_types_exporter.rb 85.61 % 608 285 244 41 142.07

lib/log_struct.rb

100.0% lines covered

21 relevant lines. 21 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. # Core library files
  4. 1 require "log_struct/sorbet"
  5. 1 require "log_struct/version"
  6. 1 require "log_struct/enums" # All enums are now in the enums directory
  7. 1 require "log_struct/configuration"
  8. 1 require "log_struct/formatter"
  9. 1 require "log_struct/railtie"
  10. 1 require "log_struct/concerns/error_handling"
  11. 1 require "log_struct/concerns/configuration"
  12. 1 require "log_struct/concerns/logging"
  13. # Monkey-patch ActiveSupport::TaggedLogging::Formatter to support hash input/output
  14. 1 require "log_struct/monkey_patches/active_support/tagged_logging/formatter"
  15. # Require integrations
  16. 1 require "log_struct/integrations"
  17. # SemanticLogger integration - core feature for high-performance logging
  18. 1 require "log_struct/semantic_logger/formatter"
  19. 1 require "log_struct/semantic_logger/color_formatter"
  20. 1 require "log_struct/semantic_logger/logger"
  21. 1 require "log_struct/semantic_logger/setup"
  22. 1 module LogStruct
  23. 1 class Error < StandardError; end
  24. 1 extend Concerns::ErrorHandling::ClassMethods
  25. 1 extend Concerns::Configuration::ClassMethods
  26. 1 extend Concerns::Logging::ClassMethods
  27. # Set enabled at require time based on current Rails environment.
  28. # (Users can disable or enable LogStruct later in an initializer.)
  29. 1 set_enabled_from_rails_env!
  30. end

lib/log_struct/concerns/configuration.rb

92.86% lines covered

42 relevant lines. 39 lines covered and 3 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "../configuration"
  4. 1 module LogStruct
  5. 1 module Concerns
  6. # Concern for managing LogStruct configuration
  7. 1 module Configuration
  8. 1 module ClassMethods
  9. 1 extend T::Sig
  10. 2 sig { params(block: T.proc.params(config: LogStruct::Configuration).void).void }
  11. 1 def configure(&block)
  12. 30 yield(config)
  13. end
  14. 2 sig { returns(LogStruct::Configuration) }
  15. 1 def config
  16. 1272 LogStruct::Configuration.instance
  17. end
  18. # (Can't use alias_method since this module is extended into LogStruct)
  19. 2 sig { returns(LogStruct::Configuration) }
  20. 1 def configuration
  21. 55 config
  22. end
  23. # Setter method to replace the configuration (for testing purposes)
  24. 2 sig { params(config: LogStruct::Configuration).void }
  25. 1 def configuration=(config)
  26. 93 LogStruct::Configuration.set_instance(config)
  27. end
  28. 1 sig { returns(T::Boolean) }
  29. 1 def enabled?
  30. config.enabled
  31. end
  32. 2 sig { void }
  33. 1 def set_enabled_from_rails_env!
  34. # Set enabled based on current Rails environment and the LOGSTRUCT_ENABLED env var.
  35. # Precedence:
  36. # 1. Check if LOGSTRUCT_ENABLED env var is defined
  37. # - Sets enabled=true only when value is "true"
  38. # - Sets enabled=false when value is "false" (or any non-"true")
  39. # 2. Otherwise, check if current Rails environment is in enabled_environments
  40. # 3. Otherwise, leave as config.enabled (defaults to true)
  41. # Then check if LOGSTRUCT_ENABLED env var is set
  42. 7 config.enabled = if ENV["LOGSTRUCT_ENABLED"]
  43. # Override to true only if env var is "true"
  44. 3 ENV["LOGSTRUCT_ENABLED"] == "true"
  45. else
  46. 4 config.enabled_environments.include?(::Rails.env.to_sym)
  47. end
  48. end
  49. 1 sig { returns(T::Boolean) }
  50. 1 def is_local?
  51. config.local_environments.include?(::Rails.env.to_sym)
  52. end
  53. 1 sig { returns(T::Boolean) }
  54. 1 def is_production?
  55. !is_local?
  56. end
  57. 2 sig { void }
  58. 1 def merge_rails_filter_parameters!
  59. 1 return unless ::Rails.application.config.respond_to?(:filter_parameters)
  60. 1 rails_filter_params = ::Rails.application.config.filter_parameters
  61. 1 return unless rails_filter_params.is_a?(Array)
  62. # Convert all Rails filter parameters to symbols and merge with our filter keys
  63. 1 converted_params = rails_filter_params.map do |param|
  64. 4 param.respond_to?(:to_sym) ? param.to_sym : param
  65. end
  66. # Add Rails filter parameters to our filter keys
  67. 1 config.filters.filter_keys += converted_params
  68. # Ensure no duplicates
  69. 1 config.filters.filter_keys.uniq!
  70. # Clear Rails filter parameters since we've incorporated them
  71. 1 ::Rails.application.config.filter_parameters.clear
  72. end
  73. end
  74. end
  75. end
  76. end

lib/log_struct/concerns/error_handling.rb

59.46% lines covered

37 relevant lines. 22 lines covered and 15 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 module LogStruct
  4. 1 module Concerns
  5. # Concern for handling errors according to configured modes
  6. 1 module ErrorHandling
  7. 1 module ClassMethods
  8. 1 extend T::Sig
  9. 1 extend T::Helpers
  10. # Needed for raise
  11. 1 requires_ancestor { Module }
  12. # Get the error handling mode for a given source
  13. 2 sig { params(source: Source).returns(ErrorHandlingMode) }
  14. 1 def error_handling_mode_for(source)
  15. 1 config = LogStruct.config
  16. # Use a case statement for type-safety
  17. 1 case source
  18. when Source::TypeChecking
  19. config.error_handling_modes.type_checking_errors
  20. when Source::LogStruct
  21. config.error_handling_modes.logstruct_errors
  22. when Source::Security
  23. config.error_handling_modes.security_errors
  24. when Source::Rails, Source::App, Source::Job, Source::Storage, Source::Mailer,
  25. Source::Shrine, Source::CarrierWave, Source::Sidekiq
  26. 1 config.error_handling_modes.standard_errors
  27. else
  28. # Ensures the case statement is exhaustive
  29. T.absurd(source)
  30. end
  31. end
  32. # Log an error with structured data
  33. 1 sig { params(error: StandardError, source: Source, context: T.nilable(T::Hash[Symbol, T.untyped])).void }
  34. 1 def log_error(error, source:, context: nil)
  35. # Create structured log entry
  36. error_log = Log::Error.from_exception(
  37. source,
  38. error,
  39. context || {}
  40. )
  41. LogStruct.error(error_log)
  42. end
  43. # Report an error using the configured handler or MultiErrorReporter
  44. 1 sig { params(error: StandardError, source: Source, context: T.nilable(T::Hash[Symbol, T.untyped])).void }
  45. 1 def log_and_report_error(error, source:, context: nil)
  46. log_error(error, source: source, context: context)
  47. error_handler = LogStruct.config.error_reporting_handler
  48. if error_handler
  49. # Use the configured handler
  50. error_handler.call(error, context, source)
  51. else
  52. # Fall back to MultiErrorReporter (detects Sentry, Bugsnag, etc.)
  53. LogStruct::MultiErrorReporter.report_error(error, context || {})
  54. end
  55. end
  56. # Handle an error according to the configured error handling mode (log, report, raise, etc)
  57. 2 sig { params(error: StandardError, source: Source, context: T.nilable(T::Hash[Symbol, T.untyped])).void }
  58. 1 def handle_exception(error, source:, context: nil)
  59. 1 mode = error_handling_mode_for(source)
  60. # Log / report in production, raise locally (dev/test)
  61. 1 if mode == ErrorHandlingMode::LogProduction || mode == ErrorHandlingMode::ReportProduction
  62. raise(error) if !LogStruct.is_production?
  63. end
  64. 1 case mode
  65. when ErrorHandlingMode::Ignore
  66. # Do nothing
  67. when ErrorHandlingMode::Raise
  68. 1 raise(error)
  69. when ErrorHandlingMode::Log, ErrorHandlingMode::LogProduction
  70. log_error(error, source: source, context: context)
  71. when ErrorHandlingMode::Report, ErrorHandlingMode::ReportProduction
  72. log_and_report_error(error, source: source, context: context)
  73. else
  74. # Ensures the case statement is exhaustive
  75. T.absurd(mode)
  76. end
  77. end
  78. end
  79. end
  80. end
  81. end

lib/log_struct/concerns/logging.rb

100.0% lines covered

21 relevant lines. 21 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "../log"
  4. 1 module LogStruct
  5. 1 module Concerns
  6. # Concern for logging structured log entries at each severity level
  7. 1 module Logging
  8. 1 module ClassMethods
  9. 1 extend T::Sig
  10. # Log a log struct at debug level
  11. 2 sig { params(log: Log::Interfaces::CommonFields).void }
  12. 1 def debug(log)
  13. 1 Rails.logger.debug(log)
  14. end
  15. # Log a log struct at info level
  16. 2 sig { params(log: Log::Interfaces::CommonFields).void }
  17. 1 def info(log)
  18. 5 Rails.logger.info(log)
  19. end
  20. # Log a log struct at warn level
  21. 2 sig { params(log: Log::Interfaces::CommonFields).void }
  22. 1 def warn(log)
  23. 1 Rails.logger.warn(log)
  24. end
  25. # Log a log struct at error level
  26. 2 sig { params(log: Log::Interfaces::CommonFields).void }
  27. 1 def error(log)
  28. 4 Rails.logger.error(log)
  29. end
  30. # Log a log struct at fatal level
  31. 2 sig { params(log: Log::Interfaces::CommonFields).void }
  32. 1 def fatal(log)
  33. 1 Rails.logger.fatal(log)
  34. end
  35. end
  36. end
  37. end
  38. end

lib/log_struct/config_struct/error_handling_modes.rb

100.0% lines covered

8 relevant lines. 8 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 module LogStruct
  4. 1 module ConfigStruct
  5. 1 class ErrorHandlingModes < T::Struct
  6. 1 include Sorbet::SerializeSymbolKeys
  7. # How to handle different types of errors
  8. # Modes:
  9. # - Ignore - Ignore the error
  10. # - Log - Log the error
  11. # - Report - Log and report to error tracking service (but don't crash)
  12. # - LogProduction - Log error in production, raise locally (dev/test)
  13. # - ReportProduction - Report error in production, raise locally (dev/test)
  14. # - Raise - Always raise the error
  15. # Configurable error handling categories
  16. 1 prop :type_checking_errors, ErrorHandlingMode, default: ErrorHandlingMode::LogProduction
  17. 1 prop :logstruct_errors, ErrorHandlingMode, default: ErrorHandlingMode::LogProduction
  18. 1 prop :security_errors, ErrorHandlingMode, default: ErrorHandlingMode::Report
  19. 1 prop :standard_errors, ErrorHandlingMode, default: ErrorHandlingMode::Raise
  20. end
  21. end
  22. end

lib/log_struct/config_struct/filters.rb

100.0% lines covered

17 relevant lines. 17 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 module LogStruct
  4. 1 module ConfigStruct
  5. 1 class Filters < T::Struct
  6. 1 include Sorbet::SerializeSymbolKeys
  7. # Keys that should be filtered in nested structures such as request params and job arguments.
  8. # Filtered data includes information about Hashes and Arrays.
  9. #
  10. # { _filtered: {
  11. # _class: "Hash", # Class of the filtered value
  12. # _bytes: 1234, # Length of JSON string in bytes
  13. # _keys_count: 3, # Number of keys in the hash
  14. # _keys: [:key1, :key2, :key3], # First 10 keys in the hash
  15. # }
  16. # }
  17. #
  18. # Default: [:password, :password_confirmation, :pass, :pw, :token, :secret,
  19. # :credentials, :creds, :auth, :authentication, :authorization]
  20. #
  21. 1 prop :filter_keys,
  22. T::Array[Symbol],
  23. factory: -> {
  24. 42 %i[
  25. password password_confirmation pass pw token secret
  26. credentials auth authentication authorization
  27. credit_card ssn social_security
  28. ]
  29. }
  30. # Keys where string values should include an SHA256 hash.
  31. # Useful for tracing emails across requests (e.g. sign in, sign up) while protecting privacy.
  32. # Default: [:email, :email_address]
  33. 1 prop :filter_keys_with_hashes,
  34. T::Array[Symbol],
  35. 42 factory: -> { %i[email email_address] }
  36. # Hash salt for SHA256 hashing (typically used for email addresses)
  37. # Used for both param filters and string scrubbing
  38. # Default: "l0g5t0p"
  39. 1 prop :hash_salt, String, default: "l0g5t0p"
  40. # Hash length for SHA256 hashing (typically used for email addresses)
  41. # Used for both param filters and string scrubbing
  42. # Default: 12
  43. 1 prop :hash_length, Integer, default: 12
  44. # Filter email addresses. Also controls email filtering for the ActionMailer integration
  45. # (to, from, recipient fields, etc.)
  46. # Default: true
  47. 1 prop :email_addresses, T::Boolean, default: true
  48. # Filter URL passwords
  49. # Default: true
  50. 1 prop :url_passwords, T::Boolean, default: true
  51. # Filter credit card numbers
  52. # Default: true
  53. 1 prop :credit_card_numbers, T::Boolean, default: true
  54. # Filter phone numbers
  55. # Default: true
  56. 1 prop :phone_numbers, T::Boolean, default: true
  57. # Filter social security numbers
  58. # Default: true
  59. 1 prop :ssns, T::Boolean, default: true
  60. # Filter IP addresses
  61. # Default: false
  62. 1 prop :ip_addresses, T::Boolean, default: false
  63. # Filter MAC addresses
  64. # Default: false
  65. 1 prop :mac_addresses, T::Boolean, default: false
  66. end
  67. end
  68. end

lib/log_struct/config_struct/integrations.rb

100.0% lines covered

24 relevant lines. 24 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require "active_support/notifications"
  4. 1 module LogStruct
  5. 1 module ConfigStruct
  6. 1 class Integrations < T::Struct
  7. 1 include Sorbet::SerializeSymbolKeys
  8. # Enable or disable Sorbet error handler integration
  9. # Default: true
  10. 1 prop :enable_sorbet_error_handlers, T::Boolean, default: true
  11. # Enable or disable Lograge integration
  12. # Default: true
  13. 1 prop :enable_lograge, T::Boolean, default: true
  14. # Custom options for Lograge
  15. # Default: nil
  16. 1 prop :lograge_custom_options, T.nilable(Handlers::LogrageCustomOptions), default: nil
  17. # Enable or disable ActionMailer integration
  18. # Default: true
  19. 1 prop :enable_actionmailer, T::Boolean, default: true
  20. # Enable or disable host authorization logging
  21. # Default: true
  22. 1 prop :enable_host_authorization, T::Boolean, default: true
  23. # Enable or disable ActiveJob integration
  24. # Default: true
  25. 1 prop :enable_activejob, T::Boolean, default: true
  26. # Enable or disable Rack middleware
  27. # Default: true
  28. 1 prop :enable_rack_error_handler, T::Boolean, default: true
  29. # Enable or disable Sidekiq integration
  30. # Default: true
  31. 1 prop :enable_sidekiq, T::Boolean, default: true
  32. # Enable or disable Shrine integration
  33. # Default: true
  34. 1 prop :enable_shrine, T::Boolean, default: true
  35. # Enable or disable ActiveStorage integration
  36. # Default: true
  37. 1 prop :enable_activestorage, T::Boolean, default: true
  38. # Enable or disable CarrierWave integration
  39. # Default: true
  40. 1 prop :enable_carrierwave, T::Boolean, default: true
  41. # Enable or disable GoodJob integration
  42. # Default: true
  43. 1 prop :enable_goodjob, T::Boolean, default: true
  44. # Enable SemanticLogger integration for high-performance logging
  45. # Default: true
  46. 1 prop :enable_semantic_logger, T::Boolean, default: true
  47. # Enable colored JSON output in development
  48. # Default: true
  49. 1 prop :enable_color_output, T::Boolean, default: true
  50. # Color configuration for JSON output
  51. # Default: nil (uses SemanticLogger defaults)
  52. 1 prop :color_map, T.nilable(T::Hash[Symbol, Symbol]), default: nil
  53. # Filter noisy loggers (ActionView, etc.)
  54. # Default: false
  55. 1 prop :filter_noisy_loggers, T::Boolean, default: false
  56. # Enable SQL query logging through ActiveRecord instrumentation
  57. # Default: false (can be resource intensive)
  58. 1 prop :enable_sql_logging, T::Boolean, default: false
  59. # Only log SQL queries slower than this threshold (in milliseconds)
  60. # Set to 0 or nil to log all queries
  61. # Default: 100.0 (log queries taking >100ms)
  62. 1 prop :sql_slow_query_threshold, T.nilable(Float), default: 100.0
  63. # Include bind parameters in SQL logs (disable in production for security)
  64. # Default: true in development/test, false in production
  65. 44 prop :sql_log_bind_params, T::Boolean, factory: -> { !defined?(::Rails) || !::Rails.respond_to?(:env) || !::Rails.env.production? }
  66. end
  67. end
  68. end

lib/log_struct/configuration.rb

100.0% lines covered

24 relevant lines. 24 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "handlers"
  4. 1 require_relative "config_struct/error_handling_modes"
  5. 1 require_relative "config_struct/integrations"
  6. 1 require_relative "config_struct/filters"
  7. 1 module LogStruct
  8. # Core configuration class that provides a type-safe API
  9. 1 class Configuration < T::Struct
  10. 1 extend T::Sig
  11. 1 include Sorbet::SerializeSymbolKeys
  12. # -------------------------------------------------------------------------------------
  13. # Props
  14. # -------------------------------------------------------------------------------------
  15. 1 prop :enabled, T::Boolean, default: true
  16. 44 prop :enabled_environments, T::Array[Symbol], factory: -> { [:test, :production] }
  17. 44 prop :local_environments, T::Array[Symbol], factory: -> { [:development, :test] }
  18. 44 const :integrations, ConfigStruct::Integrations, factory: -> { ConfigStruct::Integrations.new }
  19. 32 const :filters, ConfigStruct::Filters, factory: -> { ConfigStruct::Filters.new }
  20. # Custom log scrubbing handler for any additional string scrubbing
  21. # Default: nil
  22. 1 prop :string_scrubbing_handler, T.nilable(Handlers::StringScrubber)
  23. # Custom handler for error reporting
  24. # Default: Errors are handled by MultiErrorReporter
  25. # (auto-detects Sentry, Bugsnag, Rollbar, Honeybadger, etc.)
  26. 1 prop :error_reporting_handler, T.nilable(Handlers::ErrorReporter), default: nil
  27. # How to handle errors from various sources
  28. 1 const :error_handling_modes,
  29. ConfigStruct::ErrorHandlingModes,
  30. factory: -> {
  31. 43 ConfigStruct::ErrorHandlingModes.new
  32. }
  33. # -------------------------------------------------------------------------------------
  34. # Class Methods
  35. # -------------------------------------------------------------------------------------
  36. # Class‐instance variable
  37. 1 @instance = T.let(nil, T.nilable(Configuration))
  38. 2 sig { returns(Configuration) }
  39. 1 def self.instance
  40. 1282 @instance ||= T.let(Configuration.new, T.nilable(Configuration))
  41. end
  42. 2 sig { params(config: Configuration).void }
  43. 1 def self.set_instance(config)
  44. 93 @instance = config
  45. end
  46. end
  47. end

lib/log_struct/enums.rb

100.0% lines covered

5 relevant lines. 5 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. # Require all enums in this directory
  4. 1 require_relative "enums/error_handling_mode"
  5. 1 require_relative "enums/error_reporter"
  6. 1 require_relative "enums/event"
  7. 1 require_relative "enums/level"
  8. 1 require_relative "enums/source"

lib/log_struct/enums/error_handling_mode.rb

100.0% lines covered

9 relevant lines. 9 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 module LogStruct
  4. # Enum for error handling modes
  5. 1 class ErrorHandlingMode < T::Enum
  6. 1 enums do
  7. # Always ignore the error
  8. 1 Ignore = new(:ignore)
  9. # Always log the error
  10. 1 Log = new(:log)
  11. # Always report to tracking service and continue
  12. 1 Report = new(:report)
  13. # Log in production, raise locally (dev/test)
  14. 1 LogProduction = new(:log_production)
  15. # Report in production, raise locally (dev/test)
  16. 1 ReportProduction = new(:report_production)
  17. # Always raise regardless of environment
  18. 1 Raise = new(:raise)
  19. end
  20. end
  21. end

lib/log_struct/enums/error_reporter.rb

100.0% lines covered

8 relevant lines. 8 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 module LogStruct
  4. 1 class ErrorReporter < T::Enum
  5. 1 enums do
  6. 1 RailsLogger = new(:rails_logger)
  7. 1 Sentry = new(:sentry)
  8. 1 Bugsnag = new(:bugsnag)
  9. 1 Rollbar = new(:rollbar)
  10. 1 Honeybadger = new(:honeybadger)
  11. end
  12. end
  13. end

lib/log_struct/enums/event.rb

100.0% lines covered

24 relevant lines. 24 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 module LogStruct
  4. # Define log event types as an enum
  5. 1 class Event < T::Enum
  6. 1 enums do
  7. # Plain log messages
  8. 1 Log = new(:log)
  9. # Request events
  10. 1 Request = new(:request)
  11. # Job events
  12. 1 Enqueue = new(:enqueue)
  13. 1 Schedule = new(:schedule)
  14. 1 Start = new(:start)
  15. 1 Finish = new(:finish)
  16. # File storage events (ActiveStorage, Shrine, CarrierWave, etc.)
  17. 1 Upload = new(:upload)
  18. 1 Download = new(:download)
  19. 1 Delete = new(:delete)
  20. 1 Metadata = new(:metadata)
  21. 1 Exist = new(:exist)
  22. 1 Stream = new(:stream)
  23. 1 Url = new(:url)
  24. # Email events
  25. 1 Delivery = new(:delivery)
  26. 1 Delivered = new(:delivered)
  27. # Security events
  28. 1 IPSpoof = new(:ip_spoof)
  29. 1 CSRFViolation = new(:csrf_violation)
  30. 1 BlockedHost = new(:blocked_host)
  31. # Database events
  32. 1 Database = new(:database)
  33. # Error events
  34. 1 Error = new(:error)
  35. # Fallback
  36. 1 Unknown = new(:unknown)
  37. end
  38. end
  39. end

lib/log_struct/enums/level.rb

100.0% lines covered

43 relevant lines. 43 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require "logger"
  4. 1 module LogStruct
  5. # Define log levels as an enum
  6. 1 class Level < T::Enum
  7. 1 extend T::Sig
  8. 1 enums do
  9. # Standard log levels
  10. 1 Debug = new(:debug)
  11. 1 Info = new(:info)
  12. 1 Warn = new(:warn)
  13. 1 Error = new(:error)
  14. 1 Fatal = new(:fatal)
  15. 1 Unknown = new(:unknown)
  16. end
  17. # Convert a Level to the corresponding Logger integer constant
  18. 2 sig { returns(Integer) }
  19. 1 def to_severity_int
  20. 6 case serialize
  21. 1 when :debug then ::Logger::DEBUG
  22. 1 when :info then ::Logger::INFO
  23. 1 when :warn then ::Logger::WARN
  24. 1 when :error then ::Logger::ERROR
  25. 1 when :fatal then ::Logger::FATAL
  26. 1 else ::Logger::UNKNOWN
  27. end
  28. end
  29. # Convert a string or integer severity to a Level
  30. 2 sig { params(severity: T.any(String, Symbol, Integer, NilClass)).returns(Level) }
  31. 1 def self.from_severity(severity)
  32. 76 return Unknown if severity.nil?
  33. 75 return from_severity_int(severity) if severity.is_a?(Integer)
  34. 64 from_severity_sym(severity.downcase.to_sym)
  35. end
  36. 2 sig { params(severity: Symbol).returns(Level) }
  37. 1 def self.from_severity_sym(severity)
  38. 64 case severity.to_s.downcase.to_sym
  39. 6 when :debug then Debug
  40. 36 when :info then Info
  41. 5 when :warn then Warn
  42. 9 when :error then Error
  43. 5 when :fatal then Fatal
  44. 3 else Unknown
  45. end
  46. end
  47. 2 sig { params(severity: Integer).returns(Level) }
  48. 1 def self.from_severity_int(severity)
  49. 11 case severity
  50. 1 when ::Logger::DEBUG then Debug
  51. 5 when ::Logger::INFO then Info
  52. 1 when ::Logger::WARN then Warn
  53. 1 when ::Logger::ERROR then Error
  54. 1 when ::Logger::FATAL then Fatal
  55. 2 else Unknown
  56. end
  57. end
  58. end
  59. end

lib/log_struct/enums/source.rb

100.0% lines covered

14 relevant lines. 14 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 module LogStruct
  4. # Combined Source class that unifies log and error sources
  5. 1 class Source < T::Enum
  6. 1 enums do
  7. # Error sources
  8. 1 TypeChecking = new(:type_checking) # For type checking errors (Sorbet)
  9. 1 LogStruct = new(:logstruct) # Errors from LogStruct itself
  10. 1 Security = new(:security) # Security-related events
  11. # Application sources
  12. 1 Rails = new(:rails) # For request-related logs/errors
  13. 1 Job = new(:job) # ActiveJob logs/errors
  14. 1 Storage = new(:storage) # ActiveStorage logs/errors
  15. 1 Mailer = new(:mailer) # ActionMailer logs/errors
  16. 1 App = new(:app) # General application logs/errors
  17. # Third-party gem sources
  18. 1 Shrine = new(:shrine)
  19. 1 CarrierWave = new(:carrierwave)
  20. 1 Sidekiq = new(:sidekiq)
  21. end
  22. end
  23. end

lib/log_struct/formatter.rb

90.43% lines covered

94 relevant lines. 85 lines covered and 9 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require "logger"
  4. 1 require "active_support/core_ext/object/blank"
  5. 1 require "json"
  6. 1 require "globalid"
  7. 1 require_relative "enums/source"
  8. 1 require_relative "enums/event"
  9. 1 require_relative "string_scrubber"
  10. 1 require_relative "log"
  11. 1 require_relative "param_filters"
  12. 1 require_relative "multi_error_reporter"
  13. 1 module LogStruct
  14. 1 class Formatter < ::Logger::Formatter
  15. 1 extend T::Sig
  16. # Add current_tags method to support ActiveSupport::TaggedLogging
  17. 2 sig { returns(T::Array[String]) }
  18. 1 def current_tags
  19. 7 Thread.current[:activesupport_tagged_logging_tags] ||= []
  20. end
  21. # Add tagged method to support ActiveSupport::TaggedLogging
  22. 2 sig { params(tags: T::Array[String], blk: T.proc.params(formatter: Formatter).void).returns(T.untyped) }
  23. 1 def tagged(*tags, &blk)
  24. 1 new_tags = tags.flatten
  25. 1 current_tags.concat(new_tags) if new_tags.any?
  26. 1 yield self
  27. ensure
  28. 1 current_tags.pop(new_tags.size) if new_tags&.any?
  29. end
  30. # Add clear_tags! method to support ActiveSupport::TaggedLogging
  31. 2 sig { void }
  32. 1 def clear_tags!
  33. 1 Thread.current[:activesupport_tagged_logging_tags] = []
  34. end
  35. 1 sig { params(tags: T::Array[String]).returns(T.untyped) }
  36. 1 def push_tags(*tags)
  37. current_tags.concat(tags)
  38. end
  39. 2 sig { params(string: String).returns(String) }
  40. 1 def scrub_string(string)
  41. # Use StringScrubber module to scrub sensitive information from strings
  42. 273 StringScrubber.scrub(string)
  43. end
  44. 2 sig { params(arg: T.untyped, recursion_depth: Integer).returns(T.untyped) }
  45. 1 def process_values(arg, recursion_depth: 0)
  46. # Prevent infinite recursion in case any args have circular references
  47. # or are too deeply nested. Just return args.
  48. 453 return arg if recursion_depth > 20
  49. 451 case arg
  50. when Hash
  51. 71 result = {}
  52. # Process each key-value pair
  53. 71 arg.each do |key, value|
  54. # Check if this key should be filtered at any depth
  55. 350 result[key] = if ParamFilters.should_filter_key?(key)
  56. # Filter the value
  57. 1 {_filtered: ParamFilters.summarize_json_attribute(key, value)}
  58. else
  59. # Process the value normally
  60. 349 process_values(value, recursion_depth: recursion_depth + 1)
  61. end
  62. end
  63. 71 result
  64. when Array
  65. 69 result = arg.map { |value| process_values(value, recursion_depth: recursion_depth + 1) }
  66. # Filter large arrays, but don't truncate backtraces (arrays of strings that look like file:line)
  67. 12 if result.size > 10 && !looks_like_backtrace?(result)
  68. 1 result = result.take(10) + ["... and #{result.size - 10} more items"]
  69. end
  70. 12 result
  71. when GlobalID::Identification
  72. begin
  73. 5 arg.to_global_id
  74. rescue
  75. begin
  76. 1 case arg
  77. when ActiveRecord::Base
  78. "#{arg.class}(##{arg.id})"
  79. else
  80. # For non-ActiveRecord objects that failed to_global_id, try to get a string representation
  81. # If this also fails, we want to catch it and return the error placeholder
  82. T.unsafe(arg).to_s
  83. end
  84. rescue => e
  85. 1 LogStruct.handle_exception(e, source: Source::LogStruct)
  86. 1 "[GLOBALID_ERROR]"
  87. end
  88. end
  89. when Source, Event
  90. arg.serialize
  91. when String
  92. 273 scrub_string(arg)
  93. when Time
  94. arg.iso8601(3)
  95. else
  96. # Any other type (e.g. Symbol, Integer, Float, Boolean etc.)
  97. 90 arg
  98. end
  99. rescue => e
  100. # Report error through LogStruct's framework
  101. context = {
  102. processor_method: "process_values",
  103. value_type: arg.class.name,
  104. recursion_depth: recursion_depth
  105. }
  106. LogStruct.handle_exception(e, source: Source::LogStruct, context: context)
  107. arg
  108. end
  109. 2 sig { params(log_value: T.untyped, time: Time).returns(T::Hash[Symbol, T.untyped]) }
  110. 1 def log_value_to_hash(log_value, time:)
  111. 48 case log_value
  112. when Log::Interfaces::CommonFields
  113. # Our log classes all implement a custom #serialize method that use symbol keys
  114. 33 log_value.serialize
  115. when T::Struct
  116. # Default T::Struct.serialize methods returns a hash with string keys, so convert them to symbols
  117. log_value.serialize.deep_symbolize_keys
  118. when Hash
  119. # Use hash as is and convert string keys to symbols
  120. 7 log_value.dup.deep_symbolize_keys
  121. else
  122. # Create a Plain log with the message as a string and serialize it with symbol keys
  123. # log_value can be literally anything: Integer, Float, Boolean, NilClass, etc.
  124. 8 log_message = case log_value
  125. # Handle all the basic types without any further processing
  126. when String, Symbol, TrueClass, FalseClass, NilClass, Array, Hash, Time, Numeric
  127. 5 log_value
  128. else
  129. # Handle the serialization of complex objects in a useful way:
  130. #
  131. # 1. For ActiveRecord models: Use as_json which includes attributes
  132. # 2. For objects with custom as_json implementations: Use their implementation
  133. # 3. For basic objects that only have ActiveSupport's as_json: Use to_s
  134. begin
  135. 3 method_owner = log_value.method(:as_json).owner
  136. # If it's ActiveRecord, ActiveModel, or a custom implementation, use as_json
  137. 2 if method_owner.to_s.include?("ActiveRecord") ||
  138. method_owner.to_s.include?("ActiveModel") ||
  139. method_owner.to_s.exclude?("ActiveSupport::CoreExtensions") &&
  140. method_owner.to_s.exclude?("Object")
  141. 1 log_value.as_json
  142. else
  143. # For plain objects with only the default ActiveSupport as_json
  144. 1 log_value.to_s
  145. end
  146. rescue => e
  147. # Handle serialization errors
  148. context = {
  149. 1 object_class: log_value.class.name,
  150. object_inspect: log_value.inspect.truncate(100)
  151. }
  152. 1 LogStruct.handle_exception(e, source: Source::LogStruct, context: context)
  153. # Fall back to the string representation to ensure we continue processing
  154. 1 log_value.to_s
  155. end
  156. end
  157. 8 Log::Plain.new(
  158. message: log_message,
  159. timestamp: time
  160. ).serialize
  161. end
  162. end
  163. # Serializes Log (or string) into JSON
  164. 2 sig { params(severity: T.any(String, Symbol, Integer), time: Time, progname: T.nilable(String), log_value: T.untyped).returns(String) }
  165. 1 def call(severity, time, progname, log_value)
  166. 41 level_enum = Level.from_severity(severity)
  167. 41 data = log_value_to_hash(log_value, time: time)
  168. # Filter params, scrub sensitive values, format ActiveJob GlobalID arguments
  169. 41 data = process_values(data)
  170. # Add standard fields if not already present
  171. 41 data[:src] ||= Source::App
  172. 41 data[:evt] ||= Event::Log
  173. 41 data[:ts] ||= time.iso8601(3)
  174. 41 data[:lvl] = level_enum # Set level from severity parameter
  175. 41 data[:prog] = progname if progname.present?
  176. 41 generate_json(data)
  177. end
  178. # Output as JSON with a newline. We mock this method in tests so we can
  179. # inspect the data right before it gets turned into a JSON string.
  180. 2 sig { params(data: T::Hash[T.untyped, T.untyped]).returns(String) }
  181. 1 def generate_json(data)
  182. 42 "#{data.to_json}\n"
  183. end
  184. # Check if an array looks like a backtrace (array of strings with file:line pattern)
  185. 2 sig { params(array: T::Array[T.untyped]).returns(T::Boolean) }
  186. 1 def looks_like_backtrace?(array)
  187. 2 return false if array.empty?
  188. # Check if most elements look like backtrace lines (file.rb:123 or similar patterns)
  189. 2 backtrace_like_count = array.first(5).count do |element|
  190. 10 element.is_a?(String) && element.match?(/\A[^:\s]+:\d+/)
  191. end
  192. # If at least 3 out of the first 5 elements look like backtrace lines, treat as backtrace
  193. 2 backtrace_like_count >= 3
  194. end
  195. end
  196. end

lib/log_struct/handlers.rb

100.0% lines covered

7 relevant lines. 7 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 module LogStruct
  4. # Module for custom handlers used throughout the library
  5. 1 module Handlers
  6. # Type for Lograge custom options
  7. 1 LogrageCustomOptions = T.type_alias {
  8. 1 T.proc.params(
  9. event: ActiveSupport::Notifications::Event,
  10. options: T::Hash[Symbol, T.untyped]
  11. ).returns(T.untyped)
  12. }
  13. # Type for error reporting handlers
  14. 1 ErrorReporter = T.type_alias {
  15. 1 T.proc.params(
  16. error: StandardError,
  17. context: T.nilable(T::Hash[Symbol, T.untyped]),
  18. source: Source
  19. ).void
  20. }
  21. # Type for string scrubbing handlers
  22. 2 StringScrubber = T.type_alias { T.proc.params(string: String).returns(String) }
  23. end
  24. end

lib/log_struct/hash_utils.rb

100.0% lines covered

10 relevant lines. 10 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require "digest"
  4. 1 module LogStruct
  5. # Utility module for hashing sensitive data
  6. 1 module HashUtils
  7. 1 class << self
  8. 1 extend T::Sig
  9. # Create a hash of a string value for tracing while preserving privacy
  10. 2 sig { params(value: String).returns(String) }
  11. 1 def hash_value(value)
  12. 16 salt = LogStruct.config.filters.hash_salt
  13. 16 length = LogStruct.config.filters.hash_length
  14. 16 Digest::SHA256.hexdigest("#{salt}#{value}")[0...length] || "error"
  15. end
  16. end
  17. end
  18. end

lib/log_struct/integrations.rb

58.06% lines covered

31 relevant lines. 18 lines covered and 13 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "integrations/integration_interface"
  4. 1 require_relative "integrations/active_job"
  5. 1 require_relative "integrations/active_record"
  6. 1 require_relative "integrations/rack_error_handler"
  7. 1 require_relative "integrations/host_authorization"
  8. 1 require_relative "integrations/action_mailer"
  9. 1 require_relative "integrations/lograge"
  10. 1 require_relative "integrations/shrine"
  11. 1 require_relative "integrations/sidekiq"
  12. 1 require_relative "integrations/good_job"
  13. 1 require_relative "integrations/active_storage"
  14. 1 require_relative "integrations/carrierwave"
  15. 1 require_relative "integrations/sorbet"
  16. 1 module LogStruct
  17. 1 module Integrations
  18. 1 extend T::Sig
  19. 1 sig { void }
  20. 1 def self.setup_integrations
  21. config = LogStruct.config
  22. # Set up each integration with consistent configuration pattern
  23. Integrations::Lograge.setup(config) if config.integrations.enable_lograge
  24. Integrations::ActionMailer.setup(config) if config.integrations.enable_actionmailer
  25. Integrations::ActiveJob.setup(config) if config.integrations.enable_activejob
  26. Integrations::ActiveRecord.setup(config) if config.integrations.enable_sql_logging
  27. Integrations::Sidekiq.setup(config) if config.integrations.enable_sidekiq
  28. Integrations::GoodJob.setup(config) if config.integrations.enable_goodjob
  29. Integrations::HostAuthorization.setup(config) if config.integrations.enable_host_authorization
  30. Integrations::RackErrorHandler.setup(config) if config.integrations.enable_rack_error_handler
  31. Integrations::Shrine.setup(config) if config.integrations.enable_shrine
  32. Integrations::ActiveStorage.setup(config) if config.integrations.enable_activestorage
  33. Integrations::CarrierWave.setup(config) if config.integrations.enable_carrierwave
  34. Integrations::Sorbet.setup(config) if config.integrations.enable_sorbet_error_handlers
  35. end
  36. end
  37. end

lib/log_struct/integrations/action_mailer.rb

100.0% lines covered

25 relevant lines. 25 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. begin
  4. 1 require "action_mailer"
  5. rescue LoadError
  6. # actionmailer gem is not available, integration will be skipped
  7. end
  8. 1 if defined?(::ActionMailer)
  9. 1 require "logger"
  10. 1 require_relative "action_mailer/metadata_collection"
  11. 1 require_relative "action_mailer/event_logging"
  12. 1 require_relative "action_mailer/error_handling"
  13. 1 require_relative "action_mailer/callbacks"
  14. end
  15. 1 module LogStruct
  16. 1 module Integrations
  17. # ActionMailer integration for structured logging
  18. 1 module ActionMailer
  19. 1 extend T::Sig
  20. 1 extend IntegrationInterface
  21. # Set up ActionMailer structured logging
  22. 2 sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
  23. 1 def self.setup(config)
  24. 6 return nil unless defined?(::ActionMailer)
  25. 6 return nil unless config.enabled
  26. 6 return nil unless config.integrations.enable_actionmailer
  27. # Silence default ActionMailer logs (we use our own structured logging)
  28. # This is required because we replace the logging using our own callbacks
  29. 5 if defined?(::ActionMailer::Base)
  30. 5 ::ActionMailer::Base.logger = ::Logger.new(File::NULL)
  31. end
  32. # Register our custom observers and handlers
  33. # Registering these at the class level means all mailers will use them
  34. 10 ActiveSupport.on_load(:action_mailer) { prepend LogStruct::Integrations::ActionMailer::MetadataCollection }
  35. 10 ActiveSupport.on_load(:action_mailer) { prepend LogStruct::Integrations::ActionMailer::EventLogging }
  36. 10 ActiveSupport.on_load(:action_mailer) { prepend LogStruct::Integrations::ActionMailer::ErrorHandling }
  37. 10 ActiveSupport.on_load(:action_mailer) { prepend LogStruct::Integrations::ActionMailer::Callbacks }
  38. 10 ActiveSupport.on_load(:action_mailer) { LogStruct::Integrations::ActionMailer::Callbacks.patch_message_delivery }
  39. 5 true
  40. end
  41. end
  42. end
  43. end

lib/log_struct/integrations/action_mailer/callbacks.rb

88.89% lines covered

45 relevant lines. 40 lines covered and 5 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 module LogStruct
  4. 1 module Integrations
  5. 1 module ActionMailer
  6. # Backport of the *_deliver callbacks from Rails 7.1
  7. 1 module Callbacks
  8. 1 extend T::Sig
  9. 1 extend ::ActiveSupport::Concern
  10. # Track if we've already patched MessageDelivery
  11. 1 @patched_message_delivery = T.let(false, T::Boolean)
  12. # We can't use included block with strict typing
  13. # This will be handled by ActiveSupport::Concern at runtime
  14. 1 included do
  15. 2 include ::ActiveSupport::Callbacks
  16. 2 if defined?(::ActiveSupport) && ::ActiveSupport.gem_version >= Gem::Version.new("7.1.0")
  17. 2 define_callbacks :deliver, skip_after_callbacks_if_terminated: true
  18. else
  19. define_callbacks :deliver
  20. end
  21. end
  22. # When this module is prepended (our integration uses prepend), ensure callbacks are defined
  23. 1 if respond_to?(:prepended)
  24. 1 prepended do
  25. 1 include ::ActiveSupport::Callbacks
  26. 1 if defined?(::ActiveSupport) && ::ActiveSupport.gem_version >= Gem::Version.new("7.1.0")
  27. 1 define_callbacks :deliver, skip_after_callbacks_if_terminated: true
  28. else
  29. define_callbacks :deliver
  30. end
  31. end
  32. end
  33. # Define class methods in a separate module
  34. 1 module ClassMethods
  35. 1 extend T::Sig
  36. # Defines a callback that will get called right before the
  37. # message is sent to the delivery method.
  38. 2 sig { params(filters: T.untyped, blk: T.nilable(T.proc.bind(T.untyped).void)).void }
  39. 1 def before_deliver(*filters, &blk)
  40. # Use T.unsafe for splat arguments due to Sorbet limitation
  41. 1 T.unsafe(self).set_callback(:deliver, :before, *filters, &blk)
  42. end
  43. # Defines a callback that will get called right after the
  44. # message's delivery method is finished.
  45. 2 sig { params(filters: T.untyped, blk: T.nilable(T.proc.bind(T.untyped).void)).void }
  46. 1 def after_deliver(*filters, &blk)
  47. # Use T.unsafe for splat arguments due to Sorbet limitation
  48. 1 T.unsafe(self).set_callback(:deliver, :after, *filters, &blk)
  49. end
  50. # Defines a callback that will get called around the message's deliver method.
  51. 1 sig { params(filters: T.untyped, blk: T.nilable(T.proc.bind(T.untyped).params(arg0: T.untyped).void)).void }
  52. 1 def around_deliver(*filters, &blk)
  53. # Use T.unsafe for splat arguments due to Sorbet limitation
  54. T.unsafe(self).set_callback(:deliver, :around, *filters, &blk)
  55. end
  56. end
  57. # Module to patch ActionMailer::MessageDelivery with callback support
  58. 1 module MessageDeliveryCallbacks
  59. 1 extend T::Sig
  60. 2 sig { returns(T.untyped) }
  61. 1 def deliver_now
  62. 7 processed_mailer.run_callbacks(:deliver) do
  63. 3 message.deliver
  64. end
  65. end
  66. 1 sig { returns(T.untyped) }
  67. 1 def deliver_now!
  68. processed_mailer.run_callbacks(:deliver) do
  69. message.deliver!
  70. end
  71. end
  72. end
  73. 2 sig { returns(T::Boolean) }
  74. 1 def self.patch_message_delivery
  75. # Return early if we've already patched
  76. 12 return true if @patched_message_delivery
  77. # Prepend our module to add callback support to MessageDelivery
  78. 1 ::ActionMailer::MessageDelivery.prepend(MessageDeliveryCallbacks)
  79. # Mark as patched so we don't do it again
  80. 1 @patched_message_delivery = true
  81. 1 true
  82. end
  83. end
  84. end
  85. end
  86. end

lib/log_struct/integrations/action_mailer/error_handling.rb

87.1% lines covered

62 relevant lines. 54 lines covered and 8 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 module LogStruct
  4. 1 module Integrations
  5. 1 module ActionMailer
  6. # Handles error handling for ActionMailer
  7. #
  8. # IMPORTANT LIMITATIONS:
  9. # 1. This module must be included BEFORE users define rescue_from handlers
  10. # to ensure proper handler precedence (user handlers are checked first)
  11. # 2. Rails rescue_from handlers don't bubble to parent class handlers after reraise
  12. # 3. Handler order matters: Rails checks rescue_from handlers in reverse declaration order
  13. 1 module ErrorHandling
  14. 1 extend T::Sig
  15. 1 extend ActiveSupport::Concern
  16. # NOTE: rescue_from handlers are checked in reverse order of declaration.
  17. # We want LogStruct handlers to be checked AFTER user handlers (lower priority),
  18. # so we need to add them BEFORE user handlers are declared.
  19. # This will be called when the module is included/prepended
  20. 2 sig { params(base: T.untyped).void }
  21. 1 def self.install_handler(base)
  22. # Only add the handler once per class
  23. 5 return if base.instance_variable_get(:@_logstruct_handler_installed)
  24. # Add our handler FIRST so it has lower priority than user handlers
  25. 1 base.rescue_from StandardError, with: :log_and_reraise_error
  26. # Mark as installed to prevent duplicates
  27. 1 base.instance_variable_set(:@_logstruct_handler_installed, true)
  28. end
  29. 1 included do
  30. LogStruct::Integrations::ActionMailer::ErrorHandling.install_handler(self)
  31. end
  32. # Also support prepended (used by tests and manual setup)
  33. 2 sig { params(base: T.untyped).void }
  34. 1 def self.prepended(base)
  35. 5 install_handler(base)
  36. end
  37. 1 protected
  38. # Just log the error without reporting or retrying
  39. 1 sig { params(ex: StandardError).void }
  40. 1 def log_and_ignore_error(ex)
  41. log_email_delivery_error(ex, notify: false, report: false, reraise: false)
  42. end
  43. # Log and report to error service, but doesn't reraise.
  44. 1 sig { params(ex: StandardError).void }
  45. 1 def log_and_report_error(ex)
  46. log_email_delivery_error(ex, notify: false, report: true, reraise: false)
  47. end
  48. # Log, report to error service, and reraise for retry
  49. 2 sig { params(ex: StandardError).void }
  50. 1 def log_and_reraise_error(ex)
  51. 1 log_email_delivery_error(ex, notify: false, report: true, reraise: true)
  52. end
  53. 1 private
  54. # Handle an error from a mailer
  55. 2 sig { params(mailer: T.untyped, error: StandardError, message: String).void }
  56. 1 def log_structured_error(mailer, error, message)
  57. # Create a structured exception log with context
  58. context = {
  59. 1 mailer_class: mailer.class.to_s,
  60. 1 mailer_action: mailer.respond_to?(:action_name) ? mailer.action_name : nil,
  61. message: message
  62. }
  63. # Create the structured exception log
  64. 1 exception_data = Log::Error.from_exception(
  65. Source::Mailer,
  66. error,
  67. context
  68. )
  69. # Log the structured error
  70. 1 LogStruct.error(exception_data)
  71. end
  72. # Log when email delivery fails
  73. 2 sig { params(error: StandardError, notify: T::Boolean, report: T::Boolean, reraise: T::Boolean).void }
  74. 1 def log_email_delivery_error(error, notify: false, report: true, reraise: true)
  75. # Generate appropriate error message
  76. 1 message = error_message_for(error, reraise)
  77. # Use structured error logging
  78. 1 log_structured_error(self, error, message)
  79. # Handle notifications and reporting
  80. 1 handle_error_notifications(error, notify, report, reraise)
  81. end
  82. # Generate appropriate error message based on error handling strategy
  83. 2 sig { params(error: StandardError, reraise: T::Boolean).returns(String) }
  84. 1 def error_message_for(error, reraise)
  85. 1 if reraise
  86. 1 "#{error.class}: Email delivery error, will retry. Recipients: #{recipients(error)}"
  87. else
  88. "#{error.class}: Cannot send email to #{recipients(error)}"
  89. end
  90. end
  91. # Handle error notifications, reporting, and reraising
  92. 2 sig { params(error: StandardError, notify: T::Boolean, report: T::Boolean, reraise: T::Boolean).void }
  93. 1 def handle_error_notifications(error, notify, report, reraise)
  94. # Log a notification event if requested
  95. 1 log_notification_event(error) if notify
  96. # Report to error reporting service if requested
  97. 1 if report
  98. context = {
  99. 1 mailer_class: self.class.to_s,
  100. 1 mailer_action: respond_to?(:action_name) ? action_name : nil,
  101. recipients: recipients(error)
  102. }
  103. # Create an exception log for structured logging
  104. 1 exception_data = Log::Error.from_exception(
  105. Source::Mailer,
  106. error,
  107. context
  108. )
  109. # Log the exception with structured data
  110. 1 LogStruct.error(exception_data)
  111. # Call the error handler
  112. 1 LogStruct.handle_exception(error, source: Source::Mailer, context: context)
  113. end
  114. # Re-raise the error if requested
  115. Kernel.raise error if reraise
  116. end
  117. # Log a notification event that can be picked up by external systems
  118. 1 sig { params(error: StandardError).void }
  119. 1 def log_notification_event(error)
  120. # Create an error log data object
  121. exception_data = Log::Error.from_exception(
  122. Source::Mailer,
  123. error,
  124. {
  125. mailer: self.class,
  126. action: action_name,
  127. recipients: recipients(error)
  128. }
  129. )
  130. # Log the error at info level since it's not a critical error
  131. LogStruct.info(exception_data)
  132. end
  133. 2 sig { params(error: StandardError).returns(String) }
  134. 1 def recipients(error)
  135. # Extract recipient info if available
  136. 2 if error.respond_to?(:recipients) && T.unsafe(error).recipients.present?
  137. T.unsafe(error).recipients.join(", ")
  138. else
  139. 2 "unknown"
  140. end
  141. end
  142. end
  143. end
  144. end
  145. end

lib/log_struct/integrations/action_mailer/event_logging.rb

100.0% lines covered

40 relevant lines. 40 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 module LogStruct
  4. 1 module Integrations
  5. 1 module ActionMailer
  6. # Handles logging of email delivery events
  7. 1 module EventLogging
  8. 1 extend ActiveSupport::Concern
  9. 1 extend T::Sig
  10. 1 extend T::Helpers
  11. 1 requires_ancestor { ::ActionMailer::Base }
  12. 1 included do
  13. 1 T.bind(self, ActionMailer::Callbacks::ClassMethods)
  14. # Add callbacks for delivery events
  15. 1 before_deliver :log_email_delivery
  16. 1 after_deliver :log_email_delivered
  17. end
  18. 1 protected
  19. # Log when an email is about to be delivered
  20. 2 sig { void }
  21. 1 def log_email_delivery
  22. 1 log_mailer_event(Event::Delivery)
  23. end
  24. # Log when an email is delivered
  25. 2 sig { void }
  26. 1 def log_email_delivered
  27. 1 log_mailer_event(Event::Delivered)
  28. end
  29. 1 private
  30. # Log a mailer event with the given event type
  31. 1 sig do
  32. 1 params(event_type: Log::ActionMailer::ActionMailerEvent,
  33. level: Symbol,
  34. additional_data: T::Hash[Symbol, T.untyped]).returns(T.untyped)
  35. end
  36. 1 def log_mailer_event(event_type, level = :info, additional_data = {})
  37. # Get message (self refers to the mailer instance)
  38. 2 mailer_message = message if respond_to?(:message)
  39. # Prepare data for the log entry
  40. data = {
  41. 2 message_id: extract_message_id,
  42. mailer_class: self.class.to_s,
  43. mailer_action: action_name.to_s
  44. }.compact
  45. # Add any additional metadata
  46. 2 MetadataCollection.add_message_metadata(self, data)
  47. 2 MetadataCollection.add_context_metadata(self, data)
  48. 2 data.merge!(additional_data) if additional_data.present?
  49. # Extract email fields (these will be filtered if email_addresses=true)
  50. 2 to = mailer_message&.to
  51. 2 from = mailer_message&.from&.first
  52. 2 subject = mailer_message&.subject
  53. # Create a structured log entry
  54. 2 log_data = Log::ActionMailer.new(
  55. event: event_type,
  56. to: to,
  57. from: from,
  58. subject: subject,
  59. additional_data: data
  60. )
  61. 2 LogStruct.info(log_data)
  62. 2 log_data
  63. end
  64. # Extract message ID from the mailer
  65. 2 sig { returns(T.nilable(String)) }
  66. 1 def extract_message_id
  67. 2 return nil unless respond_to?(:message)
  68. 2 mail_message = message
  69. 2 return nil unless mail_message.respond_to?(:message_id)
  70. 2 mail_message.message_id
  71. end
  72. end
  73. end
  74. end
  75. end

lib/log_struct/integrations/action_mailer/metadata_collection.rb

88.89% lines covered

36 relevant lines. 32 lines covered and 4 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 module LogStruct
  4. 1 module Integrations
  5. 1 module ActionMailer
  6. # Handles collection of metadata for email logging
  7. 1 module MetadataCollection
  8. 1 extend T::Sig
  9. # Add message-specific metadata to log data
  10. 2 sig { params(mailer: T.untyped, log_data: T::Hash[Symbol, T.untyped]).void }
  11. 1 def self.add_message_metadata(mailer, log_data)
  12. 4 message = mailer.respond_to?(:message) ? mailer.message : nil
  13. # Add recipient count if message is available
  14. 4 if message
  15. # Don't log actual email addresses
  16. 3 log_data[:recipient_count] = [message.to, message.cc, message.bcc].flatten.compact.count
  17. # Handle case when attachments might be nil
  18. 3 log_data[:has_attachments] = message.attachments&.any? || false
  19. 3 log_data[:attachment_count] = message.attachments&.count || 0
  20. else
  21. 1 log_data[:recipient_count] = 0
  22. 1 log_data[:has_attachments] = false
  23. 1 log_data[:attachment_count] = 0
  24. end
  25. end
  26. # Add context metadata to log data
  27. 2 sig { params(mailer: T.untyped, log_data: T::Hash[Symbol, T.untyped]).void }
  28. 1 def self.add_context_metadata(mailer, log_data)
  29. # Add account ID information if available (but not user email)
  30. 4 extract_ids_to_log_data(mailer, log_data)
  31. # Add any current tags from ActiveJob or ActionMailer
  32. 4 add_current_tags_to_log_data(log_data)
  33. end
  34. 2 sig { params(mailer: T.untyped, log_data: T::Hash[Symbol, T.untyped]).void }
  35. 1 def self.extract_ids_to_log_data(mailer, log_data)
  36. # Extract account ID if available
  37. 4 if mailer.instance_variable_defined?(:@account)
  38. account = mailer.instance_variable_get(:@account)
  39. log_data[:account_id] = account.id if account.respond_to?(:id)
  40. end
  41. # Extract user ID if available
  42. 4 return unless mailer.instance_variable_defined?(:@user)
  43. user = mailer.instance_variable_get(:@user)
  44. log_data[:user_id] = user.id if user.respond_to?(:id)
  45. end
  46. 2 sig { params(log_data: T::Hash[Symbol, T.untyped]).void }
  47. 1 def self.add_current_tags_to_log_data(log_data)
  48. # Get current tags from ActiveSupport::TaggedLogging if available
  49. 4 if ::ActiveSupport::TaggedLogging.respond_to?(:current_tags)
  50. 4 tags = T.unsafe(::ActiveSupport::TaggedLogging).current_tags
  51. 4 log_data[:tags] = tags if tags.present?
  52. end
  53. # Get request_id from ActionDispatch if available
  54. 4 if ::ActionDispatch::Request.respond_to?(:current_request_id) &&
  55. T.unsafe(::ActionDispatch::Request).current_request_id.present?
  56. 4 log_data[:request_id] = T.unsafe(::ActionDispatch::Request).current_request_id
  57. end
  58. # Get job_id from ActiveJob if available
  59. 4 if defined?(::ActiveJob::Logging) && ::ActiveJob::Logging.respond_to?(:job_id) &&
  60. T.unsafe(::ActiveJob::Logging).job_id.present?
  61. 3 log_data[:job_id] = T.unsafe(::ActiveJob::Logging).job_id
  62. end
  63. end
  64. end
  65. end
  66. end
  67. end

lib/log_struct/integrations/active_job.rb

58.82% lines covered

17 relevant lines. 10 lines covered and 7 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. begin
  4. 1 require "active_job"
  5. 1 require "active_job/log_subscriber"
  6. rescue LoadError
  7. # ActiveJob gem is not available, integration will be skipped
  8. end
  9. 1 require_relative "active_job/log_subscriber" if defined?(::ActiveJob::LogSubscriber)
  10. 1 module LogStruct
  11. 1 module Integrations
  12. # ActiveJob integration for structured logging
  13. 1 module ActiveJob
  14. 1 extend T::Sig
  15. 1 extend IntegrationInterface
  16. # Set up ActiveJob structured logging
  17. 1 sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
  18. 1 def self.setup(config)
  19. return nil unless defined?(::ActiveJob::LogSubscriber)
  20. return nil unless config.enabled
  21. return nil unless config.integrations.enable_activejob
  22. ::ActiveSupport.on_load(:active_job) do
  23. # Detach the default text formatter
  24. ::ActiveJob::LogSubscriber.detach_from :active_job
  25. # Attach our structured formatter
  26. Integrations::ActiveJob::LogSubscriber.attach_to :active_job
  27. end
  28. true
  29. end
  30. end
  31. end
  32. end

lib/log_struct/integrations/active_job/log_subscriber.rb

55.81% lines covered

43 relevant lines. 24 lines covered and 19 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "../../enums/source"
  4. 1 require_relative "../../enums/event"
  5. 1 require_relative "../../log/active_job"
  6. 1 require_relative "../../log/error"
  7. 1 module LogStruct
  8. 1 module Integrations
  9. 1 module ActiveJob
  10. # Structured logging for ActiveJob
  11. 1 class LogSubscriber < ::ActiveJob::LogSubscriber
  12. 1 extend T::Sig
  13. 1 sig { params(event: T.untyped).void }
  14. 1 def enqueue(event)
  15. job = event.payload[:job]
  16. log_job_event(Event::Enqueue, job, event)
  17. end
  18. 1 sig { params(event: T.untyped).void }
  19. 1 def enqueue_at(event)
  20. job = event.payload[:job]
  21. log_job_event(Event::Schedule, job, event, scheduled_at: job.scheduled_at)
  22. end
  23. 1 sig { params(event: T.untyped).void }
  24. 1 def perform(event)
  25. job = event.payload[:job]
  26. exception = event.payload[:exception_object]
  27. if exception
  28. # Log the exception with the job context
  29. log_exception(exception, job, event)
  30. else
  31. log_job_event(Event::Finish, job, event, duration: event.duration.round(2))
  32. end
  33. end
  34. 1 sig { params(event: T.untyped).void }
  35. 1 def perform_start(event)
  36. job = event.payload[:job]
  37. log_job_event(Event::Start, job, event)
  38. end
  39. 1 private
  40. 1 sig { params(event_type: T.any(Event::Enqueue, Event::Schedule, Event::Start, Event::Finish), job: T.untyped, _event: T.untyped, additional_data: T::Hash[Symbol, T.untyped]).void }
  41. 1 def log_job_event(event_type, job, _event, additional_data = {})
  42. # Create structured log data
  43. log_data = Log::ActiveJob.new(
  44. event: event_type,
  45. job_id: job.job_id,
  46. job_class: job.class.to_s,
  47. queue_name: job.queue_name,
  48. duration: additional_data[:duration],
  49. # Add arguments if the job class allows it
  50. arguments: job.class.log_arguments? ? job.arguments : nil,
  51. # Store additional data in the data hash
  52. additional_data: {
  53. executions: job.executions,
  54. scheduled_at: additional_data[:scheduled_at],
  55. provider_job_id: job.provider_job_id
  56. }.compact
  57. )
  58. # Use Rails logger with our structured formatter
  59. logger.info(log_data)
  60. end
  61. 1 sig { params(exception: StandardError, job: T.untyped, _event: T.untyped).void }
  62. 1 def log_exception(exception, job, _event)
  63. # Create job context data for the exception
  64. job_context = {
  65. job_id: job.job_id,
  66. job_class: job.class.to_s,
  67. queue_name: job.queue_name,
  68. executions: job.executions,
  69. provider_job_id: job.provider_job_id
  70. }
  71. # Add arguments if the job class allows it
  72. job_context[:arguments] = job.arguments if job.class.log_arguments?
  73. # Create exception log with job source and context
  74. log_data = Log::Error.from_exception(
  75. Source::Job,
  76. exception,
  77. job_context
  78. )
  79. # Use Rails logger with our structured formatter
  80. logger.error(log_data)
  81. end
  82. 1 sig { returns(::ActiveSupport::Logger) }
  83. 1 def logger
  84. ::ActiveJob::Base.logger
  85. end
  86. end
  87. end
  88. end
  89. end

lib/log_struct/integrations/active_record.rb

95.45% lines covered

110 relevant lines. 105 lines covered and 5 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require "active_support/notifications"
  4. 1 module LogStruct
  5. 1 module Integrations
  6. # ActiveRecord Integration for SQL Query Logging
  7. #
  8. # This integration captures and structures all SQL queries executed through ActiveRecord,
  9. # providing detailed performance and debugging information in a structured format.
  10. #
  11. # ## Features:
  12. # - Captures all SQL queries with execution time
  13. # - Safely filters sensitive data from bind parameters
  14. # - Extracts database operation metadata
  15. # - Provides connection pool monitoring information
  16. # - Identifies query types and table names
  17. #
  18. # ## Performance Considerations:
  19. # - Minimal overhead on query execution
  20. # - Async logging prevents I/O blocking
  21. # - Configurable to disable in production if needed
  22. # - Smart filtering reduces log volume for repetitive queries
  23. #
  24. # ## Security:
  25. # - SQL queries are always parameterized (safe)
  26. # - Bind parameters filtered through LogStruct's param filters
  27. # - Sensitive patterns automatically scrubbed
  28. #
  29. # ## Configuration:
  30. # ```ruby
  31. # LogStruct.configure do |config|
  32. # config.integrations.enable_sql_logging = true
  33. # config.integrations.sql_slow_query_threshold = 100.0 # ms
  34. # config.integrations.sql_log_bind_params = false # disable in production
  35. # end
  36. # ```
  37. 1 module ActiveRecord
  38. 1 extend T::Sig
  39. 1 extend IntegrationInterface
  40. # Set up SQL query logging integration
  41. 2 sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
  42. 1 def self.setup(config)
  43. 16 return nil unless config.integrations.enable_sql_logging
  44. 15 return nil unless defined?(::ActiveRecord::Base)
  45. 14 subscribe_to_sql_notifications
  46. 14 true
  47. end
  48. 1 private_class_method
  49. # Subscribe to ActiveRecord's sql.active_record notifications
  50. 2 sig { void }
  51. 1 def self.subscribe_to_sql_notifications
  52. 14 ::ActiveSupport::Notifications.subscribe("sql.active_record") do |name, start, finish, id, payload|
  53. 9 handle_sql_event(name, start, finish, id, payload)
  54. rescue => error
  55. 1 LogStruct.handle_exception(error, source: LogStruct::Source::LogStruct)
  56. end
  57. end
  58. # Process SQL notification event and create structured log
  59. 2 sig { params(name: String, start: T.untyped, finish: T.untyped, id: String, payload: T::Hash[Symbol, T.untyped]).void }
  60. 1 def self.handle_sql_event(name, start, finish, id, payload)
  61. # Skip schema queries and Rails internal queries
  62. 31 return if skip_query?(payload)
  63. 24 duration = ((finish - start) * 1000.0).round(2)
  64. # Skip fast queries if threshold is configured
  65. 24 config = LogStruct.config
  66. 24 if config.integrations.sql_slow_query_threshold&.positive?
  67. 2 return if duration < config.integrations.sql_slow_query_threshold
  68. end
  69. 23 sql_log = Log::SQL.new(
  70. message: format_sql_message(payload),
  71. source: Source::App,
  72. event: Event::Database,
  73. sql: payload[:sql]&.strip || "",
  74. name: payload[:name] || "SQL Query",
  75. duration: duration,
  76. row_count: extract_row_count(payload),
  77. connection_adapter: extract_adapter_name(payload),
  78. bind_params: extract_and_filter_binds(payload),
  79. database_name: extract_database_name(payload),
  80. connection_pool_size: extract_pool_size(payload),
  81. active_connections: extract_active_connections(payload),
  82. operation_type: extract_operation_type(payload),
  83. table_names: extract_table_names(payload)
  84. )
  85. 22 LogStruct.info(sql_log)
  86. end
  87. # Determine if query should be skipped from logging
  88. 2 sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T::Boolean) }
  89. 1 def self.skip_query?(payload)
  90. 31 query_name = payload[:name]
  91. 31 sql = payload[:sql]
  92. # Skip Rails schema queries
  93. 31 return true if query_name&.include?("SCHEMA")
  94. 30 return true if query_name&.include?("CACHE")
  95. # Skip common Rails internal queries
  96. 29 return true if sql&.include?("schema_migrations")
  97. 28 return true if sql&.include?("ar_internal_metadata")
  98. # Skip SHOW/DESCRIBE queries
  99. 27 return true if sql&.match?(/\A\s*(SHOW|DESCRIBE|EXPLAIN)\s/i)
  100. 24 false
  101. end
  102. # Format a readable message for the SQL log
  103. 2 sig { params(payload: T::Hash[Symbol, T.untyped]).returns(String) }
  104. 1 def self.format_sql_message(payload)
  105. 23 operation_name = payload[:name] || "SQL Query"
  106. 23 "#{operation_name} executed"
  107. end
  108. # Extract row count from payload
  109. 2 sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(Integer)) }
  110. 1 def self.extract_row_count(payload)
  111. 23 row_count = payload[:row_count]
  112. 23 row_count.is_a?(Integer) ? row_count : nil
  113. end
  114. # Extract database adapter name
  115. 2 sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(String)) }
  116. 1 def self.extract_adapter_name(payload)
  117. 23 connection = payload[:connection]
  118. 23 return nil unless connection
  119. 22 adapter_name = connection.class.name
  120. 22 adapter_name&.split("::")&.last
  121. end
  122. # Extract and filter bind parameters
  123. 2 sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(T::Array[T.untyped])) }
  124. 1 def self.extract_and_filter_binds(payload)
  125. 23 return nil unless LogStruct.config.integrations.sql_log_bind_params
  126. # Prefer type_casted_binds as they're more readable
  127. 22 binds = payload[:type_casted_binds] || payload[:binds]
  128. 22 return nil unless binds
  129. # Filter sensitive data from bind parameters
  130. 2 binds.map do |bind|
  131. 4 filter_bind_parameter(bind)
  132. end
  133. end
  134. # Extract database name from connection
  135. 2 sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(String)) }
  136. 1 def self.extract_database_name(payload)
  137. 23 connection = payload[:connection]
  138. 23 return nil unless connection
  139. 22 if connection.respond_to?(:current_database)
  140. 22 connection.current_database
  141. elsif connection.respond_to?(:database)
  142. connection.database
  143. end
  144. rescue
  145. nil
  146. end
  147. # Extract connection pool size
  148. 2 sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(Integer)) }
  149. 1 def self.extract_pool_size(payload)
  150. 23 connection = payload[:connection]
  151. 23 return nil unless connection
  152. 22 pool = connection.pool if connection.respond_to?(:pool)
  153. 22 pool&.size
  154. rescue
  155. nil
  156. end
  157. # Extract active connection count
  158. 2 sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(Integer)) }
  159. 1 def self.extract_active_connections(payload)
  160. 23 connection = payload[:connection]
  161. 23 return nil unless connection
  162. 22 pool = connection.pool if connection.respond_to?(:pool)
  163. 22 pool&.stat&.[](:busy)
  164. rescue
  165. nil
  166. end
  167. # Extract SQL operation type (SELECT, INSERT, etc.)
  168. 2 sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(String)) }
  169. 1 def self.extract_operation_type(payload)
  170. 23 sql = payload[:sql]
  171. 23 return nil unless sql
  172. # Extract first word of SQL query
  173. 23 match = sql.strip.match(/\A\s*(\w+)/i)
  174. 23 match&.captures&.first&.upcase
  175. end
  176. # Extract table names from SQL query
  177. 2 sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(T::Array[String])) }
  178. 1 def self.extract_table_names(payload)
  179. 23 sql = payload[:sql]
  180. 23 return nil unless sql
  181. # Simple regex to extract table names (basic implementation)
  182. # This covers most common cases but could be enhanced
  183. 23 tables = []
  184. # Match FROM, JOIN, UPDATE, INSERT INTO, DELETE FROM patterns
  185. 23 sql.scan(/(?:FROM|JOIN|UPDATE|INTO|DELETE\s+FROM)\s+["`]?(\w+)["`]?/i) do |match|
  186. 23 table_name = match[0]
  187. 23 tables << table_name unless tables.include?(table_name)
  188. end
  189. 23 tables.empty? ? nil : tables
  190. end
  191. # Filter individual bind parameter values to remove sensitive data
  192. 2 sig { params(value: T.untyped).returns(T.untyped) }
  193. 1 def self.filter_bind_parameter(value)
  194. 4 case value
  195. when String
  196. # Filter strings that look like passwords, tokens, secrets, etc.
  197. 2 if looks_sensitive?(value)
  198. 1 "[FILTERED]"
  199. else
  200. 1 value
  201. end
  202. else
  203. 2 value
  204. end
  205. end
  206. # Check if a string value looks sensitive and should be filtered
  207. 2 sig { params(value: String).returns(T::Boolean) }
  208. 1 def self.looks_sensitive?(value)
  209. # Filter very long strings that might be tokens
  210. 2 return true if value.length > 50
  211. # Filter strings that look like hashed passwords, API keys, tokens
  212. 2 return true if value.match?(/\A[a-f0-9]{32,}\z/i) # MD5, SHA, etc.
  213. 2 return true if value.match?(/\A[A-Za-z0-9+\/]{20,}={0,2}\z/) # Base64
  214. 2 return true if value.match?(/(password|secret|token|key|auth)/i)
  215. 1 false
  216. end
  217. end
  218. end
  219. end

lib/log_struct/integrations/active_storage.rb

34.21% lines covered

38 relevant lines. 13 lines covered and 25 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "../enums/source"
  4. 1 require_relative "../enums/event"
  5. 1 require_relative "../log/active_storage"
  6. 1 module LogStruct
  7. 1 module Integrations
  8. # Integration for ActiveStorage structured logging
  9. 1 module ActiveStorage
  10. 1 extend T::Sig
  11. 1 extend IntegrationInterface
  12. # Set up ActiveStorage structured logging
  13. 1 sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
  14. 1 def self.setup(config)
  15. return nil unless defined?(::ActiveStorage)
  16. return nil unless config.enabled
  17. return nil unless config.integrations.enable_activestorage
  18. # Subscribe to all ActiveStorage service events
  19. ::ActiveSupport::Notifications.subscribe(/service_.*\.active_storage/) do |*args|
  20. process_active_storage_event(::ActiveSupport::Notifications::Event.new(*args), config)
  21. end
  22. true
  23. end
  24. 1 private_class_method
  25. # Process ActiveStorage events and create structured logs
  26. 1 sig { params(event: ActiveSupport::Notifications::Event, config: LogStruct::Configuration).void }
  27. 1 def self.process_active_storage_event(event, config)
  28. return unless config.enabled
  29. return unless config.integrations.enable_activestorage
  30. # Extract key information from the event
  31. event_name = event.name.sub(/\.active_storage$/, "")
  32. service_name = event.payload[:service]
  33. duration = event.duration
  34. # Map service events to log event types
  35. event_type = case event_name
  36. when "service_upload"
  37. Event::Upload
  38. when "service_download"
  39. Event::Download
  40. when "service_delete"
  41. Event::Delete
  42. when "service_delete_prefixed"
  43. Event::Delete
  44. when "service_exist"
  45. Event::Exist
  46. when "service_url"
  47. Event::Url
  48. when "service_download_chunk"
  49. Event::Download
  50. when "service_stream"
  51. Event::Stream
  52. when "service_update_metadata"
  53. Event::Metadata
  54. else
  55. Event::Unknown
  56. end
  57. # Map the event name to an operation
  58. operation = event_name.sub(/^service_/, "").to_sym
  59. # Create structured log event specific to ActiveStorage
  60. log_data = Log::ActiveStorage.new(
  61. event: event_type,
  62. operation: operation,
  63. storage: service_name.to_s,
  64. file_id: event.payload[:key].to_s,
  65. checksum: event.payload[:checksum].to_s,
  66. duration: duration,
  67. # Add other fields where available
  68. metadata: event.payload[:metadata],
  69. exist: event.payload[:exist],
  70. url: event.payload[:url],
  71. filename: event.payload[:filename],
  72. mime_type: event.payload[:content_type],
  73. size: event.payload[:byte_size],
  74. prefix: event.payload[:prefix],
  75. range: event.payload[:range]
  76. )
  77. # Log structured data
  78. LogStruct.info(log_data)
  79. end
  80. end
  81. end
  82. end

lib/log_struct/integrations/carrierwave.rb

44.44% lines covered

36 relevant lines. 16 lines covered and 20 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. begin
  4. 1 require "carrierwave"
  5. rescue LoadError
  6. # CarrierWave gem is not available, integration will be skipped
  7. end
  8. 1 module LogStruct
  9. 1 module Integrations
  10. # CarrierWave integration for structured logging
  11. 1 module CarrierWave
  12. 1 extend T::Sig
  13. 1 extend IntegrationInterface
  14. # Set up CarrierWave structured logging
  15. 1 sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
  16. 1 def self.setup(config)
  17. return nil unless defined?(::CarrierWave)
  18. return nil unless config.enabled
  19. return nil unless config.integrations.enable_carrierwave
  20. # Patch CarrierWave to add logging
  21. ::CarrierWave::Uploader::Base.prepend(LoggingMethods)
  22. true
  23. end
  24. # Methods to add logging to CarrierWave operations
  25. 1 module LoggingMethods
  26. 1 extend T::Sig
  27. 1 extend T::Helpers
  28. 1 requires_ancestor { ::CarrierWave::Uploader::Base }
  29. # Log file storage operations
  30. 1 sig { params(args: T.untyped).returns(T.untyped) }
  31. 1 def store!(*args)
  32. start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
  33. result = super
  34. duration = Process.clock_gettime(Process::CLOCK_MONOTONIC) - start_time
  35. # Extract file information
  36. file_size = file.size if file.respond_to?(:size)
  37. {
  38. identifier: identifier,
  39. filename: file.filename,
  40. content_type: file.content_type,
  41. size: file_size,
  42. store_path: store_path,
  43. extension: file.extension
  44. }
  45. # Log the store operation with structured data
  46. log_data = Log::CarrierWave.new(
  47. source: Source::CarrierWave,
  48. event: Event::Upload,
  49. duration: duration * 1000.0, # Convert to ms
  50. model: model.class.name,
  51. uploader: self.class.name,
  52. storage: storage.class.name,
  53. mount_point: mounted_as.to_s,
  54. filename: file.filename,
  55. mime_type: file.content_type,
  56. size: file_size,
  57. file_id: identifier,
  58. additional_data: {
  59. version: version_name.to_s,
  60. store_path: store_path,
  61. extension: file.extension
  62. }
  63. )
  64. ::Rails.logger.info(log_data)
  65. result
  66. end
  67. # Log file retrieve operations
  68. 1 sig { params(identifier: T.untyped, args: T.untyped).returns(T.untyped) }
  69. 1 def retrieve_from_store!(identifier, *args)
  70. start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
  71. result = super
  72. duration = Process.clock_gettime(Process::CLOCK_MONOTONIC) - start_time
  73. # Extract file information if available
  74. file_size = file.size if file&.respond_to?(:size)
  75. # Log the retrieve operation with structured data
  76. log_data = Log::CarrierWave.new(
  77. source: Source::CarrierWave,
  78. event: Event::Download,
  79. duration: duration * 1000.0, # Convert to ms
  80. uploader: self.class.name,
  81. storage: storage.class.name,
  82. mount_point: mounted_as.to_s,
  83. file_id: identifier,
  84. filename: file&.filename,
  85. mime_type: file&.content_type,
  86. size: file_size,
  87. additional_data: {
  88. version: version_name.to_s
  89. }
  90. )
  91. ::Rails.logger.info(log_data)
  92. result
  93. end
  94. end
  95. end
  96. end
  97. end

lib/log_struct/integrations/good_job.rb

53.13% lines covered

32 relevant lines. 17 lines covered and 15 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. begin
  4. 1 require "good_job"
  5. rescue LoadError
  6. # GoodJob gem is not available, integration will be skipped
  7. end
  8. 1 require_relative "good_job/logger" if defined?(::GoodJob)
  9. 1 require_relative "good_job/log_subscriber" if defined?(::GoodJob)
  10. 1 module LogStruct
  11. 1 module Integrations
  12. # GoodJob integration for structured logging
  13. #
  14. # GoodJob is a PostgreSQL-based ActiveJob backend that provides reliable,
  15. # scalable job processing for Rails applications. This integration provides
  16. # structured logging for all GoodJob operations.
  17. #
  18. # ## Features:
  19. # - Structured logging for job execution lifecycle
  20. # - Error tracking and retry logging
  21. # - Performance metrics and timing data
  22. # - Database operation logging
  23. # - Thread and process tracking
  24. # - Custom GoodJob logger with LogStruct formatting
  25. #
  26. # ## Integration Points:
  27. # - Replaces GoodJob.logger with LogStruct-compatible logger
  28. # - Subscribes to GoodJob's ActiveSupport notifications
  29. # - Captures job execution events, errors, and performance metrics
  30. # - Logs database operations and connection information
  31. #
  32. # ## Configuration:
  33. # The integration is automatically enabled when GoodJob is detected and
  34. # LogStruct configuration allows it. It can be disabled by setting:
  35. #
  36. # ```ruby
  37. # config.integrations.enable_goodjob = false
  38. # ```
  39. 1 module GoodJob
  40. 1 extend T::Sig
  41. 1 extend IntegrationInterface
  42. # Set up GoodJob structured logging
  43. #
  44. # This method configures GoodJob to use LogStruct's structured logging
  45. # by replacing the default logger and subscribing to job events.
  46. #
  47. # @param config [LogStruct::Configuration] The LogStruct configuration
  48. # @return [Boolean, nil] Returns true if setup was successful, nil if skipped
  49. 2 sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
  50. 1 def self.setup(config)
  51. 3 return nil unless defined?(::GoodJob)
  52. return nil unless config.enabled
  53. return nil unless config.integrations.enable_goodjob
  54. # Replace GoodJob's logger with our structured logger
  55. configure_logger
  56. # Subscribe to GoodJob's ActiveSupport notifications
  57. subscribe_to_notifications
  58. true
  59. end
  60. # Configure GoodJob to use LogStruct's structured logger
  61. 1 sig { void }
  62. 1 def self.configure_logger
  63. return unless defined?(::GoodJob)
  64. # Use direct reference to avoid const_get - GoodJob is guaranteed to be defined here
  65. goodjob_module = T.unsafe(GoodJob)
  66. # Replace GoodJob.logger with our structured logger if GoodJob is available
  67. if goodjob_module.respond_to?(:logger=)
  68. goodjob_module.logger = LogStruct::Integrations::GoodJob::Logger.new("GoodJob")
  69. end
  70. # Configure error handling for thread errors if GoodJob supports it
  71. if goodjob_module.respond_to?(:on_thread_error=)
  72. goodjob_module.on_thread_error = ->(exception) do
  73. # Log the error using our structured format
  74. log_entry = LogStruct::Log::GoodJob.new(
  75. event: Event::Error,
  76. level: Level::Error,
  77. error_class: exception.class.name,
  78. error_message: exception.message,
  79. error_backtrace: exception.backtrace
  80. )
  81. goodjob_module.logger.error(log_entry)
  82. end
  83. end
  84. end
  85. # Subscribe to GoodJob's ActiveSupport notifications
  86. 1 sig { void }
  87. 1 def self.subscribe_to_notifications
  88. return unless defined?(::GoodJob)
  89. # Subscribe to our custom log subscriber for GoodJob events
  90. LogStruct::Integrations::GoodJob::LogSubscriber.attach_to :good_job
  91. end
  92. 1 private_class_method :configure_logger
  93. 1 private_class_method :subscribe_to_notifications
  94. end
  95. end
  96. end

lib/log_struct/integrations/good_job/log_subscriber.rb

98.68% lines covered

76 relevant lines. 75 lines covered and 1 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. begin
  4. 1 require "active_support/log_subscriber"
  5. rescue LoadError
  6. # ActiveSupport is not available, log subscriber will be skipped
  7. end
  8. 1 require_relative "../../log/good_job"
  9. 1 require_relative "../../enums/event"
  10. 1 require_relative "../../enums/level"
  11. 1 module LogStruct
  12. 1 module Integrations
  13. 1 module GoodJob
  14. # LogSubscriber for GoodJob ActiveSupport notifications
  15. #
  16. # This subscriber captures GoodJob's ActiveSupport notifications and converts
  17. # them into structured LogStruct::Log::GoodJob entries. It provides detailed
  18. # logging for job lifecycle events, performance metrics, and error tracking.
  19. #
  20. # ## Supported Events:
  21. # - job.enqueue - Job queued for execution
  22. # - job.start - Job execution started
  23. # - job.finish - Job completed successfully
  24. # - job.error - Job failed with error
  25. # - job.retry - Job retry initiated
  26. # - job.schedule - Job scheduled for future execution
  27. #
  28. # ## Event Data Captured:
  29. # - Job identification (ID, class, queue)
  30. # - Execution context (arguments, priority, scheduled time)
  31. # - Performance metrics (execution time, wait time)
  32. # - Error information (class, message, backtrace)
  33. # - Process and thread information
  34. 1 class LogSubscriber < ::ActiveSupport::LogSubscriber
  35. 1 extend T::Sig
  36. # Job enqueued event
  37. 2 sig { params(event: T.untyped).void }
  38. 1 def enqueue(event)
  39. 2 job_data = extract_job_data(event)
  40. 2 log_entry = LogStruct::Log::GoodJob.new(
  41. event: Event::Enqueue,
  42. level: Level::Info,
  43. job_id: job_data[:job_id],
  44. job_class: job_data[:job_class],
  45. queue_name: job_data[:queue_name],
  46. arguments: job_data[:arguments],
  47. scheduled_at: job_data[:scheduled_at],
  48. priority: job_data[:priority],
  49. execution_time: event.duration,
  50. additional_data: {
  51. enqueue_caller: job_data[:caller_location]
  52. }
  53. )
  54. 2 logger.info(log_entry)
  55. end
  56. # Job execution started event
  57. 2 sig { params(event: T.untyped).void }
  58. 1 def start(event)
  59. 1 job_data = extract_job_data(event)
  60. 1 log_entry = LogStruct::Log::GoodJob.new(
  61. event: Event::Start,
  62. level: Level::Info,
  63. job_id: job_data[:job_id],
  64. job_class: job_data[:job_class],
  65. queue_name: job_data[:queue_name],
  66. arguments: job_data[:arguments],
  67. executions: job_data[:executions],
  68. wait_time: job_data[:wait_time],
  69. scheduled_at: job_data[:scheduled_at],
  70. process_id: ::Process.pid,
  71. thread_id: Thread.current.object_id.to_s(36)
  72. )
  73. 1 logger.info(log_entry)
  74. end
  75. # Job completed successfully event
  76. 2 sig { params(event: T.untyped).void }
  77. 1 def finish(event)
  78. 1 job_data = extract_job_data(event)
  79. 1 log_entry = LogStruct::Log::GoodJob.new(
  80. event: Event::Finish,
  81. level: Level::Info,
  82. job_id: job_data[:job_id],
  83. job_class: job_data[:job_class],
  84. queue_name: job_data[:queue_name],
  85. executions: job_data[:executions],
  86. run_time: event.duration,
  87. finished_at: Time.now,
  88. process_id: ::Process.pid,
  89. thread_id: Thread.current.object_id.to_s(36),
  90. additional_data: {
  91. result: job_data[:result]
  92. }
  93. )
  94. 1 logger.info(log_entry)
  95. end
  96. # Job failed with error event
  97. 2 sig { params(event: T.untyped).void }
  98. 1 def error(event)
  99. 2 job_data = extract_job_data(event)
  100. 2 log_entry = LogStruct::Log::GoodJob.new(
  101. event: Event::Error,
  102. level: Level::Error,
  103. job_id: job_data[:job_id],
  104. job_class: job_data[:job_class],
  105. queue_name: job_data[:queue_name],
  106. executions: job_data[:executions],
  107. exception_executions: job_data[:exception_executions],
  108. error_class: job_data[:error_class],
  109. error_message: job_data[:error_message],
  110. error_backtrace: job_data[:error_backtrace],
  111. run_time: event.duration,
  112. process_id: ::Process.pid,
  113. thread_id: Thread.current.object_id.to_s(36)
  114. )
  115. 2 logger.error(log_entry)
  116. end
  117. # Job scheduled for future execution event
  118. 2 sig { params(event: T.untyped).void }
  119. 1 def schedule(event)
  120. 1 job_data = extract_job_data(event)
  121. 1 log_entry = LogStruct::Log::GoodJob.new(
  122. event: Event::Schedule,
  123. level: Level::Info,
  124. job_id: job_data[:job_id],
  125. job_class: job_data[:job_class],
  126. queue_name: job_data[:queue_name],
  127. arguments: job_data[:arguments],
  128. scheduled_at: job_data[:scheduled_at],
  129. priority: job_data[:priority],
  130. cron_key: job_data[:cron_key],
  131. execution_time: event.duration
  132. )
  133. 1 logger.info(log_entry)
  134. end
  135. 1 private
  136. # Extract job data from ActiveSupport event payload
  137. 2 sig { params(event: T.untyped).returns(T::Hash[Symbol, T.untyped]) }
  138. 1 def extract_job_data(event)
  139. 7 payload = event.payload || {}
  140. 7 job = payload[:job]
  141. 7 execution = payload[:execution] || payload[:good_job_execution]
  142. 7 exception = payload[:exception] || payload[:error]
  143. 7 data = {}
  144. # Basic job information
  145. 7 if job
  146. 6 data[:job_id] = job.job_id if job.respond_to?(:job_id)
  147. 6 data[:job_class] = job.job_class if job.respond_to?(:job_class)
  148. 6 data[:queue_name] = job.queue_name if job.respond_to?(:queue_name)
  149. 6 data[:arguments] = job.arguments if job.respond_to?(:arguments)
  150. 6 data[:priority] = job.priority if job.respond_to?(:priority)
  151. 6 data[:scheduled_at] = job.scheduled_at if job.respond_to?(:scheduled_at)
  152. 6 data[:cron_key] = job.cron_key if job.respond_to?(:cron_key)
  153. 6 data[:caller_location] = job.enqueue_caller_location if job.respond_to?(:enqueue_caller_location)
  154. end
  155. # Execution-specific information
  156. 7 if execution
  157. 3 data[:executions] = execution.executions if execution.respond_to?(:executions)
  158. 3 data[:exception_executions] = execution.exception_executions if execution.respond_to?(:exception_executions)
  159. # Use existing wait_time if available, otherwise calculate it
  160. 3 if execution.respond_to?(:wait_time) && execution.wait_time
  161. 1 data[:wait_time] = execution.wait_time
  162. 2 elsif execution.respond_to?(:created_at)
  163. 2 data[:wait_time] = calculate_wait_time(execution)
  164. end
  165. 3 data[:batch_id] = execution.batch_id if execution.respond_to?(:batch_id)
  166. 3 data[:cron_key] ||= execution.cron_key if execution.respond_to?(:cron_key)
  167. end
  168. # Error information
  169. 7 if exception
  170. 2 data[:error_class] = exception.class.name
  171. 2 data[:error_message] = exception.message
  172. 2 data[:error_backtrace] = exception.backtrace&.first(20) # Limit backtrace size
  173. end
  174. # Result information
  175. 7 data[:result] = payload[:result] if payload.key?(:result)
  176. 7 data
  177. end
  178. # Calculate wait time from job creation to execution start
  179. 2 sig { params(execution: T.untyped).returns(T.nilable(Float)) }
  180. 1 def calculate_wait_time(execution)
  181. 4 return nil unless execution.respond_to?(:created_at)
  182. 4 return nil unless execution.respond_to?(:performed_at)
  183. 4 return nil unless execution.created_at && execution.performed_at
  184. 3 (execution.performed_at - execution.created_at).to_f
  185. rescue
  186. # Return nil if calculation fails
  187. nil
  188. end
  189. # Get the appropriate logger for GoodJob events
  190. 2 sig { returns(T.untyped) }
  191. 1 def logger
  192. # Always use Rails.logger - in production it will be configured by the integration setup,
  193. # in tests it will be set up by the test harness
  194. 7 Rails.logger
  195. end
  196. end
  197. end
  198. end
  199. end

lib/log_struct/integrations/good_job/logger.rb

100.0% lines covered

23 relevant lines. 23 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "../../semantic_logger/logger"
  4. 1 require_relative "../../log/good_job"
  5. 1 require_relative "../../enums/source"
  6. 1 module LogStruct
  7. 1 module Integrations
  8. 1 module GoodJob
  9. # Custom Logger for GoodJob that creates LogStruct::Log::GoodJob entries
  10. #
  11. # This logger extends LogStruct's SemanticLogger to provide optimal logging
  12. # performance while creating structured log entries specifically for GoodJob
  13. # operations and events.
  14. #
  15. # ## Benefits:
  16. # - High-performance logging with SemanticLogger backend
  17. # - Structured GoodJob-specific log entries
  18. # - Automatic job context capture
  19. # - Thread and process information
  20. # - Performance metrics and timing data
  21. #
  22. # ## Usage:
  23. # This logger is automatically configured when the GoodJob integration
  24. # is enabled. It replaces GoodJob.logger to provide structured logging
  25. # for all GoodJob operations.
  26. 1 class Logger < LogStruct::SemanticLogger::Logger
  27. 1 extend T::Sig
  28. # Override log methods to create GoodJob-specific log structs
  29. 1 %i[debug info warn error fatal].each do |level|
  30. 5 define_method(level) do |message = nil, payload = nil, &block|
  31. # Extract basic job context from thread-local variables
  32. 12 job_context = {}
  33. 12 if Thread.current[:good_job_execution]
  34. 2 execution = Thread.current[:good_job_execution]
  35. 2 if execution.respond_to?(:job_id)
  36. 2 job_context[:job_id] = execution.job_id
  37. 2 job_context[:job_class] = execution.job_class if execution.respond_to?(:job_class)
  38. 2 job_context[:queue_name] = execution.queue_name if execution.respond_to?(:queue_name)
  39. 2 job_context[:executions] = execution.executions if execution.respond_to?(:executions)
  40. 2 job_context[:scheduled_at] = execution.scheduled_at if execution.respond_to?(:scheduled_at)
  41. 2 job_context[:priority] = execution.priority if execution.respond_to?(:priority)
  42. end
  43. end
  44. # Create a GoodJob log struct with the context
  45. 12 log_struct = Log::GoodJob.new(
  46. event: Event::Log,
  47. level: LogStruct::Level.from_severity(level.to_s.upcase),
  48. process_id: ::Process.pid,
  49. thread_id: Thread.current.object_id.to_s(36),
  50. job_id: job_context[:job_id],
  51. job_class: job_context[:job_class],
  52. queue_name: job_context[:queue_name],
  53. executions: job_context[:executions],
  54. scheduled_at: job_context[:scheduled_at],
  55. priority: job_context[:priority],
  56. additional_data: {
  57. 1 message: message || (block ? block.call : "")
  58. }
  59. )
  60. # Pass the struct to SemanticLogger
  61. 12 super(log_struct, payload, &nil)
  62. end
  63. end
  64. end
  65. end
  66. end
  67. end

lib/log_struct/integrations/host_authorization.rb

52.17% lines covered

23 relevant lines. 12 lines covered and 11 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require "action_dispatch/middleware/host_authorization"
  4. 1 require_relative "../enums/event"
  5. 1 module LogStruct
  6. 1 module Integrations
  7. # Host Authorization integration for structured logging of blocked hosts
  8. 1 module HostAuthorization
  9. 1 extend T::Sig
  10. 1 extend IntegrationInterface
  11. 1 RESPONSE_HTML = T.let(
  12. "<html><head><title>Blocked Host</title></head><body>" \
  13. "<h1>Blocked Host</h1>" \
  14. "<p>This host is not permitted to access this application.</p>" \
  15. "<p>If you are the administrator, check your configuration.</p>" \
  16. "</body></html>",
  17. String
  18. )
  19. 1 RESPONSE_HEADERS = T.let(
  20. {
  21. "Content-Type" => "text/html",
  22. "Content-Length" => RESPONSE_HTML.bytesize.to_s
  23. }.freeze,
  24. T::Hash[String, String]
  25. )
  26. 1 FORBIDDEN_STATUS = T.let(403, Integer)
  27. # Set up host authorization logging
  28. 1 sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
  29. 1 def self.setup(config)
  30. return nil unless config.enabled
  31. return nil unless config.integrations.enable_host_authorization
  32. # Define the response app as a separate variable to fix block alignment
  33. response_app = lambda do |env|
  34. request = ::ActionDispatch::Request.new(env)
  35. # Include the blocked hosts app configuration in the log entry
  36. # This can be helpful later when reviewing logs.
  37. blocked_hosts = env["action_dispatch.blocked_hosts"]
  38. # Create a security error to be handled
  39. blocked_host_error = ::ActionController::BadRequest.new(
  40. "Blocked host detected: #{request.host}"
  41. )
  42. # Create request context hash
  43. context = {
  44. blocked_host: request.host,
  45. client_ip: request.ip,
  46. x_forwarded_for: request.x_forwarded_for,
  47. http_method: request.method,
  48. path: request.path,
  49. user_agent: request.user_agent,
  50. allowed_hosts: blocked_hosts.allowed_hosts,
  51. allow_ip_hosts: blocked_hosts.allow_ip_hosts
  52. }
  53. # Handle error according to configured mode (log, report, raise)
  54. LogStruct.handle_exception(
  55. blocked_host_error,
  56. source: Source::Security,
  57. context: context
  58. )
  59. # Use pre-defined headers and response if we are only logging or reporting
  60. [FORBIDDEN_STATUS, RESPONSE_HEADERS, [RESPONSE_HTML]]
  61. end
  62. # Replace the default HostAuthorization app with our custom app for logging
  63. Rails.application.config.host_authorization = {
  64. response_app: response_app
  65. }
  66. true
  67. end
  68. end
  69. end
  70. end

lib/log_struct/integrations/integration_interface.rb

100.0% lines covered

8 relevant lines. 8 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 module LogStruct
  4. 1 module Integrations
  5. # Interface that all integrations must implement
  6. # This ensures consistent behavior across all integration modules
  7. 1 module IntegrationInterface
  8. 1 extend T::Sig
  9. 1 extend T::Helpers
  10. # This is an interface that should be implemented by all integration modules
  11. 1 interface!
  12. # All integrations must implement this method to set up their functionality
  13. # @return [Boolean, nil] Returns true if setup was successful, nil if skipped
  14. 2 sig { abstract.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
  15. 1 def setup(config); end
  16. end
  17. end
  18. end

lib/log_struct/integrations/lograge.rb

41.86% lines covered

43 relevant lines. 18 lines covered and 25 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. begin
  4. 1 require "lograge"
  5. rescue LoadError
  6. # Lograge gem is not available, integration will be skipped
  7. end
  8. 1 module LogStruct
  9. 1 module Integrations
  10. # Lograge integration for structured request logging
  11. 1 module Lograge
  12. 1 extend IntegrationInterface
  13. 1 class << self
  14. 1 extend T::Sig
  15. # Set up lograge for structured request logging
  16. 1 sig { override.params(logstruct_config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
  17. 1 def setup(logstruct_config)
  18. return nil unless defined?(::Lograge)
  19. return nil unless logstruct_config.enabled
  20. return nil unless logstruct_config.integrations.enable_lograge
  21. configure_lograge(logstruct_config)
  22. true
  23. end
  24. 1 private_class_method
  25. 1 sig { params(logstruct_config: LogStruct::Configuration).void }
  26. 1 def configure_lograge(logstruct_config)
  27. ::Rails.application.configure do
  28. config.lograge.enabled = true
  29. # Use a raw formatter that just returns the log struct.
  30. # The struct is converted to JSON by our Formatter (after filtering, etc.)
  31. config.lograge.formatter = T.let(
  32. lambda do |data|
  33. # Convert the data hash to a Log::Request struct
  34. Log::Request.new(
  35. source: Source::Rails,
  36. event: Event::Request,
  37. timestamp: Time.now,
  38. http_method: data[:method],
  39. path: data[:path],
  40. format: data[:format],
  41. controller: data[:controller],
  42. action: data[:action],
  43. status: data[:status],
  44. duration: data[:duration],
  45. view: data[:view],
  46. db: data[:db],
  47. params: data[:params]
  48. )
  49. end,
  50. T.proc.params(hash: T::Hash[Symbol, T.untyped]).returns(Log::Request)
  51. )
  52. # Add custom options to lograge
  53. config.lograge.custom_options = lambda do |event|
  54. Integrations::Lograge.lograge_default_options(event)
  55. end
  56. end
  57. end
  58. 1 sig { params(event: ActiveSupport::Notifications::Event).returns(T::Hash[Symbol, T.untyped]) }
  59. 1 def lograge_default_options(event)
  60. # Extract essential fields from the payload
  61. options = event.payload.slice(
  62. :request_id,
  63. :host,
  64. :source_ip
  65. ).compact
  66. if event.payload[:params].present?
  67. options[:params] = event.payload[:params].except("controller", "action")
  68. end
  69. # Process headers if available
  70. process_headers(event, options)
  71. # Apply custom options from application if provided
  72. apply_custom_options(event, options)
  73. options
  74. end
  75. # Process headers from the event payload
  76. 1 sig { params(event: ActiveSupport::Notifications::Event, options: T::Hash[Symbol, T.untyped]).void }
  77. 1 def process_headers(event, options)
  78. headers = event.payload[:headers]
  79. return if headers.blank?
  80. options[:user_agent] = headers["HTTP_USER_AGENT"]
  81. options[:content_type] = headers["CONTENT_TYPE"]
  82. options[:accept] = headers["HTTP_ACCEPT"]
  83. end
  84. # Apply custom options from the application's configuration
  85. 1 sig { params(event: ActiveSupport::Notifications::Event, options: T::Hash[Symbol, T.untyped]).void }
  86. 1 def apply_custom_options(event, options)
  87. custom_options_proc = LogStruct.config.integrations.lograge_custom_options
  88. return unless custom_options_proc&.respond_to?(:call)
  89. # Call the proc with the event and options
  90. # The proc can modify the options hash directly
  91. custom_options_proc.call(event, options)
  92. end
  93. end
  94. end
  95. end
  96. end

lib/log_struct/integrations/rack_error_handler.rb

71.43% lines covered

14 relevant lines. 10 lines covered and 4 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require "rack"
  4. 1 require "action_dispatch/middleware/show_exceptions"
  5. 1 require_relative "rack_error_handler/middleware"
  6. 1 module LogStruct
  7. 1 module Integrations
  8. # Rack middleware integration for structured logging
  9. 1 module RackErrorHandler
  10. 1 extend T::Sig
  11. 1 extend IntegrationInterface
  12. # Set up Rack middleware for structured error logging
  13. 1 sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
  14. 1 def self.setup(config)
  15. return nil unless config.enabled
  16. return nil unless config.integrations.enable_rack_error_handler
  17. # Add structured logging middleware for security violations and errors
  18. # Need to insert after ShowExceptions to catch IP spoofing errors
  19. ::Rails.application.middleware.insert_after(
  20. ::ActionDispatch::ShowExceptions,
  21. Integrations::RackErrorHandler::Middleware
  22. )
  23. true
  24. end
  25. end
  26. end
  27. end

lib/log_struct/integrations/rack_error_handler/middleware.rb

44.74% lines covered

38 relevant lines. 17 lines covered and 21 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 module LogStruct
  4. 1 module Integrations
  5. 1 module RackErrorHandler
  6. # Custom middleware to enhance Rails error logging with JSON format and request details
  7. 1 class Middleware
  8. 1 extend T::Sig
  9. # IP Spoofing error response
  10. 1 IP_SPOOF_HTML = T.let(
  11. "<html><head><title>IP Spoofing Detected</title></head><body>" \
  12. "<h1>Forbidden</h1>" \
  13. "<p>IP spoofing detected. This request has been blocked for security reasons.</p>" \
  14. "</body></html>",
  15. String
  16. )
  17. # CSRF error response
  18. 1 CSRF_HTML = T.let(
  19. "<html><head><title>CSRF Error</title></head><body>" \
  20. "<h1>Forbidden</h1>" \
  21. "<p>Invalid authenticity token. This request has been blocked to prevent cross-site request forgery.</p>" \
  22. "</body></html>",
  23. String
  24. )
  25. # Response headers calculated at load time
  26. 1 IP_SPOOF_HEADERS = T.let(
  27. {
  28. "Content-Type" => "text/html",
  29. "Content-Length" => IP_SPOOF_HTML.bytesize.to_s
  30. }.freeze,
  31. T::Hash[String, String]
  32. )
  33. 1 CSRF_HEADERS = T.let(
  34. {
  35. "Content-Type" => "text/html",
  36. "Content-Length" => CSRF_HTML.bytesize.to_s
  37. }.freeze,
  38. T::Hash[String, String]
  39. )
  40. # HTTP status code for forbidden responses
  41. 1 FORBIDDEN_STATUS = T.let(403, Integer)
  42. 1 sig { params(app: T.untyped).void }
  43. 1 def initialize(app)
  44. @app = app
  45. end
  46. 1 sig { params(env: T.untyped).returns(T.untyped) }
  47. 1 def call(env)
  48. return @app.call(env) unless LogStruct.enabled?
  49. # Try to process the request
  50. begin
  51. @app.call(env)
  52. rescue ::ActionDispatch::RemoteIp::IpSpoofAttackError => ip_spoof_error
  53. # Create a security log for IP spoofing
  54. security_log = Log::Security.new(
  55. event: Event::IPSpoof,
  56. message: ip_spoof_error.message,
  57. # Can't call .remote_ip on the request because that's what raises the error.
  58. # Have to pass the client_ip and x_forwarded_for headers.
  59. client_ip: env["HTTP_CLIENT_IP"],
  60. x_forwarded_for: env["HTTP_X_FORWARDED_FOR"],
  61. path: env["PATH_INFO"],
  62. http_method: env["REQUEST_METHOD"],
  63. user_agent: env["HTTP_USER_AGENT"],
  64. referer: env["HTTP_REFERER"],
  65. request_id: env["action_dispatch.request_id"]
  66. )
  67. # Log the structured data
  68. ::Rails.logger.warn(security_log)
  69. # Report the error
  70. context = extract_request_context(env)
  71. LogStruct.handle_exception(ip_spoof_error, source: Source::Security, context: context)
  72. # If handle_exception raised an exception then Rails will deal with it (e.g. config.exceptions_app)
  73. # If we are only logging or reporting these security errors, then return a default response
  74. [FORBIDDEN_STATUS, IP_SPOOF_HEADERS, [IP_SPOOF_HTML]]
  75. rescue ::ActionController::InvalidAuthenticityToken => invalid_auth_token_error
  76. # Create a security log for CSRF error
  77. request = ::ActionDispatch::Request.new(env)
  78. security_log = Log::Security.new(
  79. event: Event::CSRFViolation,
  80. message: invalid_auth_token_error.message,
  81. path: request.path,
  82. http_method: request.method,
  83. source_ip: request.remote_ip,
  84. user_agent: request.user_agent,
  85. referer: request.referer,
  86. request_id: request.request_id
  87. )
  88. LogStruct.error(security_log)
  89. # Report to error reporting service and/or re-raise
  90. context = extract_request_context(env)
  91. LogStruct.handle_exception(invalid_auth_token_error, source: Source::Security, context: context)
  92. # If handle_exception raised an exception then Rails will deal with it (e.g. config.exceptions_app)
  93. # If we are only logging or reporting these security errors, then return a default response
  94. [FORBIDDEN_STATUS, CSRF_HEADERS, [CSRF_HTML]]
  95. rescue => error
  96. # Extract request context for error reporting
  97. context = extract_request_context(env)
  98. # Create and log a structured exception with request context
  99. exception_log = Log::Error.from_exception(
  100. Source::Rails,
  101. error,
  102. context
  103. )
  104. LogStruct.error(exception_log)
  105. # Re-raise any standard errors to let Rails or error reporter handle it.
  106. # Rails will also log the request details separately
  107. raise error
  108. end
  109. end
  110. 1 private
  111. 1 sig { params(env: T::Hash[String, T.untyped]).returns(T::Hash[Symbol, T.untyped]) }
  112. 1 def extract_request_context(env)
  113. request = ::ActionDispatch::Request.new(env)
  114. {
  115. request_id: request.request_id,
  116. path: request.path,
  117. method: request.method,
  118. user_agent: request.user_agent,
  119. referer: request.referer
  120. }
  121. rescue => error
  122. # If we can't extract request context, return minimal info
  123. {error_extracting_context: error.message}
  124. end
  125. end
  126. end
  127. end
  128. end

lib/log_struct/integrations/shrine.rb

33.33% lines covered

24 relevant lines. 8 lines covered and 16 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. begin
  4. 1 require "shrine"
  5. rescue LoadError
  6. # Shrine gem is not available, integration will be skipped
  7. end
  8. 1 module LogStruct
  9. 1 module Integrations
  10. # Shrine integration for structured logging
  11. 1 module Shrine
  12. 1 extend T::Sig
  13. 1 extend IntegrationInterface
  14. # Set up Shrine structured logging
  15. 1 sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
  16. 1 def self.setup(config)
  17. return nil unless defined?(::Shrine)
  18. return nil unless config.enabled
  19. return nil unless config.integrations.enable_shrine
  20. # Create a structured log subscriber for Shrine
  21. # ActiveSupport::Notifications::Event has name, time, end, transaction_id, payload, and duration
  22. shrine_log_subscriber = T.unsafe(lambda do |event|
  23. payload = event.payload.except(:io, :metadata, :name).dup
  24. # Map event name to Event type
  25. event_type = case event.name
  26. when :upload then Event::Upload
  27. when :download then Event::Download
  28. when :delete then Event::Delete
  29. when :metadata then Event::Metadata
  30. when :exists then Event::Exist # ActiveStorage uses 'exist', may as well use that
  31. else Event::Unknown
  32. end
  33. # Create structured log data
  34. log_data = Log::Shrine.new(
  35. source: Source::Shrine,
  36. event: event_type,
  37. duration: event.duration,
  38. storage: payload[:storage],
  39. location: payload[:location],
  40. uploader: payload[:uploader],
  41. upload_options: payload[:upload_options],
  42. download_options: payload[:download_options],
  43. options: payload[:options],
  44. # Data is flattened by the JSON formatter
  45. additional_data: payload.except(
  46. :storage,
  47. :location,
  48. :uploader,
  49. :upload_options,
  50. :download_options,
  51. :options
  52. )
  53. )
  54. # Pass the structured hash to the logger
  55. # If Rails.logger has our Formatter, it will handle JSON conversion
  56. ::Shrine.logger.info log_data
  57. end)
  58. # Configure Shrine to use our structured log subscriber
  59. ::Shrine.plugin :instrumentation,
  60. events: %i[upload exists download delete],
  61. log_subscriber: shrine_log_subscriber
  62. true
  63. end
  64. end
  65. end
  66. end

lib/log_struct/integrations/sidekiq.rb

52.94% lines covered

17 relevant lines. 9 lines covered and 8 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. begin
  4. 1 require "sidekiq"
  5. rescue LoadError
  6. # Sidekiq gem is not available, integration will be skipped
  7. end
  8. 1 require_relative "sidekiq/logger" if defined?(::Sidekiq)
  9. 1 module LogStruct
  10. 1 module Integrations
  11. # Sidekiq integration for structured logging
  12. 1 module Sidekiq
  13. 1 extend T::Sig
  14. 1 extend IntegrationInterface
  15. # Set up Sidekiq structured logging
  16. 1 sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
  17. 1 def self.setup(config)
  18. return nil unless defined?(::Sidekiq)
  19. return nil unless config.enabled
  20. return nil unless config.integrations.enable_sidekiq
  21. # Configure Sidekiq server (worker) to use our logger
  22. ::Sidekiq.configure_server do |sidekiq_config|
  23. sidekiq_config.logger = LogStruct::Integrations::Sidekiq::Logger.new("Sidekiq-Server")
  24. end
  25. # Configure Sidekiq client (Rails app) to use our logger
  26. ::Sidekiq.configure_client do |sidekiq_config|
  27. sidekiq_config.logger = LogStruct::Integrations::Sidekiq::Logger.new("Sidekiq-Client")
  28. end
  29. true
  30. end
  31. end
  32. end
  33. end

lib/log_struct/integrations/sorbet.rb

42.11% lines covered

19 relevant lines. 8 lines covered and 11 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require "sorbet-runtime"
  4. 1 module LogStruct
  5. 1 module Integrations
  6. # Integration for Sorbet runtime type checking error handlers
  7. # This module installs error handlers that report type errors through LogStruct
  8. # These handlers can be enabled/disabled using configuration
  9. 1 module Sorbet
  10. 1 extend T::Sig
  11. 1 extend IntegrationInterface
  12. # Set up Sorbet error handlers to report errors through LogStruct
  13. 1 sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
  14. 1 def self.setup(config)
  15. return nil unless config.integrations.enable_sorbet_error_handlers
  16. # Install inline type error handler
  17. # Called when T.let, T.cast, T.must, etc. fail
  18. T::Configuration.inline_type_error_handler = lambda do |error, _opts|
  19. LogStruct.handle_exception(error, source: LogStruct::Source::TypeChecking)
  20. end
  21. # Install call validation error handler
  22. # Called when method signature validation fails
  23. T::Configuration.call_validation_error_handler = lambda do |_signature, opts|
  24. error = TypeError.new(opts[:pretty_message])
  25. LogStruct.handle_exception(error, source: LogStruct::Source::TypeChecking)
  26. end
  27. # Install sig builder error handler
  28. # Called when there's a problem with a signature definition
  29. T::Configuration.sig_builder_error_handler = lambda do |error, _location|
  30. LogStruct.handle_exception(error, source: LogStruct::Source::TypeChecking)
  31. end
  32. # Install sig validation error handler
  33. # Called when there's a problem with a signature validation
  34. T::Configuration.sig_validation_error_handler = lambda do |error, _opts|
  35. LogStruct.handle_exception(error, source: LogStruct::Source::TypeChecking)
  36. end
  37. true
  38. end
  39. end
  40. end
  41. end

lib/log_struct/log.rb

94.44% lines covered

18 relevant lines. 17 lines covered and 1 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. # Common Enums
  4. 1 require_relative "enums/source"
  5. 1 require_relative "enums/event"
  6. 1 require_relative "enums/level"
  7. # Log Structs
  8. 1 require_relative "log/carrierwave"
  9. 1 require_relative "log/action_mailer"
  10. 1 require_relative "log/active_storage"
  11. 1 require_relative "log/active_job"
  12. 1 require_relative "log/error"
  13. 1 require_relative "log/good_job"
  14. 1 require_relative "log/plain"
  15. 1 require_relative "log/request"
  16. 1 require_relative "log/security"
  17. 1 require_relative "log/shrine"
  18. 1 require_relative "log/sidekiq"
  19. 1 require_relative "log/sql"
  20. 1 module LogStruct
  21. # Type aliases for all possible log types
  22. # This should be updated whenever a new log type is added
  23. # (Can't use sealed! unless we want to put everything in one giant file.)
  24. 1 LogClassType = T.type_alias do
  25. T.any(
  26. T.class_of(LogStruct::Log::CarrierWave),
  27. T.class_of(LogStruct::Log::ActionMailer),
  28. T.class_of(LogStruct::Log::ActiveStorage),
  29. T.class_of(LogStruct::Log::ActiveJob),
  30. T.class_of(LogStruct::Log::Error),
  31. T.class_of(LogStruct::Log::GoodJob),
  32. T.class_of(LogStruct::Log::Plain),
  33. T.class_of(LogStruct::Log::Request),
  34. T.class_of(LogStruct::Log::Security),
  35. T.class_of(LogStruct::Log::Shrine),
  36. T.class_of(LogStruct::Log::Sidekiq),
  37. T.class_of(LogStruct::Log::SQL)
  38. )
  39. end
  40. end

lib/log_struct/log/action_mailer.rb

100.0% lines covered

34 relevant lines. 34 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "interfaces/common_fields"
  4. 1 require_relative "interfaces/additional_data_field"
  5. 1 require_relative "shared/serialize_common"
  6. 1 require_relative "shared/merge_additional_data_fields"
  7. 1 require_relative "../enums/source"
  8. 1 require_relative "../enums/event"
  9. 1 require_relative "../enums/level"
  10. 1 require_relative "../log_keys"
  11. 1 module LogStruct
  12. 1 module Log
  13. # Email log entry for structured logging
  14. 1 class ActionMailer < T::Struct
  15. 1 extend T::Sig
  16. 1 include Interfaces::CommonFields
  17. 1 include Interfaces::AdditionalDataField
  18. 1 include SerializeCommon
  19. 1 include MergeAdditionalDataFields
  20. 1 ActionMailerEvent = T.type_alias {
  21. 1 T.any(Event::Delivery, Event::Delivered)
  22. }
  23. # Common fields
  24. 1 const :source, Source::Mailer, default: T.let(Source::Mailer, Source::Mailer)
  25. 1 const :event, ActionMailerEvent
  26. 3 const :timestamp, Time, factory: -> { Time.now }
  27. 1 const :level, Level, default: T.let(Level::Info, Level)
  28. # Email-specific fields
  29. 1 const :to, T.nilable(T.any(String, T::Array[String])), default: nil
  30. 1 const :from, T.nilable(String), default: nil
  31. 1 const :subject, T.nilable(String), default: nil
  32. 1 const :additional_data, T::Hash[Symbol, T.untyped], default: {}
  33. # Convert the log entry to a hash for serialization
  34. 2 sig { override.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
  35. 1 def serialize(strict = true)
  36. 2 hash = serialize_common(strict)
  37. 2 merge_additional_data_fields(hash)
  38. # Add email-specific fields if they're present
  39. 2 hash[LOG_KEYS.fetch(:to)] = to if to
  40. 2 hash[LOG_KEYS.fetch(:from)] = from if from
  41. 2 hash[LOG_KEYS.fetch(:subject)] = subject if subject
  42. 2 hash
  43. end
  44. end
  45. end
  46. end

lib/log_struct/log/active_job.rb

78.95% lines covered

38 relevant lines. 30 lines covered and 8 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "interfaces/common_fields"
  4. 1 require_relative "interfaces/additional_data_field"
  5. 1 require_relative "shared/serialize_common"
  6. 1 require_relative "shared/merge_additional_data_fields"
  7. 1 require_relative "../enums/source"
  8. 1 require_relative "../enums/event"
  9. 1 require_relative "../enums/level"
  10. 1 require_relative "../log_keys"
  11. 1 module LogStruct
  12. 1 module Log
  13. # ActiveJob log entry for structured logging
  14. 1 class ActiveJob < T::Struct
  15. 1 extend T::Sig
  16. 1 include Interfaces::CommonFields
  17. 1 include Interfaces::AdditionalDataField
  18. 1 include SerializeCommon
  19. 1 include MergeAdditionalDataFields
  20. 1 ActiveJobEvent = T.type_alias {
  21. 1 T.any(
  22. Event::Enqueue,
  23. Event::Schedule,
  24. Event::Start,
  25. Event::Finish
  26. )
  27. }
  28. # Common fields
  29. 1 const :source, Source::Job, default: T.let(Source::Job, Source::Job)
  30. 1 const :event, ActiveJobEvent
  31. 1 const :timestamp, Time, factory: -> { Time.now }
  32. 1 const :level, Level, default: T.let(Level::Info, Level)
  33. # Job-specific fields
  34. 1 const :job_id, T.nilable(String), default: nil
  35. 1 const :job_class, T.nilable(String), default: nil
  36. 1 const :queue_name, T.nilable(String), default: nil
  37. 1 const :arguments, T.nilable(T::Array[T.untyped]), default: nil
  38. 1 const :duration, T.nilable(Float), default: nil
  39. 1 const :additional_data, T::Hash[Symbol, T.untyped], default: {}
  40. # Convert the log entry to a hash for serialization
  41. 1 sig { override.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
  42. 1 def serialize(strict = true)
  43. hash = serialize_common(strict)
  44. merge_additional_data_fields(hash)
  45. # Add job-specific fields if they're present
  46. hash[LOG_KEYS.fetch(:job_id)] = job_id if job_id
  47. hash[LOG_KEYS.fetch(:job_class)] = job_class if job_class
  48. hash[LOG_KEYS.fetch(:queue_name)] = queue_name if queue_name
  49. hash[LOG_KEYS.fetch(:arguments)] = arguments if arguments
  50. hash[LOG_KEYS.fetch(:duration)] = duration if duration
  51. hash
  52. end
  53. end
  54. end
  55. end

lib/log_struct/log/active_storage.rb

68.09% lines covered

47 relevant lines. 32 lines covered and 15 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "interfaces/common_fields"
  4. 1 require_relative "shared/serialize_common"
  5. 1 require_relative "../enums/source"
  6. 1 require_relative "../enums/event"
  7. 1 require_relative "../enums/level"
  8. 1 module LogStruct
  9. 1 module Log
  10. # ActiveStorage log entry for structured logging
  11. 1 class ActiveStorage < T::Struct
  12. 1 extend T::Sig
  13. 1 include Interfaces::CommonFields
  14. 1 include SerializeCommon
  15. # Define valid event types for ActiveStorage
  16. 1 ActiveStorageEvent = T.type_alias {
  17. 1 T.any(
  18. Event::Upload,
  19. Event::Download,
  20. Event::Delete,
  21. Event::Metadata,
  22. Event::Exist,
  23. Event::Stream,
  24. Event::Url,
  25. Event::Unknown
  26. )
  27. }
  28. # Common fields
  29. 1 const :source, Source::Storage, default: T.let(Source::Storage, Source::Storage)
  30. 1 const :event, ActiveStorageEvent
  31. 1 const :timestamp, Time, factory: -> { Time.now }
  32. 1 const :level, Level, default: T.let(Level::Info, Level)
  33. # ActiveStorage-specific fields
  34. 1 const :operation, T.nilable(Symbol), default: nil
  35. 1 const :storage, T.nilable(String), default: nil
  36. 1 const :file_id, T.nilable(String), default: nil
  37. 1 const :filename, T.nilable(String), default: nil
  38. 1 const :mime_type, T.nilable(String), default: nil
  39. 1 const :size, T.nilable(Integer), default: nil
  40. 1 const :metadata, T.nilable(T::Hash[String, T.untyped]), default: nil
  41. 1 const :duration, T.nilable(Float), default: nil
  42. 1 const :checksum, T.nilable(String), default: nil
  43. 1 const :exist, T.nilable(T::Boolean), default: nil
  44. 1 const :url, T.nilable(String), default: nil
  45. 1 const :prefix, T.nilable(String), default: nil
  46. 1 const :range, T.nilable(String), default: nil
  47. # Convert the log entry to a hash for serialization
  48. 1 sig { override.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
  49. 1 def serialize(strict = true)
  50. hash = serialize_common(strict)
  51. # Add ActiveStorage-specific fields - only include non-nil values
  52. hash[LOG_KEYS.fetch(:operation)] = operation if operation
  53. hash[LOG_KEYS.fetch(:storage)] = storage if storage
  54. hash[LOG_KEYS.fetch(:file_id)] = file_id if file_id
  55. hash[LOG_KEYS.fetch(:filename)] = filename if filename
  56. hash[LOG_KEYS.fetch(:mime_type)] = mime_type if mime_type
  57. hash[LOG_KEYS.fetch(:size)] = size if size
  58. hash[LOG_KEYS.fetch(:metadata)] = metadata if metadata
  59. hash[LOG_KEYS.fetch(:duration)] = duration if duration
  60. hash[LOG_KEYS.fetch(:checksum)] = checksum if checksum
  61. hash[LOG_KEYS.fetch(:exist)] = exist if !exist.nil?
  62. hash[LOG_KEYS.fetch(:url)] = url if url
  63. hash[LOG_KEYS.fetch(:prefix)] = prefix if prefix
  64. hash[LOG_KEYS.fetch(:range)] = range if range
  65. hash
  66. end
  67. end
  68. end
  69. end

lib/log_struct/log/carrierwave.rb

72.0% lines covered

50 relevant lines. 36 lines covered and 14 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "interfaces/common_fields"
  4. 1 require_relative "interfaces/additional_data_field"
  5. 1 require_relative "shared/serialize_common"
  6. 1 require_relative "shared/merge_additional_data_fields"
  7. 1 require_relative "../enums/source"
  8. 1 require_relative "../enums/event"
  9. 1 require_relative "../enums/level"
  10. 1 require_relative "../log_keys"
  11. 1 module LogStruct
  12. 1 module Log
  13. # CarrierWave log entry for structured logging
  14. 1 class CarrierWave < T::Struct
  15. 1 extend T::Sig
  16. 1 include Interfaces::CommonFields
  17. 1 include Interfaces::AdditionalDataField
  18. 1 include SerializeCommon
  19. 1 include MergeAdditionalDataFields
  20. 1 CarrierWaveEvent = T.type_alias {
  21. 1 T.any(
  22. Event::Upload,
  23. Event::Download,
  24. Event::Delete,
  25. Event::Metadata,
  26. Event::Exist,
  27. Event::Unknown
  28. )
  29. }
  30. # Common fields
  31. 1 const :source, Source::CarrierWave, default: T.let(Source::CarrierWave, Source::CarrierWave)
  32. 1 const :event, CarrierWaveEvent
  33. 1 const :timestamp, Time, factory: -> { Time.now }
  34. 1 const :level, Level, default: T.let(Level::Info, Level)
  35. # File-specific fields
  36. 1 const :operation, T.nilable(Symbol), default: nil
  37. 1 const :storage, T.nilable(String), default: nil
  38. 1 const :file_id, T.nilable(String), default: nil
  39. 1 const :filename, T.nilable(String), default: nil
  40. 1 const :mime_type, T.nilable(String), default: nil
  41. 1 const :size, T.nilable(Integer), default: nil
  42. 1 const :metadata, T.nilable(T::Hash[String, T.untyped]), default: nil
  43. 1 const :duration, T.nilable(Float), default: nil
  44. # CarrierWave-specific fields
  45. 1 const :uploader, T.nilable(String), default: nil
  46. 1 const :model, T.nilable(String), default: nil
  47. 1 const :mount_point, T.nilable(String), default: nil
  48. 1 const :additional_data, T::Hash[Symbol, T.untyped], default: {}
  49. # Convert the log entry to a hash for serialization
  50. 1 sig { override.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
  51. 1 def serialize(strict = true)
  52. hash = serialize_common(strict)
  53. merge_additional_data_fields(hash)
  54. # Add file-specific fields if they're present
  55. hash[LOG_KEYS.fetch(:storage)] = storage if storage
  56. hash[LOG_KEYS.fetch(:operation)] = operation if operation
  57. hash[LOG_KEYS.fetch(:file_id)] = file_id if file_id
  58. hash[LOG_KEYS.fetch(:filename)] = filename if filename
  59. hash[LOG_KEYS.fetch(:mime_type)] = mime_type if mime_type
  60. hash[LOG_KEYS.fetch(:size)] = size if size
  61. hash[LOG_KEYS.fetch(:metadata)] = metadata if metadata
  62. hash[LOG_KEYS.fetch(:duration)] = duration if duration
  63. # Add CarrierWave-specific fields if they're present
  64. hash[LOG_KEYS.fetch(:uploader)] = uploader if uploader
  65. hash[LOG_KEYS.fetch(:model)] = model if model
  66. hash[LOG_KEYS.fetch(:mount_point)] = mount_point if mount_point
  67. hash
  68. end
  69. end
  70. end
  71. end

lib/log_struct/log/error.rb

97.5% lines covered

40 relevant lines. 39 lines covered and 1 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "interfaces/common_fields"
  4. 1 require_relative "interfaces/additional_data_field"
  5. 1 require_relative "interfaces/message_field"
  6. 1 require_relative "shared/serialize_common"
  7. 1 require_relative "shared/merge_additional_data_fields"
  8. 1 require_relative "../enums/source"
  9. 1 require_relative "../enums/event"
  10. 1 require_relative "../enums/level"
  11. 1 require_relative "../log_keys"
  12. 1 module LogStruct
  13. 1 module Log
  14. # Exception log entry for Ruby exceptions with class, message, and backtrace
  15. 1 class Error < T::Struct
  16. 1 extend T::Sig
  17. 1 include Interfaces::CommonFields
  18. 1 include Interfaces::AdditionalDataField
  19. 1 include Interfaces::MessageField
  20. 1 include MergeAdditionalDataFields
  21. 1 ErrorEvent = T.type_alias {
  22. 1 Event::Error
  23. }
  24. # Common fields
  25. 1 const :source, Source # Used by all sources, should not have a default.
  26. 1 const :event, ErrorEvent, default: T.let(Event::Error, ErrorEvent)
  27. 6 const :timestamp, Time, factory: -> { Time.now }
  28. 1 const :level, Level, default: T.let(Level::Error, Level)
  29. # Exception-specific fields
  30. 1 const :err_class, T.class_of(StandardError)
  31. 1 const :message, String
  32. 1 const :backtrace, T.nilable(T::Array[String]), default: nil
  33. 1 const :additional_data, T::Hash[Symbol, T.untyped], default: {}
  34. # Convert the log entry to a hash for serialization
  35. 2 sig { override.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
  36. 1 def serialize(strict = true)
  37. 2 hash = serialize_common(strict)
  38. 2 merge_additional_data_fields(hash)
  39. # Add exception-specific fields
  40. 2 hash[LOG_KEYS.fetch(:err_class)] = err_class.name
  41. 2 hash[LOG_KEYS.fetch(:message)] = message
  42. 2 if backtrace.is_a?(Array) && backtrace&.any?
  43. hash[LOG_KEYS.fetch(:backtrace)] = backtrace&.first(10)
  44. end
  45. 2 hash
  46. end
  47. # Create an Error log from a Ruby StandardError
  48. 1 sig {
  49. 1 params(
  50. source: Source,
  51. ex: StandardError,
  52. additional_data: T::Hash[Symbol, T.untyped]
  53. ).returns(Log::Error)
  54. }
  55. 1 def self.from_exception(source, ex, additional_data = {})
  56. 4 new(
  57. source: source,
  58. message: ex.message,
  59. err_class: ex.class,
  60. backtrace: ex.backtrace,
  61. additional_data: additional_data
  62. )
  63. end
  64. end
  65. end
  66. end

lib/log_struct/log/good_job.rb

100.0% lines covered

70 relevant lines. 70 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "interfaces/common_fields"
  4. 1 require_relative "interfaces/additional_data_field"
  5. 1 require_relative "shared/serialize_common"
  6. 1 require_relative "shared/merge_additional_data_fields"
  7. 1 require_relative "../enums/source"
  8. 1 require_relative "../enums/event"
  9. 1 require_relative "../enums/level"
  10. 1 require_relative "../log_keys"
  11. 1 module LogStruct
  12. 1 module Log
  13. # GoodJob log entry for structured logging
  14. #
  15. # GoodJob is a PostgreSQL-based ActiveJob backend that provides reliable,
  16. # scalable job processing for Rails applications. This log class captures
  17. # GoodJob-specific events including job execution, database operations,
  18. # error handling, and performance metrics.
  19. #
  20. # ## Key Features Logged:
  21. # - Job execution lifecycle (enqueue, start, finish, retry)
  22. # - Database-backed job persistence events
  23. # - Error handling and retry logic
  24. # - Job batching and bulk operations
  25. # - Performance metrics and timing data
  26. # - Thread and process information
  27. #
  28. # ## Usage Examples:
  29. #
  30. # ```ruby
  31. # # Job execution logging
  32. # LogStruct::Log::GoodJob.new(
  33. # event: Event::Start,
  34. # job_id: "job_123",
  35. # job_class: "UserNotificationJob",
  36. # queue_name: "default",
  37. # execution_time: 1.5
  38. # )
  39. #
  40. # # Error logging
  41. # LogStruct::Log::GoodJob.new(
  42. # event: Event::Error,
  43. # job_id: "job_123",
  44. # error_class: "StandardError",
  45. # error_message: "Connection failed"
  46. # )
  47. # ```
  48. 1 class GoodJob < T::Struct
  49. 1 extend T::Sig
  50. 1 include Interfaces::CommonFields
  51. 1 include Interfaces::AdditionalDataField
  52. 1 include SerializeCommon
  53. 1 include MergeAdditionalDataFields
  54. # Valid event types for GoodJob operations
  55. 1 GoodJobEvent = T.type_alias {
  56. 1 T.any(
  57. Event::Log, # General logging
  58. Event::Enqueue, # Job queued
  59. Event::Start, # Job execution started
  60. Event::Finish, # Job completed successfully
  61. Event::Error, # Job failed with error
  62. Event::Schedule # Job scheduled for future execution
  63. )
  64. }
  65. # Common fields
  66. 1 const :source, Source::Job, default: T.let(Source::Job, Source::Job)
  67. 1 const :event, GoodJobEvent
  68. 36 const :timestamp, Time, factory: -> { Time.now }
  69. 1 const :level, Level, default: T.let(Level::Info, Level)
  70. # Job identification fields
  71. 1 const :job_id, T.nilable(String), default: nil
  72. 1 const :job_class, T.nilable(String), default: nil
  73. 1 const :queue_name, T.nilable(String), default: nil
  74. 1 const :batch_id, T.nilable(String), default: nil
  75. 1 const :job_label, T.nilable(String), default: nil
  76. # Job execution context
  77. 1 const :arguments, T.nilable(T::Array[T.untyped]), default: nil
  78. 1 const :executions, T.nilable(Integer), default: nil
  79. 1 const :exception_executions, T.nilable(Integer), default: nil
  80. 1 const :execution_time, T.nilable(Float), default: nil
  81. 1 const :scheduled_at, T.nilable(Time), default: nil
  82. # Error information
  83. 1 const :error_class, T.nilable(String), default: nil
  84. 1 const :error_message, T.nilable(String), default: nil
  85. 1 const :error_backtrace, T.nilable(T::Array[String]), default: nil
  86. # GoodJob-specific metadata
  87. 1 const :process_id, T.nilable(Integer), default: nil
  88. 1 const :thread_id, T.nilable(String), default: nil
  89. 1 const :priority, T.nilable(Integer), default: nil
  90. 1 const :cron_key, T.nilable(String), default: nil
  91. 1 const :database_connection_name, T.nilable(String), default: nil
  92. # Performance and metrics
  93. 1 const :wait_time, T.nilable(Float), default: nil
  94. 1 const :run_time, T.nilable(Float), default: nil
  95. 1 const :finished_at, T.nilable(Time), default: nil
  96. # Additional contextual data
  97. 1 const :additional_data, T::Hash[Symbol, T.untyped], default: {}
  98. # Convert the log entry to a hash for serialization
  99. 2 sig { override.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
  100. 1 def serialize(strict = true)
  101. 22 hash = serialize_common(strict)
  102. 22 merge_additional_data_fields(hash)
  103. # Add job identification fields
  104. 22 hash[LOG_KEYS.fetch(:job_id)] = job_id if job_id
  105. 22 hash[LOG_KEYS.fetch(:job_class)] = job_class if job_class
  106. 22 hash[LOG_KEYS.fetch(:queue_name)] = queue_name if queue_name
  107. 22 hash[:batch_id] = batch_id if batch_id
  108. 22 hash[:job_label] = job_label if job_label
  109. # Add execution context
  110. 22 hash[LOG_KEYS.fetch(:arguments)] = arguments if arguments
  111. 22 hash[:executions] = executions if executions
  112. 22 hash[:exception_executions] = exception_executions if exception_executions
  113. 22 hash[:execution_time] = execution_time if execution_time
  114. 22 hash[:scheduled_at] = scheduled_at&.iso8601 if scheduled_at
  115. # Add error information
  116. 22 hash[LOG_KEYS.fetch(:err_class)] = error_class if error_class
  117. 22 hash[:error_message] = error_message if error_message
  118. 22 hash[LOG_KEYS.fetch(:backtrace)] = error_backtrace if error_backtrace
  119. # Add GoodJob-specific metadata
  120. 22 hash[LOG_KEYS.fetch(:process_id)] = process_id if process_id
  121. 22 hash[LOG_KEYS.fetch(:thread_id)] = thread_id if thread_id
  122. 22 hash[:priority] = priority if priority
  123. 22 hash[:cron_key] = cron_key if cron_key
  124. 22 hash[:database_connection_name] = database_connection_name if database_connection_name
  125. # Add performance metrics
  126. 22 hash[:wait_time] = wait_time if wait_time
  127. 22 hash[:run_time] = run_time if run_time
  128. 22 hash[:finished_at] = finished_at&.iso8601 if finished_at
  129. 22 hash
  130. end
  131. end
  132. end
  133. end

lib/log_struct/log/interfaces/additional_data_field.rb

100.0% lines covered

9 relevant lines. 9 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 module LogStruct
  4. 1 module Log
  5. 1 module Interfaces
  6. # Common interface for logs that include an additional_data field
  7. 1 module AdditionalDataField
  8. 1 extend T::Sig
  9. 1 extend T::Helpers
  10. 1 interface!
  11. # Additional data field for extra context
  12. 1 sig { abstract.returns(T::Hash[Symbol, T.untyped]) }
  13. 1 def additional_data; end
  14. end
  15. end
  16. end
  17. end

lib/log_struct/log/interfaces/common_fields.rb

100.0% lines covered

20 relevant lines. 20 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "../../enums/source"
  4. 1 require_relative "../../enums/event"
  5. 1 require_relative "../../enums/level"
  6. 1 module LogStruct
  7. 1 module Log
  8. 1 module Interfaces
  9. # Common interface that all log entry types must implement
  10. 1 module CommonFields
  11. 1 extend T::Sig
  12. 1 extend T::Helpers
  13. 1 interface!
  14. # The source of the log entry (JSON property: src)
  15. 1 sig { abstract.returns(Source) }
  16. 1 def source; end
  17. # The event type of the log entry (JSON property: evt)
  18. 1 sig { abstract.returns(Event) }
  19. 1 def event; end
  20. # The log level (JSON property: lvl)
  21. 1 sig { abstract.returns(Level) }
  22. 1 def level; end
  23. # The timestamp of the log entry (JSON property: ts)
  24. 1 sig { abstract.returns(Time) }
  25. 1 def timestamp; end
  26. # All logs must define a custom serialize method
  27. # If the class is a T::Struct that responds to serialize then we can be sure
  28. # we're getting symbols as keys and don't need to call #serialize.deep_symbolize_keys
  29. 2 sig { abstract.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
  30. 1 def serialize(strict = true); end
  31. end
  32. end
  33. end
  34. end

lib/log_struct/log/interfaces/message_field.rb

100.0% lines covered

9 relevant lines. 9 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 module LogStruct
  4. 1 module Log
  5. 1 module Interfaces
  6. # Common interface for logs that include a message field
  7. 1 module MessageField
  8. 1 extend T::Sig
  9. 1 extend T::Helpers
  10. 1 interface!
  11. # Message field
  12. 1 sig { abstract.returns(T.nilable(String)) }
  13. 1 def message; end
  14. end
  15. end
  16. end
  17. end

lib/log_struct/log/interfaces/request_fields.rb

100.0% lines covered

19 relevant lines. 19 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 module LogStruct
  4. 1 module Log
  5. 1 module Interfaces
  6. # Common interface for request-related fields
  7. # Used by both Request and Security logs
  8. 1 module RequestFields
  9. 1 extend T::Sig
  10. 1 extend T::Helpers
  11. 1 interface!
  12. # Common request fields
  13. 1 sig { abstract.returns(T.nilable(String)) }
  14. 1 def path; end
  15. 1 sig { abstract.returns(T.nilable(String)) }
  16. 1 def http_method; end
  17. 1 sig { abstract.returns(T.nilable(String)) }
  18. 1 def source_ip; end
  19. 1 sig { abstract.returns(T.nilable(String)) }
  20. 1 def user_agent; end
  21. 1 sig { abstract.returns(T.nilable(String)) }
  22. 1 def referer; end
  23. 1 sig { abstract.returns(T.nilable(String)) }
  24. 1 def request_id; end
  25. end
  26. end
  27. end
  28. end

lib/log_struct/log/plain.rb

100.0% lines covered

30 relevant lines. 30 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "interfaces/common_fields"
  4. 1 require_relative "interfaces/additional_data_field"
  5. 1 require_relative "shared/serialize_common"
  6. 1 require_relative "shared/merge_additional_data_fields"
  7. 1 require_relative "../enums/source"
  8. 1 require_relative "../enums/event"
  9. 1 require_relative "../enums/level"
  10. 1 require_relative "../log_keys"
  11. 1 module LogStruct
  12. 1 module Log
  13. # Plain log entry for structured logging
  14. 1 class Plain < T::Struct
  15. 1 extend T::Sig
  16. 1 include Interfaces::CommonFields
  17. 1 include Interfaces::AdditionalDataField
  18. 1 include SerializeCommon
  19. 1 include MergeAdditionalDataFields
  20. 1 PlainEvent = T.type_alias {
  21. 1 Event::Log
  22. }
  23. # Common fields
  24. 1 const :source, Source, default: T.let(Source::App, Source)
  25. 1 const :event, PlainEvent, default: T.let(Event::Log, PlainEvent)
  26. 1 const :level, Level, default: T.let(Level::Info, Level)
  27. 6 const :timestamp, Time, factory: -> { Time.now }
  28. # Plain log messages can be any type (String, Number, Array, Hash, etc.)
  29. # Developers might do something like Rails.logger.info(123) or Rails.logger.info(@variable)
  30. # when debugging, or gems might send all kinds of random stuff to the logger.
  31. # We don't want to crash with a type error in any of these cases.
  32. 1 const :message, T.untyped # rubocop:disable Sorbet/ForbidUntypedStructProps
  33. # Allow people to submit additional data
  34. 1 const :additional_data, T::Hash[Symbol, T.untyped], default: {}
  35. # Convert the log entry to a hash for serialization
  36. 2 sig { override.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
  37. 1 def serialize(strict = true)
  38. 19 hash = serialize_common(strict)
  39. 19 merge_additional_data_fields(hash)
  40. 19 hash[LOG_KEYS.fetch(:message)] = message
  41. 19 hash
  42. end
  43. end
  44. end
  45. end

lib/log_struct/log/request.rb

69.09% lines covered

55 relevant lines. 38 lines covered and 17 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "interfaces/common_fields"
  4. 1 require_relative "interfaces/request_fields"
  5. 1 require_relative "shared/serialize_common"
  6. 1 require_relative "shared/add_request_fields"
  7. 1 require_relative "../enums/source"
  8. 1 require_relative "../enums/event"
  9. 1 require_relative "../enums/level"
  10. 1 require_relative "../log_keys"
  11. 1 module LogStruct
  12. 1 module Log
  13. # Request log entry for structured logging
  14. 1 class Request < T::Struct
  15. 1 extend T::Sig
  16. 1 include Interfaces::CommonFields
  17. 1 include Interfaces::RequestFields
  18. 1 include SerializeCommon
  19. 1 include AddRequestFields
  20. 1 RequestEvent = T.type_alias {
  21. 1 Event::Request
  22. }
  23. # Common fields
  24. 1 const :source, Source::Rails, default: T.let(Source::Rails, Source::Rails)
  25. 1 const :event, RequestEvent, default: T.let(Event::Request, RequestEvent)
  26. 2 const :timestamp, Time, factory: -> { Time.now }
  27. 1 const :level, Level, default: T.let(Level::Info, Level)
  28. # Request-specific fields
  29. # NOTE: `method` is a reserved word, so we use `http_method`
  30. # prop while setting `method` in the serialized output
  31. 1 const :http_method, T.nilable(String), default: nil
  32. 1 const :path, T.nilable(String), default: nil
  33. 1 const :format, T.nilable(String), default: nil
  34. 1 const :controller, T.nilable(String), default: nil
  35. 1 const :action, T.nilable(String), default: nil
  36. 1 const :status, T.nilable(Integer), default: nil
  37. 1 const :duration, T.nilable(Float), default: nil
  38. 1 const :view, T.nilable(Float), default: nil
  39. 1 const :db, T.nilable(Float), default: nil
  40. 1 const :params, T.nilable(T::Hash[Symbol, T.untyped]), default: nil
  41. 1 const :source_ip, T.nilable(String), default: nil
  42. 1 const :user_agent, T.nilable(String), default: nil
  43. 1 const :referer, T.nilable(String), default: nil
  44. 1 const :request_id, T.nilable(String), default: nil
  45. # Convert the log entry to a hash for serialization
  46. 1 sig { override.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
  47. 1 def serialize(strict = true)
  48. hash = serialize_common(strict)
  49. add_request_fields(hash)
  50. hash[LOG_KEYS.fetch(:http_method)] = http_method if http_method
  51. hash[LOG_KEYS.fetch(:path)] = path if path
  52. hash[LOG_KEYS.fetch(:format)] = format if format
  53. hash[LOG_KEYS.fetch(:controller)] = controller if controller
  54. hash[LOG_KEYS.fetch(:action)] = action if action
  55. hash[LOG_KEYS.fetch(:status)] = status if status
  56. hash[LOG_KEYS.fetch(:duration)] = duration if duration
  57. hash[LOG_KEYS.fetch(:view)] = view if view
  58. hash[LOG_KEYS.fetch(:db)] = db if db
  59. hash[LOG_KEYS.fetch(:params)] = params if params
  60. hash[LOG_KEYS.fetch(:source_ip)] = source_ip if source_ip
  61. hash[LOG_KEYS.fetch(:user_agent)] = user_agent if user_agent
  62. hash[LOG_KEYS.fetch(:referer)] = referer if referer
  63. hash[LOG_KEYS.fetch(:request_id)] = request_id if request_id
  64. hash
  65. end
  66. end
  67. end
  68. end

lib/log_struct/log/security.rb

82.35% lines covered

51 relevant lines. 42 lines covered and 9 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "interfaces/common_fields"
  4. 1 require_relative "interfaces/additional_data_field"
  5. 1 require_relative "interfaces/message_field"
  6. 1 require_relative "interfaces/request_fields"
  7. 1 require_relative "shared/add_request_fields"
  8. 1 require_relative "shared/merge_additional_data_fields"
  9. 1 require_relative "shared/serialize_common"
  10. 1 require_relative "../enums/event"
  11. 1 require_relative "../enums/level"
  12. 1 require_relative "../enums/source"
  13. 1 require_relative "../log_keys"
  14. 1 module LogStruct
  15. 1 module Log
  16. # Security log entry for structured logging of security-related events
  17. 1 class Security < T::Struct
  18. 1 extend T::Sig
  19. 1 include Interfaces::CommonFields
  20. 1 include Interfaces::AdditionalDataField
  21. 1 include Interfaces::MessageField
  22. 1 include Interfaces::RequestFields
  23. 1 include SerializeCommon
  24. 1 include AddRequestFields
  25. 1 include MergeAdditionalDataFields
  26. 1 SecurityEvent = T.type_alias {
  27. 1 T.any(
  28. Event::IPSpoof,
  29. Event::CSRFViolation,
  30. Event::BlockedHost
  31. )
  32. }
  33. # Common fields
  34. 1 const :source, Source::Security, default: T.let(Source::Security, Source::Security)
  35. 1 const :event, SecurityEvent
  36. 1 const :timestamp, Time, factory: -> { Time.now }
  37. 1 const :level, Level, default: T.let(Level::Error, Level)
  38. # Security-specific fields
  39. 1 const :message, T.nilable(String), default: nil
  40. 1 const :blocked_host, T.nilable(String), default: nil
  41. 1 const :blocked_hosts, T.nilable(T::Array[String]), default: nil
  42. 1 const :client_ip, T.nilable(String), default: nil
  43. 1 const :x_forwarded_for, T.nilable(String), default: nil
  44. # Additional data (merged into hash)
  45. 1 const :additional_data, T::Hash[Symbol, T.untyped], default: {}
  46. # Common request fields
  47. 1 const :path, T.nilable(String), default: nil
  48. 1 const :http_method, T.nilable(String), default: nil, name: "method"
  49. 1 const :source_ip, T.nilable(String), default: nil
  50. 1 const :user_agent, T.nilable(String), default: nil
  51. 1 const :referer, T.nilable(String), default: nil
  52. 1 const :request_id, T.nilable(String), default: nil
  53. # Convert the log entry to a hash for serialization
  54. 1 sig { override.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
  55. 1 def serialize(strict = true)
  56. hash = serialize_common(strict)
  57. add_request_fields(hash)
  58. merge_additional_data_fields(hash)
  59. # Add security-specific fields
  60. hash[LOG_KEYS.fetch(:message)] = message if message
  61. hash[LOG_KEYS.fetch(:blocked_host)] = blocked_host if blocked_host
  62. hash[LOG_KEYS.fetch(:blocked_hosts)] = blocked_hosts if blocked_hosts
  63. hash[LOG_KEYS.fetch(:client_ip)] = client_ip if client_ip
  64. hash[LOG_KEYS.fetch(:x_forwarded_for)] = x_forwarded_for if x_forwarded_for
  65. hash
  66. end
  67. end
  68. end
  69. end

lib/log_struct/log/shared/add_request_fields.rb

62.5% lines covered

16 relevant lines. 10 lines covered and 6 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "../../log_keys"
  4. 1 require_relative "../interfaces/request_fields"
  5. 1 module LogStruct
  6. 1 module Log
  7. # Common log serialization method
  8. 1 module AddRequestFields
  9. 1 extend T::Sig
  10. 1 extend T::Helpers
  11. 1 requires_ancestor { Interfaces::RequestFields }
  12. # Helper method to serialize request fields
  13. 1 sig { params(hash: T::Hash[Symbol, T.untyped]).void }
  14. 1 def add_request_fields(hash)
  15. # Add request-specific fields if they're present
  16. hash[LOG_KEYS.fetch(:path)] = path if path
  17. hash[LOG_KEYS.fetch(:http_method)] = http_method if http_method # Use `method` in JSON
  18. hash[LOG_KEYS.fetch(:source_ip)] = source_ip if source_ip
  19. hash[LOG_KEYS.fetch(:user_agent)] = user_agent if user_agent
  20. hash[LOG_KEYS.fetch(:referer)] = referer if referer
  21. hash[LOG_KEYS.fetch(:request_id)] = request_id if request_id
  22. end
  23. end
  24. end
  25. end

lib/log_struct/log/shared/merge_additional_data_fields.rb

100.0% lines covered

15 relevant lines. 15 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "../../log_keys"
  4. 1 require_relative "../interfaces/additional_data_field"
  5. 1 require_relative "serialize_common"
  6. 1 module LogStruct
  7. 1 module Log
  8. # Helper module for merging additional data into serialized logs
  9. 1 module MergeAdditionalDataFields
  10. 1 extend T::Sig
  11. 1 extend T::Helpers
  12. 1 include SerializeCommon
  13. 1 requires_ancestor { T::Struct }
  14. 1 requires_ancestor { Interfaces::AdditionalDataField }
  15. 2 sig { params(hash: T::Hash[Symbol, T.untyped]).void }
  16. 1 def merge_additional_data_fields(hash)
  17. 55 additional_data.each do |key, value|
  18. 46 hash[key.to_sym] = value
  19. end
  20. end
  21. end
  22. end
  23. end

lib/log_struct/log/shared/serialize_common.rb

100.0% lines covered

14 relevant lines. 14 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "../../log_keys"
  4. 1 require_relative "../interfaces/common_fields"
  5. 1 module LogStruct
  6. 1 module Log
  7. # Common log serialization method
  8. 1 module SerializeCommon
  9. 1 extend T::Sig
  10. 1 extend T::Helpers
  11. 1 requires_ancestor { Interfaces::CommonFields }
  12. # Convert the log entry to a hash for serialization.
  13. # (strict param is unused, but need same signature as default T::Struct.serialize)
  14. 2 sig { params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
  15. 1 def serialize_common(strict = true)
  16. {
  17. 55 LOG_KEYS.fetch(:source) => source.serialize.to_s,
  18. LOG_KEYS.fetch(:event) => event.serialize.to_s,
  19. LOG_KEYS.fetch(:level) => level.serialize.to_s,
  20. LOG_KEYS.fetch(:timestamp) => timestamp.iso8601(3)
  21. }
  22. end
  23. # Override as_json to use our custom serialize method instead of default T::Struct serialization
  24. 2 sig { params(options: T.untyped).returns(T::Hash[String, T.untyped]) }
  25. 1 def as_json(options = nil)
  26. # Convert symbol keys to strings for JSON
  27. 5 serialize.transform_keys(&:to_s)
  28. end
  29. end
  30. end
  31. end

lib/log_struct/log/shrine.rb

76.19% lines covered

42 relevant lines. 32 lines covered and 10 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "interfaces/common_fields"
  4. 1 require_relative "interfaces/additional_data_field"
  5. 1 require_relative "shared/serialize_common"
  6. 1 require_relative "shared/merge_additional_data_fields"
  7. 1 require_relative "../enums/source"
  8. 1 require_relative "../enums/event"
  9. 1 require_relative "../enums/level"
  10. 1 require_relative "../log_keys"
  11. 1 module LogStruct
  12. 1 module Log
  13. # Shrine log entry for structured logging
  14. 1 class Shrine < T::Struct
  15. 1 extend T::Sig
  16. 1 include Interfaces::CommonFields
  17. 1 include Interfaces::AdditionalDataField
  18. 1 include SerializeCommon
  19. 1 include MergeAdditionalDataFields
  20. 1 ShrineEvent = T.type_alias {
  21. 1 T.any(
  22. Event::Upload,
  23. Event::Download,
  24. Event::Delete,
  25. Event::Metadata,
  26. Event::Exist,
  27. Event::Unknown
  28. )
  29. }
  30. # Common fields
  31. 1 const :source, Source::Shrine, default: T.let(Source::Shrine, Source::Shrine)
  32. 1 const :event, ShrineEvent
  33. 1 const :timestamp, Time, factory: -> { Time.now }
  34. 1 const :level, Level, default: T.let(Level::Info, Level)
  35. # Shrine-specific fields
  36. 1 const :storage, T.nilable(String), default: nil
  37. 1 const :location, T.nilable(String), default: nil
  38. 1 const :upload_options, T.nilable(T::Hash[Symbol, T.untyped]), default: nil
  39. 1 const :download_options, T.nilable(T::Hash[Symbol, T.untyped]), default: nil
  40. 1 const :options, T.nilable(T::Hash[Symbol, T.untyped]), default: nil
  41. 1 const :uploader, T.nilable(String), default: nil
  42. 1 const :duration, T.nilable(Float), default: nil
  43. 1 const :additional_data, T::Hash[Symbol, T.untyped], default: {}
  44. # Convert the log entry to a hash for serialization
  45. 1 sig { override.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
  46. 1 def serialize(strict = true)
  47. hash = serialize_common(strict)
  48. merge_additional_data_fields(hash)
  49. # Add Shrine-specific fields if they're present
  50. hash[LOG_KEYS.fetch(:storage)] = storage if storage
  51. hash[LOG_KEYS.fetch(:location)] = location if location
  52. hash[LOG_KEYS.fetch(:upload_options)] = upload_options if upload_options
  53. hash[LOG_KEYS.fetch(:download_options)] = download_options if download_options
  54. hash[LOG_KEYS.fetch(:options)] = options if options
  55. hash[LOG_KEYS.fetch(:uploader)] = uploader if uploader
  56. hash[LOG_KEYS.fetch(:duration)] = duration if duration
  57. hash
  58. end
  59. end
  60. end
  61. end

lib/log_struct/log/sidekiq.rb

79.31% lines covered

29 relevant lines. 23 lines covered and 6 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "interfaces/common_fields"
  4. 1 require_relative "shared/serialize_common"
  5. 1 require_relative "../enums/source"
  6. 1 require_relative "../enums/event"
  7. 1 require_relative "../enums/level"
  8. 1 require_relative "../log_keys"
  9. 1 module LogStruct
  10. 1 module Log
  11. # Sidekiq log entry for structured logging
  12. 1 class Sidekiq < T::Struct
  13. 1 extend T::Sig
  14. 1 include Interfaces::CommonFields
  15. 1 include SerializeCommon
  16. # Define valid event types for Sidekiq (currently only Log is used)
  17. 2 SidekiqEvent = T.type_alias { Event::Log }
  18. # Common fields
  19. 1 const :source, Source::Sidekiq, default: T.let(Source::Sidekiq, Source::Sidekiq)
  20. 1 const :event, SidekiqEvent, default: T.let(Event::Log, SidekiqEvent)
  21. 1 const :timestamp, Time, factory: -> { Time.now }
  22. 1 const :level, Level, default: T.let(Level::Info, Level)
  23. # Sidekiq-specific fields
  24. 1 const :process_id, T.nilable(Integer), default: nil
  25. 1 const :thread_id, T.nilable(T.any(Integer, String)), default: nil
  26. 1 const :message, T.nilable(String), default: nil
  27. 1 const :context, T.nilable(T::Hash[Symbol, T.untyped]), default: nil
  28. # Convert the log entry to a hash for serialization
  29. 1 sig { override.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
  30. 1 def serialize(strict = true)
  31. hash = serialize_common(strict)
  32. # Add Sidekiq-specific fields if they're present
  33. hash[LOG_KEYS.fetch(:message)] = message if message
  34. hash[LOG_KEYS.fetch(:context)] = context if context
  35. hash[LOG_KEYS.fetch(:process_id)] = process_id if process_id
  36. hash[LOG_KEYS.fetch(:thread_id)] = thread_id if thread_id
  37. hash
  38. end
  39. end
  40. end
  41. end

lib/log_struct/log/sql.rb

100.0% lines covered

48 relevant lines. 48 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "interfaces/common_fields"
  4. 1 require_relative "interfaces/additional_data_field"
  5. 1 require_relative "shared/serialize_common"
  6. 1 require_relative "shared/merge_additional_data_fields"
  7. 1 module LogStruct
  8. 1 module Log
  9. # SQL Query Log Structure
  10. #
  11. # Captures detailed information about SQL queries executed through ActiveRecord.
  12. # This provides structured logging for database operations, including:
  13. # - Query text and operation name
  14. # - Execution timing and performance metrics
  15. # - Row counts and connection information
  16. # - Safely filtered bind parameters
  17. #
  18. # ## Use Cases:
  19. # - Development debugging of N+1 queries
  20. # - Production performance monitoring
  21. # - Database query analysis and optimization
  22. # - Audit trails for data access patterns
  23. #
  24. # ## Security:
  25. # - SQL queries are safe (always parameterized with ?)
  26. # - Bind parameters are filtered through LogStruct's param filters
  27. # - Sensitive data like passwords, tokens are automatically scrubbed
  28. #
  29. # ## Example Usage:
  30. #
  31. # ```ruby
  32. # # Automatically captured when SQL query integration is enabled
  33. # LogStruct.config.integrations.enable_sql_logging = true
  34. #
  35. # # Manual logging (rare)
  36. # sql_log = LogStruct::Log::SQL.new(
  37. # message: "User lookup query",
  38. # sql: "SELECT * FROM users WHERE id = ?",
  39. # name: "User Load",
  40. # duration: 2.3,
  41. # row_count: 1,
  42. # bind_params: [123]
  43. # )
  44. # LogStruct.info(sql_log)
  45. # ```
  46. 1 class SQL < T::Struct
  47. 1 extend T::Sig
  48. 1 include Interfaces::CommonFields
  49. 1 include Interfaces::AdditionalDataField
  50. 1 include SerializeCommon
  51. 1 include MergeAdditionalDataFields
  52. 1 SQLEvent = T.type_alias {
  53. 1 Event::Database
  54. }
  55. # Common fields
  56. 1 const :source, Source, default: T.let(Source::App, Source)
  57. 1 const :event, SQLEvent, default: T.let(Event::Database, SQLEvent)
  58. 1 const :level, Level, default: T.let(Level::Info, Level)
  59. 34 const :timestamp, Time, factory: -> { Time.now }
  60. 1 const :message, String
  61. # The SQL query that was executed (parameterized, safe to log)
  62. 1 const :sql, String
  63. # The name of the database operation (e.g., "User Load", "Post Create")
  64. 1 const :name, String
  65. # Duration of the query execution in milliseconds
  66. 1 const :duration, Float
  67. # Number of rows affected or returned by the query
  68. 1 const :row_count, T.nilable(Integer)
  69. # Database connection information (adapter name)
  70. 1 const :connection_adapter, T.nilable(String)
  71. # Filtered bind parameters (sensitive data removed)
  72. 1 const :bind_params, T.nilable(T::Array[T.untyped])
  73. # Database name (if available)
  74. 1 const :database_name, T.nilable(String)
  75. # Connection pool size information (for monitoring)
  76. 1 const :connection_pool_size, T.nilable(Integer)
  77. # Active connection count (for monitoring)
  78. 1 const :active_connections, T.nilable(Integer)
  79. # SQL operation type (SELECT, INSERT, UPDATE, DELETE, etc.)
  80. 1 const :operation_type, T.nilable(String)
  81. # Table names involved in the query (extracted from SQL)
  82. 1 const :table_names, T.nilable(T::Array[String])
  83. # Allow additional custom data
  84. 1 const :additional_data, T::Hash[Symbol, T.untyped], default: {}
  85. # Convert the log entry to a hash for serialization
  86. 2 sig { override.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
  87. 1 def serialize(strict = true)
  88. 10 hash = serialize_common(strict)
  89. 10 merge_additional_data_fields(hash)
  90. # Add SQL-specific fields using LOG_KEYS mapping for consistency
  91. 10 hash[LOG_KEYS.fetch(:message)] = message
  92. 10 hash[LOG_KEYS.fetch(:sql)] = sql
  93. 10 hash[LOG_KEYS.fetch(:name)] = name
  94. 10 hash[LOG_KEYS.fetch(:duration)] = duration
  95. 10 hash[LOG_KEYS.fetch(:row_count)] = row_count
  96. 10 hash[LOG_KEYS.fetch(:connection_adapter)] = connection_adapter
  97. 10 hash[LOG_KEYS.fetch(:bind_params)] = bind_params
  98. 10 hash[LOG_KEYS.fetch(:database_name)] = database_name
  99. 10 hash[LOG_KEYS.fetch(:connection_pool_size)] = connection_pool_size
  100. 10 hash[LOG_KEYS.fetch(:active_connections)] = active_connections
  101. 10 hash[LOG_KEYS.fetch(:operation_type)] = operation_type
  102. 10 hash[LOG_KEYS.fetch(:table_names)] = table_names
  103. 10 hash
  104. end
  105. end
  106. end
  107. end

lib/log_struct/log_keys.rb

100.0% lines covered

2 relevant lines. 2 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 module LogStruct
  4. # Define a mapping of property names to JSON keys
  5. 1 LOG_KEYS = T.let({
  6. # Ruby struct property name => JSON key name
  7. # Shared fields
  8. source: :src,
  9. event: :evt,
  10. timestamp: :ts,
  11. level: :lvl,
  12. # Common fields
  13. message: :msg,
  14. data: :data,
  15. # Request-related fields
  16. path: :path,
  17. http_method: :method, # Use `http_method` because `method` is a reserved word
  18. source_ip: :source_ip,
  19. user_agent: :user_agent,
  20. referer: :referer,
  21. request_id: :request_id,
  22. # HTTP-specific fields
  23. format: :format,
  24. controller: :controller,
  25. action: :action,
  26. status: :status,
  27. duration: :duration,
  28. view: :view,
  29. db: :db,
  30. params: :params,
  31. # Security-specific fields
  32. blocked_host: :blocked_host,
  33. blocked_hosts: :blocked_hosts,
  34. client_ip: :client_ip,
  35. x_forwarded_for: :x_forwarded_for,
  36. # Email-specific fields
  37. to: :to,
  38. from: :from,
  39. subject: :subject,
  40. # Error fields
  41. err_class: :err_class,
  42. backtrace: :backtrace,
  43. # Job-specific fields
  44. job_id: :job_id,
  45. job_class: :job_class,
  46. queue_name: :queue_name,
  47. arguments: :arguments,
  48. retry_count: :retry_count,
  49. # Sidekiq-specific fields
  50. process_id: :pid,
  51. thread_id: :tid,
  52. context: :ctx,
  53. # Storage-specific fields (ActiveStorage)
  54. checksum: :checksum,
  55. exist: :exist,
  56. url: :url,
  57. prefix: :prefix,
  58. range: :range,
  59. # Storage-specific fields (Shrine)
  60. storage: :storage,
  61. operation: :op,
  62. file_id: :file_id,
  63. filename: :filename,
  64. mime_type: :mime_type,
  65. size: :size,
  66. metadata: :metadata,
  67. location: :location,
  68. upload_options: :upload_opts,
  69. download_options: :download_opts,
  70. options: :opts,
  71. uploader: :uploader,
  72. # CarrierWave-specific fields
  73. model: :model,
  74. mount_point: :mount_point,
  75. # SQL-specific fields
  76. sql: :sql,
  77. name: :name,
  78. row_count: :row_count,
  79. connection_adapter: :connection_adapter,
  80. bind_params: :bind_params,
  81. database_name: :database_name,
  82. connection_pool_size: :connection_pool_size,
  83. active_connections: :active_connections,
  84. operation_type: :operation_type,
  85. table_names: :table_names
  86. }.freeze,
  87. T::Hash[Symbol, Symbol])
  88. end

lib/log_struct/monkey_patches/active_support/tagged_logging/formatter.rb

100.0% lines covered

14 relevant lines. 14 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require "active_support/tagged_logging"
  4. # Monkey-patch ActiveSupport::TaggedLogging::Formatter to support hash inputs
  5. # This allows us to pass structured data to the logger and have tags incorporated
  6. # directly into the hash instead of being prepended as strings
  7. 1 module ActiveSupport
  8. 1 module TaggedLogging
  9. 1 module FormatterExtension
  10. 1 extend T::Sig
  11. 1 extend T::Helpers
  12. 1 requires_ancestor { ::ActiveSupport::TaggedLogging::Formatter }
  13. # Override the call method to support hash input/output, and wrap
  14. # plain strings in a Hash under a `msg` key.
  15. # The data is then passed to our custom log formatter that transforms it
  16. # into a JSON string before logging.
  17. 2 sig { params(severity: T.any(String, Symbol), time: Time, progname: T.untyped, data: T.untyped).returns(String) }
  18. 1 def call(severity, time, progname, data)
  19. # Convert data to a hash if it's not already one
  20. 74 data = {message: data.to_s} unless data.is_a?(Hash)
  21. # Add current tags to the hash if present
  22. 74 tags = current_tags
  23. 74 data[:tags] = tags if tags.present?
  24. # Call the original formatter with our enhanced data
  25. 74 super
  26. end
  27. end
  28. end
  29. end
  30. 1 ActiveSupport::TaggedLogging::Formatter.prepend(ActiveSupport::TaggedLogging::FormatterExtension)

lib/log_struct/multi_error_reporter.rb

81.16% lines covered

69 relevant lines. 56 lines covered and 13 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require_relative "enums/error_reporter"
  4. # Try to require all supported error reporting libraries
  5. # Users may have multiple installed, so we should load all of them
  6. 1 %w[sentry-ruby bugsnag rollbar honeybadger].each do |gem_name|
  7. 4 require gem_name
  8. rescue LoadError
  9. # If a particular gem is not available, we'll still load the others
  10. end
  11. 1 module LogStruct
  12. # MultiErrorReporter provides a unified interface for reporting errors to various services.
  13. # You can also override this with your own error reporter by setting
  14. # LogStruct#.config.error_reporting_handler
  15. # NOTE: This is used for cases where an error should be reported
  16. # but the operation should be allowed to continue (e.g. scrubbing log data.)
  17. 1 class MultiErrorReporter
  18. # Class variable to store the selected reporter
  19. 1 @reporter = T.let(nil, T.nilable(ErrorReporter))
  20. 1 class << self
  21. 1 extend T::Sig
  22. 2 sig { returns(ErrorReporter) }
  23. 1 def reporter
  24. 11 @reporter ||= detect_reporter
  25. end
  26. # Set the reporter to use (user-friendly API that accepts symbols)
  27. 2 sig { params(reporter_type: T.any(ErrorReporter, Symbol)).returns(ErrorReporter) }
  28. 1 def reporter=(reporter_type)
  29. 5 @reporter = case reporter_type
  30. when ErrorReporter
  31. reporter_type
  32. when Symbol
  33. 5 case reporter_type
  34. 1 when :sentry then ErrorReporter::Sentry
  35. 1 when :bugsnag then ErrorReporter::Bugsnag
  36. 1 when :rollbar then ErrorReporter::Rollbar
  37. 1 when :honeybadger then ErrorReporter::Honeybadger
  38. 1 when :rails_logger then ErrorReporter::RailsLogger
  39. else
  40. valid_types = ErrorReporter.values.map { |v| ":#{v.serialize}" }.join(", ")
  41. raise ArgumentError, "Unknown reporter type: #{reporter_type}. Valid types are: #{valid_types}"
  42. end
  43. end
  44. end
  45. # Auto-detect which error reporting service to use
  46. 2 sig { returns(ErrorReporter) }
  47. 1 def detect_reporter
  48. 1 if defined?(::Sentry)
  49. 1 ErrorReporter::Sentry
  50. elsif defined?(::Bugsnag)
  51. ErrorReporter::Bugsnag
  52. elsif defined?(::Rollbar)
  53. ErrorReporter::Rollbar
  54. elsif defined?(::Honeybadger)
  55. ErrorReporter::Honeybadger
  56. else
  57. ErrorReporter::RailsLogger
  58. end
  59. end
  60. # Report an error to the configured error reporting service
  61. 2 sig { params(error: StandardError, context: T::Hash[T.untyped, T.untyped]).void }
  62. 1 def report_error(error, context = {})
  63. # Call the appropriate reporter method based on what's available
  64. 6 case reporter
  65. when ErrorReporter::Sentry
  66. 2 report_to_sentry(error, context)
  67. when ErrorReporter::Bugsnag
  68. 1 report_to_bugsnag(error, context)
  69. when ErrorReporter::Rollbar
  70. 1 report_to_rollbar(error, context)
  71. when ErrorReporter::Honeybadger
  72. 1 report_to_honeybadger(error, context)
  73. else
  74. 1 fallback_logging(error, context)
  75. end
  76. end
  77. 1 private
  78. # Report to Sentry
  79. 2 sig { params(error: StandardError, context: T::Hash[T.untyped, T.untyped]).void }
  80. 1 def report_to_sentry(error, context = {})
  81. 2 return unless defined?(::Sentry)
  82. # Use the proper Sentry interface defined in the RBI
  83. 2 ::Sentry.capture_exception(error, extra: context)
  84. rescue => e
  85. 1 fallback_logging(e, {original_error: error.class.to_s})
  86. end
  87. # Report to Bugsnag
  88. 2 sig { params(error: StandardError, context: T::Hash[T.untyped, T.untyped]).void }
  89. 1 def report_to_bugsnag(error, context = {})
  90. 1 return unless defined?(::Bugsnag)
  91. 1 ::Bugsnag.notify(error) do |report|
  92. 1 report.add_metadata(:context, context)
  93. end
  94. rescue => e
  95. fallback_logging(e, {original_error: error.class.to_s})
  96. end
  97. # Report to Rollbar
  98. 2 sig { params(error: StandardError, context: T::Hash[T.untyped, T.untyped]).void }
  99. 1 def report_to_rollbar(error, context = {})
  100. 1 return unless defined?(::Rollbar)
  101. 1 ::Rollbar.error(error, context)
  102. rescue => e
  103. fallback_logging(e, {original_error: error.class.to_s})
  104. end
  105. # Report to Honeybadger
  106. 2 sig { params(error: StandardError, context: T::Hash[T.untyped, T.untyped]).void }
  107. 1 def report_to_honeybadger(error, context = {})
  108. 1 return unless defined?(::Honeybadger)
  109. 1 ::Honeybadger.notify(error, context: context)
  110. rescue => e
  111. fallback_logging(e, {original_error: error.class.to_s})
  112. end
  113. # Fallback logging when no error reporting services are available
  114. # Uses the LogStruct.error method to properly log the error
  115. 2 sig { params(error: StandardError, context: T::Hash[T.untyped, T.untyped]).void }
  116. 1 def fallback_logging(error, context = {})
  117. 2 return if error.nil?
  118. # Create a proper error log entry
  119. 2 error_log = Log::Error.from_exception(
  120. Source::LogStruct,
  121. error,
  122. context
  123. )
  124. # Use LogStruct.error to properly log the error
  125. 2 LogStruct.error(error_log)
  126. end
  127. end
  128. end
  129. end

lib/log_struct/param_filters.rb

100.0% lines covered

37 relevant lines. 37 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require "digest"
  4. 1 require_relative "hash_utils"
  5. 1 module LogStruct
  6. # This class contains methods for filtering sensitive data in logs
  7. # It is used by Formatter to determine which keys should be filtered
  8. 1 class ParamFilters
  9. 1 class << self
  10. 1 extend T::Sig
  11. # Check if a key should be filtered based on our defined sensitive keys
  12. 2 sig { params(key: T.any(String, Symbol)).returns(T::Boolean) }
  13. 1 def should_filter_key?(key)
  14. 357 LogStruct.config.filters.filter_keys.include?(key.to_s.downcase.to_sym)
  15. end
  16. # Check if a key should be hashed rather than completely filtered
  17. 2 sig { params(key: T.any(String, Symbol)).returns(T::Boolean) }
  18. 1 def should_include_string_hash?(key)
  19. 5 LogStruct.config.filters.filter_keys_with_hashes.include?(key.to_s.downcase.to_sym)
  20. end
  21. # Convert a value to a filtered summary hash (e.g. { _filtered: { class: "String", ... }})
  22. 2 sig { params(key: T.any(String, Symbol), data: T.untyped).returns(T::Hash[Symbol, T.untyped]) }
  23. 1 def summarize_json_attribute(key, data)
  24. 6 case data
  25. when Hash
  26. 1 summarize_hash(data)
  27. when Array
  28. 1 summarize_array(data)
  29. when String
  30. 3 summarize_string(data, should_include_string_hash?(key))
  31. else
  32. 1 {_class: data.class}
  33. end
  34. end
  35. # Summarize a String for logging, including details and an SHA256 hash (if configured)
  36. 2 sig { params(string: String, include_hash: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
  37. 1 def summarize_string(string, include_hash)
  38. filtered_string = {
  39. 5 _class: String
  40. }
  41. 5 if include_hash
  42. 2 filtered_string[:_hash] = HashUtils.hash_value(string)
  43. else
  44. 3 filtered_string[:_bytes] = string.bytesize
  45. end
  46. 5 filtered_string
  47. end
  48. # Summarize a Hash for logging, including details about the size and keys
  49. 2 sig { params(hash: T::Hash[T.untyped, T.untyped]).returns(T::Hash[Symbol, T.untyped]) }
  50. 1 def summarize_hash(hash)
  51. 3 return {_class: "Hash", _empty: true} if hash.empty?
  52. # Don't include byte size if hash contains any filtered keys
  53. 7 has_sensitive_keys = hash.keys.any? { |key| should_filter_key?(key) }
  54. summary = {
  55. 3 _class: Hash,
  56. _keys_count: hash.keys.size,
  57. _keys: hash.keys.map(&:to_sym).take(10)
  58. }
  59. # Only add byte size if no sensitive keys are present
  60. 3 summary[:_bytes] = hash.to_json.bytesize unless has_sensitive_keys
  61. 3 summary
  62. end
  63. # Summarize an Array for logging, including details about the size and items
  64. 2 sig { params(array: T::Array[T.untyped]).returns(T::Hash[Symbol, T.untyped]) }
  65. 1 def summarize_array(array)
  66. 3 return {_class: "Array", _empty: true} if array.empty?
  67. {
  68. 2 _class: Array,
  69. _count: array.size,
  70. _bytes: array.to_json.bytesize
  71. }
  72. end
  73. end
  74. end
  75. end

lib/log_struct/railtie.rb

61.54% lines covered

13 relevant lines. 8 lines covered and 5 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require "rails"
  4. 1 require "semantic_logger"
  5. 1 require_relative "formatter"
  6. 1 require_relative "semantic_logger/setup"
  7. 1 module LogStruct
  8. # Railtie to integrate with Rails
  9. 1 class Railtie < ::Rails::Railtie
  10. # Configure early, right after logger initialization
  11. 1 initializer "logstruct.configure_logger", after: :initialize_logger do |app|
  12. next unless LogStruct.enabled?
  13. # Use SemanticLogger for powerful logging features
  14. LogStruct::SemanticLogger::Setup.configure_semantic_logger(app)
  15. end
  16. # Setup all integrations after logger setup is complete
  17. 1 initializer "logstruct.setup", before: :build_middleware_stack do |app|
  18. next unless LogStruct.enabled?
  19. # Merge Rails filter parameters into our filters
  20. LogStruct.merge_rails_filter_parameters!
  21. # Set up all integrations
  22. Integrations.setup_integrations
  23. end
  24. end
  25. end

lib/log_struct/semantic_logger/color_formatter.rb

73.81% lines covered

84 relevant lines. 62 lines covered and 22 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require "semantic_logger"
  4. 1 require_relative "formatter"
  5. 1 module LogStruct
  6. 1 module SemanticLogger
  7. # Development-Optimized Colorized JSON Formatter
  8. #
  9. # This formatter extends SemanticLogger's Color formatter to provide beautiful,
  10. # readable JSON output in development environments. It significantly improves
  11. # the developer experience when working with structured logs.
  12. #
  13. # ## Benefits of Colorized Output:
  14. #
  15. # ### Readability
  16. # - **Syntax highlighting**: JSON keys, values, and data types are color-coded
  17. # - **Visual hierarchy**: Different colors help identify structure at a glance
  18. # - **Error spotting**: Quickly identify malformed data or unexpected values
  19. # - **Context separation**: Log entries are visually distinct from each other
  20. #
  21. # ### Performance in Development
  22. # - **Faster debugging**: Quickly scan logs without reading every character
  23. # - **Pattern recognition**: Colors help identify common log patterns
  24. # - **Reduced cognitive load**: Less mental effort required to parse log output
  25. # - **Improved workflow**: Spend less time reading logs, more time coding
  26. #
  27. # ### Customization
  28. # - **Configurable colors**: Customize colors for keys, strings, numbers, etc.
  29. # - **Environment-aware**: Automatically disabled in production/CI environments
  30. # - **Fallback support**: Gracefully falls back to standard formatting if needed
  31. #
  32. # ## Color Mapping:
  33. # - **Keys**: Yellow - Easy to spot field names
  34. # - **Strings**: Green - Clear indication of text values
  35. # - **Numbers**: Blue - Numeric values stand out
  36. # - **Booleans**: Magenta - true/false values are distinctive
  37. # - **Null**: Red - Missing values are immediately visible
  38. # - **Logger names**: Cyan - Source identification
  39. #
  40. # ## Integration with SemanticLogger:
  41. # This formatter preserves all SemanticLogger benefits (performance, threading,
  42. # reliability) while adding visual enhancements. It processes LogStruct types,
  43. # hashes, and plain messages with appropriate colorization.
  44. #
  45. # The formatter is automatically enabled in development when `enable_color_output`
  46. # is true (default), providing zero-configuration enhanced logging experience.
  47. 1 class ColorFormatter < ::SemanticLogger::Formatters::Color
  48. 1 extend T::Sig
  49. 2 sig { params(color_map: T.nilable(T::Hash[Symbol, Symbol]), args: T.untyped).void }
  50. 1 def initialize(color_map: nil, **args)
  51. 8 super(**args)
  52. 8 @logstruct_formatter = T.let(LogStruct::Formatter.new, LogStruct::Formatter)
  53. # Set up custom color mapping
  54. 8 @custom_colors = T.let(color_map || default_color_map, T::Hash[Symbol, Symbol])
  55. end
  56. 2 sig { override.params(log: ::SemanticLogger::Log, logger: T.untyped).returns(String) }
  57. 1 def call(log, logger)
  58. # Handle LogStruct types specially with colorization
  59. 6 if log.payload.is_a?(LogStruct::Log::Interfaces::CommonFields)
  60. # Get the LogStruct formatted JSON
  61. logstruct_json = @logstruct_formatter.call(log.level, log.time, log.name, log.payload)
  62. # Parse and colorize it
  63. begin
  64. parsed_data = T.let(JSON.parse(logstruct_json), T::Hash[String, T.untyped])
  65. colorized_json = colorize_json(parsed_data)
  66. # Use SemanticLogger's prefix formatting but with our colorized content
  67. prefix = format("%<time>s %<level>s [%<process>s] %<name>s -- ",
  68. time: format_time(log.time),
  69. level: format_level(log.level),
  70. process: log.process_info,
  71. name: format_name(log.name))
  72. "#{prefix}#{colorized_json}\n"
  73. rescue JSON::ParserError
  74. # Fallback to standard formatting
  75. super
  76. end
  77. 6 elsif log.payload.is_a?(Hash) || log.payload.is_a?(T::Struct)
  78. # Process hashes through our formatter then colorize
  79. begin
  80. 4 logstruct_json = @logstruct_formatter.call(log.level, log.time, log.name, log.payload)
  81. 4 parsed_data = T.let(JSON.parse(logstruct_json), T::Hash[String, T.untyped])
  82. 4 colorized_json = colorize_json(parsed_data)
  83. 4 prefix = format("%<time>s %<level>s [%<process>s] %<name>s -- ",
  84. time: format_time(log.time),
  85. level: format_level(log.level),
  86. process: log.process_info,
  87. name: format_name(log.name))
  88. 4 "#{prefix}#{colorized_json}\n"
  89. rescue JSON::ParserError
  90. # Fallback to standard formatting
  91. super
  92. end
  93. else
  94. # For plain messages, use SemanticLogger's default colorization
  95. 2 super
  96. end
  97. end
  98. 1 private
  99. 1 sig { returns(LogStruct::Formatter) }
  100. 1 attr_reader :logstruct_formatter
  101. # Default color mapping for LogStruct JSON
  102. 2 sig { returns(T::Hash[Symbol, Symbol]) }
  103. 1 def default_color_map
  104. 7 {
  105. key: :yellow,
  106. string: :green,
  107. number: :blue,
  108. bool: :magenta,
  109. nil: :red,
  110. name: :cyan
  111. }
  112. end
  113. # Simple JSON colorizer that adds ANSI codes
  114. 2 sig { params(data: T::Hash[String, T.untyped]).returns(String) }
  115. 1 def colorize_json(data)
  116. # For now, just return a simple colorized version of the JSON
  117. # This is much simpler than the full recursive approach
  118. 4 json_str = JSON.pretty_generate(data)
  119. # Apply basic colorization with regex
  120. 4 json_str.gsub(/"([^"]+)":/, colorize_text('\1', :key) + ":")
  121. .gsub(/: "([^"]*)"/, ": " + colorize_text('\1', :string))
  122. .gsub(/: (\d+\.?\d*)/, ": " + colorize_text('\1', :number))
  123. .gsub(/: (true|false)/, ": " + colorize_text('\1', :bool))
  124. .gsub(": null", ": " + colorize_text("null", :nil))
  125. end
  126. # Add ANSI color codes to text
  127. 2 sig { params(text: String, color_type: Symbol).returns(String) }
  128. 1 def colorize_text(text, color_type)
  129. 20 color = @custom_colors[color_type] || :white
  130. 20 "\e[#{color_code_for(color)}m#{text}\e[0m"
  131. end
  132. # Format timestamp
  133. 2 sig { params(time: Time).returns(String) }
  134. 1 def format_time(time)
  135. 6 time.strftime("%Y-%m-%d %H:%M:%S.%6N")
  136. end
  137. # Format log level with color
  138. 2 sig { params(level: T.any(String, Symbol)).returns(String) }
  139. 1 def format_level(level)
  140. 4 level_str = level.to_s.upcase[0]
  141. 4 color = level_color_for(level.to_sym)
  142. 4 "\e[#{color_code_for(color)}m#{level_str}\e[0m"
  143. end
  144. # Format logger name with color
  145. 2 sig { params(name: T.nilable(String)).returns(String) }
  146. 1 def format_name(name)
  147. 4 return "" unless name
  148. 4 color = @custom_colors[:name] || :cyan
  149. 4 "\e[#{color_code_for(color)}m#{name}\e[0m"
  150. end
  151. # Get color for log level
  152. 2 sig { params(level: Symbol).returns(Symbol) }
  153. 1 def level_color_for(level)
  154. 4 case level
  155. when :debug then :magenta
  156. 4 when :info then :cyan
  157. when :warn then :yellow
  158. when :error then :red
  159. when :fatal then :red
  160. else :cyan
  161. end
  162. end
  163. # Get ANSI color code for color symbol
  164. 2 sig { params(color: Symbol).returns(String) }
  165. 1 def color_code_for(color)
  166. 28 case color
  167. when :black then "30"
  168. 4 when :red then "31"
  169. 4 when :green then "32"
  170. 3 when :yellow then "33"
  171. 4 when :blue then "34"
  172. 3 when :magenta then "35"
  173. 8 when :cyan then "36"
  174. 2 when :white then "37"
  175. when :bright_black then "90"
  176. when :bright_red then "91"
  177. when :bright_green then "92"
  178. when :bright_yellow then "93"
  179. when :bright_blue then "94"
  180. when :bright_magenta then "95"
  181. when :bright_cyan then "96"
  182. when :bright_white then "97"
  183. else "37" # default to white
  184. end
  185. end
  186. end
  187. end
  188. end

lib/log_struct/semantic_logger/formatter.rb

92.31% lines covered

26 relevant lines. 24 lines covered and 2 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require "semantic_logger"
  4. 1 require_relative "../formatter"
  5. 1 module LogStruct
  6. 1 module SemanticLogger
  7. # High-Performance JSON Formatter with LogStruct Integration
  8. #
  9. # This formatter extends SemanticLogger's JSON formatter to provide optimal
  10. # JSON serialization performance while preserving all LogStruct features
  11. # including data filtering, sensitive data scrubbing, and type-safe structures.
  12. #
  13. # ## Performance Advantages Over Rails Logger:
  14. #
  15. # ### Serialization Performance
  16. # - **Direct JSON generation**: Bypasses intermediate object creation
  17. # - **Streaming serialization**: Memory-efficient processing of large objects
  18. # - **Type-optimized paths**: Fast serialization for common data types
  19. # - **Zero-copy operations**: Minimal memory allocation during serialization
  20. #
  21. # ### Memory Efficiency
  22. # - **Object reuse**: Formatter instances are reused across log calls
  23. # - **Lazy evaluation**: Only processes data that will be included in output
  24. # - **Efficient buffering**: Optimal buffer sizes for JSON generation
  25. # - **Garbage collection friendly**: Minimal object allocation reduces GC pressure
  26. #
  27. # ### Integration Benefits
  28. # - **LogStruct compatibility**: Native support for typed log structures
  29. # - **Filter preservation**: Maintains all LogStruct filtering capabilities
  30. # - **Scrubbing integration**: Seamless sensitive data scrubbing
  31. # - **Error handling**: Robust handling of serialization errors
  32. #
  33. # ## Feature Preservation:
  34. # This formatter maintains full compatibility with LogStruct's features:
  35. # - Sensitive data filtering (passwords, tokens, etc.)
  36. # - Recursive object scrubbing and processing
  37. # - Type-safe log structure handling
  38. # - Custom field transformations
  39. # - Metadata preservation and enrichment
  40. #
  41. # ## JSON Output Structure:
  42. # The formatter produces consistent, parseable JSON that includes:
  43. # - Standard log fields (timestamp, level, message, logger name)
  44. # - LogStruct-specific fields (source, event, context)
  45. # - SemanticLogger metadata (process ID, thread ID, tags)
  46. # - Application-specific payload data
  47. #
  48. # This combination provides the performance benefits of SemanticLogger with
  49. # the structured data benefits of LogStruct, resulting in faster, more
  50. # reliable logging for high-traffic applications.
  51. 1 class Formatter < ::SemanticLogger::Formatters::Json
  52. 1 extend T::Sig
  53. 2 sig { void }
  54. 1 def initialize
  55. 32 super
  56. 32 @logstruct_formatter = T.let(LogStruct::Formatter.new, LogStruct::Formatter)
  57. end
  58. 2 sig { params(log: ::SemanticLogger::Log, logger: T.untyped).returns(String) }
  59. 1 def call(log, logger)
  60. # Handle LogStruct types specially - they get wrapped in payload hash by SemanticLogger
  61. 24 if log.payload.is_a?(Hash) && log.payload[:payload].is_a?(LogStruct::Log::Interfaces::CommonFields)
  62. # Use our formatter to process LogStruct types
  63. 20 @logstruct_formatter.call(log.level, log.time, log.name, log.payload[:payload])
  64. 4 elsif log.payload.is_a?(LogStruct::Log::Interfaces::CommonFields)
  65. # Direct LogStruct (fallback case)
  66. @logstruct_formatter.call(log.level, log.time, log.name, log.payload)
  67. 4 elsif log.payload.is_a?(Hash) && log.payload[:payload].is_a?(T::Struct)
  68. # T::Struct wrapped in payload hash
  69. @logstruct_formatter.call(log.level, log.time, log.name, log.payload[:payload])
  70. 4 elsif log.payload.is_a?(Hash) || log.payload.is_a?(T::Struct)
  71. # Process hashes and T::Structs through our formatter
  72. 1 @logstruct_formatter.call(log.level, log.time, log.name, log.payload)
  73. else
  74. # For plain messages, create a Plain log entry
  75. 3 message_data = log.payload || log.message
  76. 3 plain_log = LogStruct::Log::Plain.new(
  77. message: message_data,
  78. timestamp: log.time
  79. )
  80. 3 @logstruct_formatter.call(log.level, log.time, log.name, plain_log)
  81. end
  82. end
  83. 1 private
  84. 1 sig { returns(LogStruct::Formatter) }
  85. 1 attr_reader :logstruct_formatter
  86. end
  87. end
  88. end

lib/log_struct/semantic_logger/logger.rb

77.78% lines covered

36 relevant lines. 28 lines covered and 8 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require "semantic_logger"
  4. 1 module LogStruct
  5. 1 module SemanticLogger
  6. # High-Performance Logger with LogStruct Integration
  7. #
  8. # This logger extends SemanticLogger::Logger to provide optimal logging performance
  9. # while seamlessly integrating with LogStruct's typed logging system.
  10. #
  11. # ## Key Benefits Over Rails.logger:
  12. #
  13. # ### Performance
  14. # - **10-100x faster** than Rails' default logger for high-volume applications
  15. # - **Non-blocking I/O**: Uses background threads for actual log writes
  16. # - **Minimal memory allocation**: Efficient object reuse and zero-copy operations
  17. # - **Batched writes**: Reduces system calls by batching multiple log entries
  18. #
  19. # ### Reliability
  20. # - **Thread-safe operations**: Safe for use in multi-threaded environments
  21. # - **Error resilience**: Logger failures don't crash your application
  22. # - **Graceful fallbacks**: Continues operating even if appenders fail
  23. #
  24. # ### Features
  25. # - **Structured logging**: Native support for LogStruct types and hashes
  26. # - **Rich metadata**: Automatic inclusion of process ID, thread ID, timestamps
  27. # - **Tagged context**: Hierarchical tagging for request/job tracking
  28. # - **Multiple destinations**: Simultaneously log to files, STDOUT, cloud services
  29. #
  30. # ### Development Experience
  31. # - **Colorized output**: Beautiful ANSI-colored logs in development
  32. # - **Detailed timing**: Built-in measurement of log processing time
  33. # - **Context preservation**: Maintains Rails.logger compatibility
  34. #
  35. # ## Usage Examples
  36. #
  37. # The logger automatically handles LogStruct types, hashes, and plain messages:
  38. #
  39. # ```ruby
  40. # logger = LogStruct::SemanticLogger::Logger.new("MyApp")
  41. #
  42. # # LogStruct typed logging (optimal performance)
  43. # log_entry = LogStruct::Log::Plain.new(
  44. # message: "User authenticated",
  45. # source: LogStruct::Source::App,
  46. # event: LogStruct::Event::Security
  47. # )
  48. # logger.info(log_entry)
  49. #
  50. # # Hash logging (automatically structured)
  51. # logger.info({
  52. # action: "user_login",
  53. # user_id: 123,
  54. # ip_address: "192.168.1.1"
  55. # })
  56. #
  57. # # Plain string logging (backward compatibility)
  58. # logger.info("User logged in successfully")
  59. # ```
  60. #
  61. # The logger is a drop-in replacement for Rails.logger and maintains full
  62. # API compatibility while providing significantly enhanced performance.
class Logger < ::SemanticLogger::Logger
  extend T::Sig

  # Builds a logger with a Rails-friendly keyword interface while delegating
  # to SemanticLogger's positional-argument constructor.
  sig { params(name: T.any(String, Symbol, Module, T::Class[T.anything]), level: T.nilable(Symbol), filter: T.untyped).void }
  def initialize(name = "Application", level: nil, filter: nil)
    # SemanticLogger::Logger expects positional arguments, not named arguments
    super(name, level, filter)
  end

  # Override log methods to handle LogStruct types: when the first argument is
  # structured data (a LogStruct log, any T::Struct, or a Hash), shift it into
  # the payload slot so SemanticLogger serializes it as structured data rather
  # than stringifying it as the message.
  %i[debug info warn error fatal].each do |level|
    define_method(level) do |message = nil, payload = nil, &block|
      # If message is a LogStruct type, use it as payload
      if message.is_a?(LogStruct::Log::Interfaces::CommonFields) ||
          message.is_a?(T::Struct) ||
          message.is_a?(Hash)
        payload = message
        message = nil
        # NOTE(review): this branch passes `payload:` as a keyword argument
        # while the else branch passes it positionally — presumably both are
        # accepted by the installed SemanticLogger version; worth confirming,
        # since under Ruby 3 keyword/positional conversion rules differ.
        super(message, payload: payload, &block)
      else
        # For plain string messages, pass them through normally
        super(message, payload, &block)
      end
    end
  end

  # Support for tagged logging (Rails' `logger.tagged("X") { ... }` API).
  sig { params(tags: T.untyped, block: T.proc.returns(T.untyped)).returns(T.untyped) }
  def tagged(*tags, &block)
    # Convert tags to array and pass individually to avoid splat issues
    tag_array = tags.flatten
    if tag_array.empty?
      super(&block)
    else
      super(*T.unsafe(tag_array), &block)
    end
  end

  # Ensure compatibility with Rails.logger interface: exposes the currently
  # active tag stack.
  sig { returns(T::Array[T.any(String, Symbol)]) }
  def current_tags
    ::SemanticLogger.tags
  end

  # Removes every currently pushed tag.
  sig { void }
  def clear_tags!
    # SemanticLogger doesn't have clear_tags!, use pop_tags instead
    count = ::SemanticLogger.tags.length
    ::SemanticLogger.pop_tags(count) if count > 0
  end

  # Pushes tags one at a time (flattened, nils removed) and returns the array
  # of tags that were pushed, mirroring ActiveSupport::TaggedLogging.
  sig { params(tags: T.untyped).returns(T::Array[T.untyped]) }
  def push_tags(*tags)
    flat = tags.flatten.compact
    flat.each { |tag| ::SemanticLogger.push_tags(tag) }
    flat
  end

  # Pops `count` tags off the current tag stack (default: one).
  sig { params(count: Integer).void }
  def pop_tags(count = 1)
    ::SemanticLogger.pop_tags(count)
  end
end
  119. end
  120. end

lib/log_struct/semantic_logger/setup.rb

34.48% lines covered

58 relevant lines. 20 lines covered and 38 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require "semantic_logger"
  4. 1 require_relative "formatter"
  5. 1 require_relative "color_formatter"
  6. 1 require_relative "logger"
  7. 1 module LogStruct
  8. # SemanticLogger Integration
  9. #
  10. # LogStruct uses SemanticLogger as its core logging engine, providing significant
  11. # performance and functionality benefits over Rails' default logger:
  12. #
  13. # ## Performance Benefits
  14. # - **Asynchronous logging**: Logs are written in a background thread, eliminating
  15. # I/O blocking in your main application threads
  16. # - **High throughput**: Can handle 100,000+ log entries per second
  17. # - **Memory efficient**: Structured data processing with minimal allocations
  18. # - **Zero-copy serialization**: Direct JSON generation without intermediate objects
  19. #
  20. # ## Reliability Benefits
  21. # - **Thread-safe**: All operations are thread-safe by design
  22. # - **Graceful degradation**: Continues logging even if appenders fail
  23. # - **Error isolation**: Logging errors don't crash your application
  24. # - **Buffered writes**: Reduces disk I/O with intelligent batching
  25. #
  26. # ## Feature Benefits
  27. # - **Multiple appenders**: Log to files, STDOUT, databases, cloud services simultaneously
  28. # - **Structured metadata**: Rich context including process ID, thread ID, tags, and more
  29. # - **Log filtering**: Runtime filtering by logger name, level, or custom rules
  30. # - **Formatters**: Pluggable output formatting (JSON, colorized, custom)
  31. # - **Metrics integration**: Built-in performance metrics and timing data
  32. #
  33. # ## Development Experience
  34. # - **Colorized output**: Beautiful, readable logs in development with ANSI colors
  35. # - **Tagged logging**: Hierarchical context tracking (requests, jobs, etc.)
  36. # - **Debugging tools**: Detailed timing and memory usage information
  37. # - **Hot reloading**: Configuration changes without application restart
  38. #
  39. # ## Production Benefits
  40. # - **Log rotation**: Automatic file rotation with size/time-based policies
  41. # - **Compression**: Automatic log compression to save disk space
  42. # - **Cloud integration**: Direct integration with CloudWatch, Splunk, etc.
  43. # - **Alerting**: Built-in support for error alerting and monitoring
  44. #
  45. # ## LogStruct Specific Enhancements
  46. # - **Type safety**: Full Sorbet type annotations for compile-time error detection
  47. # - **Structured data**: Native support for LogStruct's typed log structures
  48. # - **Filtering integration**: Seamless integration with LogStruct's data filters
  49. # - **Error handling**: Enhanced error reporting with full stack traces and context
  50. #
  51. # SemanticLogger is a production-grade logging framework used by companies processing
  52. # millions of requests per day. It provides the performance and reliability needed
  53. # for high-traffic Rails applications while maintaining an elegant developer experience.
  54. 1 module SemanticLogger
  55. # Handles setup and configuration of SemanticLogger for Rails applications
  56. #
  57. # This module provides the core integration between LogStruct and SemanticLogger,
  58. # configuring appenders, formatters, and logger replacement to provide optimal
  59. # logging performance while maintaining full compatibility with Rails conventions.
  60. 1 module Setup
  61. 1 extend T::Sig
  62. # Configures SemanticLogger as the primary logging engine for the Rails application
  63. #
  64. # This method replaces Rails' default logger with SemanticLogger, providing:
  65. # - **10-100x performance improvement** for high-volume logging
  66. # - **Non-blocking I/O** through background thread processing
  67. # - **Enhanced reliability** with graceful error handling
  68. # - **Multiple output destinations** (files, STDOUT, cloud services)
  69. # - **Structured metadata** including process/thread IDs and timing
  70. #
  71. # The configuration automatically:
  72. # - Determines optimal log levels based on environment
  73. # - Sets up appropriate appenders (console, file, etc.)
  74. # - Enables colorized output in development
  75. # - Replaces Rails.logger and component loggers
  76. # - Preserves full Rails.logger API compatibility
  77. #
  78. # @param app [Rails::Application] The Rails application instance
  79. 1 sig { params(app: T.untyped).void }
  80. 1 def self.configure_semantic_logger(app)
  81. # Set SemanticLogger configuration
  82. ::SemanticLogger.application = Rails.application.class.module_parent_name
  83. ::SemanticLogger.environment = Rails.env
  84. # Determine log level from Rails config
  85. log_level = determine_log_level(app)
  86. ::SemanticLogger.default_level = log_level
  87. # Clear existing appenders
  88. ::SemanticLogger.clear_appenders!
  89. # Add appropriate appenders based on environment
  90. add_appenders(app)
  91. # Replace Rails.logger with SemanticLogger
  92. replace_rails_logger(app)
  93. end
  94. 1 sig { params(app: T.untyped).returns(Symbol) }
  95. 1 def self.determine_log_level(app)
  96. if app.config.log_level
  97. app.config.log_level
  98. elsif Rails.env.production?
  99. :info
  100. elsif Rails.env.test?
  101. :warn
  102. else
  103. :debug
  104. end
  105. end
  106. 1 sig { params(app: T.untyped).void }
  107. 1 def self.add_appenders(app)
  108. config = LogStruct.config
  109. # Determine output destination
  110. io = determine_output(app)
  111. if Rails.env.development? && config.integrations.enable_color_output
  112. # Use our colorized LogStruct formatter for development
  113. ::SemanticLogger.add_appender(
  114. io: io,
  115. formatter: LogStruct::SemanticLogger::ColorFormatter.new(
  116. color_map: config.integrations.color_map
  117. ),
  118. filter: determine_filter
  119. )
  120. else
  121. # Use our custom JSON formatter
  122. ::SemanticLogger.add_appender(
  123. io: io,
  124. formatter: LogStruct::SemanticLogger::Formatter.new,
  125. filter: determine_filter
  126. )
  127. end
  128. # Add file appender if configured and not already logging to STDOUT/StringIO
  129. if app.config.paths["log"].first && io != $stdout && !io.is_a?(StringIO)
  130. ::SemanticLogger.add_appender(
  131. file_name: app.config.paths["log"].first,
  132. formatter: LogStruct::SemanticLogger::Formatter.new,
  133. filter: determine_filter
  134. )
  135. end
  136. end
  137. 1 sig { params(app: T.untyped).returns(T.any(IO, StringIO)) }
  138. 1 def self.determine_output(app)
  139. if ENV["RAILS_LOG_TO_STDOUT"].present?
  140. $stdout
  141. elsif Rails.env.test?
  142. # Use StringIO for tests to avoid cluttering test output
  143. StringIO.new
  144. else
  145. # Prefer file logging when not explicitly configured for STDOUT
  146. $stdout
  147. end
  148. end
  149. 1 sig { returns(T.nilable(Regexp)) }
  150. 1 def self.determine_filter
  151. # Filter out noisy loggers if configured
  152. config = LogStruct.config
  153. return nil unless config.integrations.filter_noisy_loggers
  154. # Common noisy loggers to filter
  155. /\A(ActionView|ActionController::RoutingError|ActiveRecord::SchemaMigration)/
  156. end
  157. # Replaces Rails.logger and all component loggers with LogStruct's SemanticLogger
  158. #
  159. # This method provides seamless integration by replacing the default Rails logger
  160. # throughout the entire Rails stack, ensuring all logging flows through the
  161. # high-performance SemanticLogger system.
  162. #
  163. # ## Benefits of Complete Logger Replacement:
  164. # - **Consistent performance**: All Rails components benefit from SemanticLogger speed
  165. # - **Unified formatting**: All logs use the same structured JSON format
  166. # - **Centralized configuration**: Single point of control for all logging
  167. # - **Complete compatibility**: Maintains all Rails.logger API contracts
  168. #
  169. # ## Components Updated:
  170. # - Rails.logger (framework core)
  171. # - ActiveRecord::Base.logger (database queries)
  172. # - ActionController::Base.logger (request processing)
  173. # - ActionMailer::Base.logger (email delivery)
  174. # - ActiveJob::Base.logger (background jobs)
  175. # - ActionView::Base.logger (template rendering)
  176. # - ActionCable.server.config.logger (WebSocket connections)
  177. #
  178. # After replacement, all Rails logging maintains API compatibility while gaining
  179. # SemanticLogger's performance, reliability, and feature benefits.
  180. #
  181. # @param app [Rails::Application] The Rails application instance
  182. 1 sig { params(app: T.untyped).void }
  183. 1 def self.replace_rails_logger(app)
  184. # Create new SemanticLogger instance
  185. logger = LogStruct::SemanticLogger::Logger.new("Rails")
  186. # Replace Rails.logger
  187. Rails.logger = logger
  188. # Also replace various component loggers
  189. ActiveRecord::Base.logger = logger if defined?(ActiveRecord::Base)
  190. ActionController::Base.logger = logger if defined?(ActionController::Base)
  191. ActionMailer::Base.logger = logger if defined?(ActionMailer::Base)
  192. ActiveJob::Base.logger = logger if defined?(ActiveJob::Base)
  193. ActionView::Base.logger = logger if defined?(ActionView::Base)
  194. ActionCable.server.config.logger = logger if defined?(ActionCable)
  195. # Store reference in app config
  196. app.config.logger = logger
  197. end
  198. end
  199. end
  200. end

lib/log_struct/sorbet.rb

100.0% lines covered

2 relevant lines. 2 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. # Note: We use T::Struct for our Log classes so Sorbet is a hard requirement,
  4. # not an optional dependency.
  5. 1 require "sorbet-runtime"
  6. 1 require "log_struct/sorbet/serialize_symbol_keys"
  7. # Don't extend T::Sig to all modules! We're just a library, not a private Rails application
  8. # See: https://sorbet.org/docs/sigs
  9. # class Module
  10. # include T::Sig
  11. # end

lib/log_struct/sorbet/serialize_symbol_keys.rb

83.33% lines covered

12 relevant lines. 10 lines covered and 2 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 module LogStruct
  4. 1 module Sorbet
  5. 1 module SerializeSymbolKeys
  6. 1 extend T::Sig
  7. 1 extend T::Helpers
  8. 1 requires_ancestor { T::Struct }
  9. 1 sig { params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
  10. 1 def serialize(strict = true)
  11. super.deep_symbolize_keys
  12. end
  13. 1 sig { returns(T::Hash[Symbol, T.untyped]) }
  14. 1 def to_h
  15. serialize
  16. end
  17. end
  18. end
  19. end

lib/log_struct/string_scrubber.rb

100.0% lines covered

39 relevant lines. 39 lines covered and 0 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. 1 require "digest"
  4. 1 module LogStruct
  5. # StringScrubber is inspired by logstop by @ankane: https://github.com/ankane/logstop
  6. # Enhancements:
  7. # - Shows which type of data was filtered
  8. # - Includes an SHA256 hash with filtered emails for request tracing
  9. # - Uses configuration options from LogStruct.config
  10. 1 module StringScrubber
  11. 1 class << self
  12. 1 extend T::Sig
  13. # Also supports URL-encoded URLs like https%3A%2F%2Fuser%3Asecret%40example.com
  14. # cspell:ignore Fuser Asecret
  15. 1 URL_PASSWORD_REGEX = /((?:\/\/|%2F%2F)[^\s\/]+(?::|%3A))[^\s\/]+(@|%40)/
  16. 1 URL_PASSWORD_REPLACEMENT = '\1[PASSWORD]\2'
  17. 1 EMAIL_REGEX = /\b[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,}\b/i
  18. 1 CREDIT_CARD_REGEX_SHORT = /\b[3456]\d{15}\b/
  19. 1 CREDIT_CARD_REGEX_DELIMITERS = /\b[3456]\d{3}[\s-]\d{4}[\s-]\d{4}[\s-]\d{4}\b/
  20. 1 CREDIT_CARD_REPLACEMENT = "[CREDIT_CARD]"
  21. 1 PHONE_REGEX = /\b\d{3}[\s-]\d{3}[\s-]\d{4}\b/
  22. 1 PHONE_REPLACEMENT = "[PHONE]"
  23. 1 SSN_REGEX = /\b\d{3}[\s-]\d{2}[\s-]\d{4}\b/
  24. 1 SSN_REPLACEMENT = "[SSN]"
  25. 1 IP_REGEX = /\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b/
  26. 1 IP_REPLACEMENT = "[IP]"
  27. 1 MAC_REGEX = /\b[0-9a-f]{2}(:[0-9a-f]{2}){5}\b/i
  28. 1 MAC_REPLACEMENT = "[MAC]"
  29. # Scrub sensitive information from a string
  30. 2 sig { params(string: String).returns(String) }
  31. 1 def scrub(string)
  32. 286 return string if string.empty?
  33. 286 string = string.to_s.dup
  34. 286 config = LogStruct.config.filters
  35. # Passwords in URLs
  36. 286 string.gsub!(URL_PASSWORD_REGEX, URL_PASSWORD_REPLACEMENT) if config.url_passwords
  37. # Emails
  38. 286 if config.email_addresses
  39. 285 string.gsub!(EMAIL_REGEX) do |email|
  40. 11 email_hash = HashUtils.hash_value(email)
  41. 11 "[EMAIL:#{email_hash}]"
  42. end
  43. end
  44. # Credit card numbers
  45. 286 if config.credit_card_numbers
  46. 285 string.gsub!(CREDIT_CARD_REGEX_SHORT, CREDIT_CARD_REPLACEMENT)
  47. 285 string.gsub!(CREDIT_CARD_REGEX_DELIMITERS, CREDIT_CARD_REPLACEMENT)
  48. end
  49. # Phone numbers
  50. 286 string.gsub!(PHONE_REGEX, PHONE_REPLACEMENT) if config.phone_numbers
  51. # SSNs
  52. 286 string.gsub!(SSN_REGEX, SSN_REPLACEMENT) if config.ssns
  53. # IPs
  54. 286 string.gsub!(IP_REGEX, IP_REPLACEMENT) if config.ip_addresses
  55. # MAC addresses
  56. 286 string.gsub!(MAC_REGEX, MAC_REPLACEMENT) if config.mac_addresses
  57. # Custom scrubber
  58. 286 custom_scrubber = LogStruct.config.string_scrubbing_handler
  59. 286 string = custom_scrubber.call(string) if !custom_scrubber.nil?
  60. 286 string
  61. end
  62. end
  63. end
  64. end

tools/log_types_exporter.rb

85.61% lines covered

285 relevant lines. 244 lines covered and 41 lines missed.
    
  1. # typed: strict
  2. # frozen_string_literal: true
  3. # cspell:ignore _tnilable
  4. # rubocop:disable Sorbet/ConstantsFromStrings
  5. # Load LogStruct type definitions
  6. 1 require_relative "../lib/log_struct"
  7. 1 require "json"
  8. 1 require "fileutils"
  9. 1 require "time"
  10. 1 module LogStruct
  11. 1 module Tools
  12. 1 class LogTypesExporter
  13. 1 extend T::Sig
  14. 1 DEFAULT_OUTPUT_TS_FILE = "site/lib/log-generation/log-types.ts"
  15. # Constructor with optional override for log struct classes (for testing)
  16. 2 sig { params(output_ts_file: String, log_struct_classes: T.nilable(T::Array[T::Class[T::Struct]])).void }
  17. 1 def initialize(output_ts_file = DEFAULT_OUTPUT_TS_FILE, log_struct_classes = nil)
  18. 14 @output_ts_file = output_ts_file
  19. 14 @log_struct_classes = log_struct_classes
  20. end
  21. # Public method to export TypeScript definitions and JSON key mappings to files
  22. 2 sig { void }
  23. 1 def export
  24. # Get the data once and reuse for all exports
  25. 1 data = generate_data
  26. # Export TypeScript definitions
  27. 1 puts "Exporting LogStruct types to TypeScript..."
  28. 1 puts "Output file: #{@output_ts_file}"
  29. # Create output directory if needed
  30. 1 FileUtils.mkdir_p(File.dirname(@output_ts_file))
  31. # Generate the TypeScript content
  32. 1 content = generate_typescript(data)
  33. # Write to file
  34. 1 File.write(@output_ts_file, content)
  35. 1 puts "Exported log types to #{@output_ts_file}"
  36. # Export LOG_KEYS mapping to JSON
  37. 1 export_keys_to_json
  38. # Export enums and log structs to JSON
  39. 1 export_data_to_json(data)
  40. end
  41. # Export LOG_KEYS mapping to a JSON file
  42. 2 sig { params(output_json_file: T.nilable(String)).void }
  43. 1 def export_keys_to_json(output_json_file = nil)
  44. # Default to the same directory as the TypeScript file
  45. 1 output_json_file ||= File.join(File.dirname(@output_ts_file), "log-keys.json")
  46. 1 puts "Exporting LogStruct key mappings to JSON..."
  47. 1 puts "Output file: #{output_json_file}"
  48. # Create output directory if needed
  49. 1 FileUtils.mkdir_p(File.dirname(output_json_file))
  50. # Convert LOG_KEYS to a format suitable for JSON
  51. # - Convert keys from symbols to strings
  52. # - Convert values from symbols to strings
  53. 1 json_keys = LogStruct::LOG_KEYS.transform_keys(&:to_s).transform_values(&:to_s)
  54. # Write to file with pretty formatting
  55. 1 File.write(output_json_file, JSON.pretty_generate(json_keys))
  56. 1 puts "Exported key mappings to #{output_json_file}"
  57. end
  58. # Export both enums and log structs to JSON files
  59. 2 sig { params(data: T::Hash[Symbol, T.untyped]).void }
  60. 1 def export_data_to_json(data)
  61. # Export enums to JSON
  62. 1 export_enums_to_json(data[:enums])
  63. # Export log structs to JSON
  64. 1 export_log_structs_to_json(data[:logs])
  65. end
  66. # Export Sorbet enums to a JSON file
  67. 2 sig { params(enums_data: T::Hash[Symbol, T::Array[String]], output_json_file: T.nilable(String)).void }
  68. 1 def export_enums_to_json(enums_data, output_json_file = nil)
  69. # Default to the same directory as the TypeScript file
  70. 2 output_json_file ||= File.join(File.dirname(@output_ts_file), "sorbet-enums.json")
  71. 2 puts "Exporting Sorbet enums to JSON..."
  72. 2 puts "Output file: #{output_json_file}"
  73. # Create output directory if needed
  74. 2 FileUtils.mkdir_p(File.dirname(output_json_file))
  75. # Format enum data for JSON
  76. 2 json_enum_data = {}
  77. # For each enum, get the full class name and values
  78. 2 T::Enum.subclasses
  79. 14 .select { |klass| klass.name.to_s.start_with?("LogStruct::") }
  80. .each do |enum_class|
  81. # Get the full enum name (e.g., "LogStruct::Level")
  82. 14 full_name = enum_class.name.to_s
  83. # Get the simple name (e.g., "Level")
  84. 14 simple_name = full_name.split("::").last
  85. # Skip if we don't have data for this enum
  86. 14 next unless simple_name && enums_data.key?(simple_name.to_sym)
  87. # Map enum values to their constant names
  88. 14 values_with_names = enum_class.values.map do |value|
  89. 826 constant_name = enum_class.constants.find { |const_name| enum_class.const_get(const_name) == value }&.to_s
  90. 106 serialized = value.serialize
  91. # Return a hash with name and value
  92. {
  93. 106 name: constant_name,
  94. value: serialized
  95. }
  96. end
  97. # Add to the JSON data
  98. 14 json_enum_data[full_name] = values_with_names
  99. end
  100. # Write to file with pretty formatting
  101. 2 File.write(output_json_file, JSON.pretty_generate(json_enum_data))
  102. 2 puts "Exported Sorbet enums to #{output_json_file}"
  103. end
  104. # Export LogStruct log structs to a JSON file
  105. 2 sig { params(logs_data: T::Hash[String, T::Hash[Symbol, T.untyped]], output_json_file: T.nilable(String)).void }
  106. 1 def export_log_structs_to_json(logs_data, output_json_file = nil)
  107. # Default to the same directory as the TypeScript file
  108. 2 output_json_file ||= File.join(File.dirname(@output_ts_file), "sorbet-log-structs.json")
  109. 2 puts "Exporting LogStruct log structs to JSON..."
  110. 2 puts "Output file: #{output_json_file}"
  111. # Create output directory if needed
  112. 2 FileUtils.mkdir_p(File.dirname(output_json_file))
  113. # Format structs data for JSON
  114. 2 json_structs_data = {}
  115. # Process each log struct class
  116. 2 logs_data.each do |struct_name, struct_info|
  117. # Get the full class name
  118. 24 full_name = "LogStruct::Log::#{struct_name}"
  119. # Add to the structs data
  120. 24 json_structs_data[full_name] = {
  121. name: struct_name,
  122. fields: struct_info[:fields].transform_keys(&:to_s)
  123. }
  124. end
  125. # Write to file with pretty formatting
  126. 2 File.write(output_json_file, JSON.pretty_generate(json_structs_data))
  127. 2 puts "Exported LogStruct log structs to #{output_json_file}"
  128. end
  129. # Public method to generate TypeScript definitions as a string
  130. # This is the method we can test easily without file I/O
  131. 2 sig { returns(String) }
  132. 1 def generate_typescript_definitions
  133. # Get the data
  134. 6 data = generate_data
  135. # Transform data to TypeScript
  136. 6 generate_typescript(data)
  137. end
  138. 2 sig { returns(T::Hash[Symbol, T.untyped]) }
  139. 1 def generate_data
  140. # Export everything as a hash
  141. {
  142. # Export all enum values from LogStruct module
  143. 9 enums: export_enums,
  144. # Export log structs
  145. logs: export_log_structs
  146. }
  147. end
  148. # Find and export all T::Enum subclasses in the LogStruct module
  149. 2 sig { returns(T::Hash[Symbol, T::Array[String]]) }
  150. 1 def export_enums
  151. 10 enum_hash = {}
  152. # Find all T::Enum subclasses in the LogStruct module
  153. 10 T::Enum.subclasses
  154. 70 .select { |klass| klass.name.to_s.start_with?("LogStruct::") }
  155. .each do |enum_class|
  156. # Extract enum name (last part of the class name)
  157. 70 enum_name = enum_class.name.to_s.split("::").last&.to_sym
  158. 70 next if enum_name.nil? # Skip if we couldn't get a valid name
  159. # Add enum values to the hash
  160. 70 enum_hash[enum_name] = enum_class.values.map(&:serialize)
  161. end
  162. 10 enum_hash
  163. end
  164. 1 private :generate_data
# Renders the exported data snapshot into a complete TypeScript source file:
# enum declarations, a LogType enum, per-log interfaces, a Log union type,
# and per-log-type event arrays. Returns the file contents as one string.
sig { params(data: T::Hash[Symbol, T.untyped]).returns(String) }
def generate_typescript(data)
  ts_content = []
  # Add file header (We need 'any' for a lot of unstructured Hashes and Arrays)
  ts_content << "/* eslint-disable @typescript-eslint/no-explicit-any */"
  ts_content << "// Auto-generated TypeScript definitions for LogStruct"
  ts_content << "// Generated on #{Time.now.strftime("%Y-%m-%d %H:%M:%S")}"
  ts_content << ""
  # Add enum definitions
  ts_content << "// Enum types"
  data[:enums].each do |enum_name, enum_values|
    ts_content << "export enum #{enum_name} {"
    enum_values.sort.each do |value|
      ts_content << " #{value.upcase} = \"#{value}\","
    end
    ts_content << "}"
    ts_content << ""
  end
  # Add LogType enum (one member per exported log struct)
  ts_content << "// Log Types"
  ts_content << "export enum LogType {"
  data[:logs].keys.sort.each do |log_type|
    ts_content << " #{log_type.upcase} = \"#{log_type}\","
  end
  ts_content << "}"
  ts_content << ""
  # Add array of all log types for iteration
  ts_content << "// Array of all log types for iteration"
  ts_content << "export const AllLogTypes: Array<LogType> = ["
  data[:logs].keys.sort.each do |log_type|
    ts_content << " LogType.#{log_type.upcase},"
  end
  ts_content << "];"
  ts_content << ""
  # Add interface for each log type
  ts_content << "// Log Interfaces"
  # Collect all event union types to generate arrays later
  event_arrays = {}
  data[:logs].each do |log_type, log_info|
    ts_content << "export interface #{log_type}Log {"
    # Collect valid event types if this log has an enum_union for events
    event_field_info = log_info[:fields][:event]
    if event_field_info &&
        event_field_info[:type] == "enum_union" &&
        event_field_info[:base_enum] == "Event" &&
        event_field_info[:enum_values]&.any?
      event_arrays[log_type] = event_field_info[:enum_values].map do |value|
        # Map Ruby enum names to TypeScript enum values (e.g., "IPSpoof" -> "Event.IP_SPOOF")
        case value
        when "IPSpoof" then "Event.IP_SPOOF"
        when "CSRFViolation" then "Event.CSRF_VIOLATION"
        else
          # Default conversion of StudlyCaps to SCREAMING_SNAKE_CASE
          "Event.#{value.gsub(/([a-z])([A-Z])/, '\1_\2').upcase}"
        end
      end
    end
    # Output all fields with types; optional fields get a "?" suffix
    log_info[:fields].each do |field_name, field_info|
      type_str = typescript_type_for(field_info)
      optional = field_info[:optional] ? "?" : ""
      ts_content << " #{field_name}#{optional}: #{type_str};"
    end
    ts_content << "}"
    ts_content << ""
  end
  # Add union type for all logs
  ts_content << "// Union type for all logs"
  ts_content << "export type Log ="
  log_types = data[:logs].keys.sort.map { |type| " | #{type}Log" }
  ts_content << log_types.join("\n")
  ts_content << ";"
  ts_content << ""
  # Add event arrays for each log type that has an enum_union
  ts_content << "// Event type arrays for log types"
  event_arrays.each do |log_type, event_values|
    # Create a type-safe array with a specific union type for each log type's events
    union_type = event_values.join(" | ")
    ts_content << "export const #{log_type}Events: Array<#{union_type}> = ["
    event_values.each do |event|
      ts_content << " #{event},"
    end
    ts_content << "];"
    ts_content << ""
  end
  # Return the TypeScript content as a string
  ts_content.join("\n")
end

private :generate_typescript
  254. 2 sig { returns(T::Hash[String, T::Hash[Symbol, T.untyped]]) }
  255. 1 def export_log_structs
  256. 9 result = {}
  257. # Get all log structs using reflection
  258. 9 T::Struct.subclasses
  259. 171 .select { |klass| klass.name.to_s.start_with?("LogStruct::Log::") }
  260. .each do |log_class|
  261. # Extract class name (e.g., "Request" from "LogStruct::Log::Request")
  262. 108 class_name = log_class.name.to_s.split("::").last
  263. # Export fields with their types
  264. 108 fields = {}
  265. 108 log_class.props.each do |field_name, prop_info|
  266. # Use http_method -> method conversion for Request
  267. 1458 field_key = field_name
  268. 1458 field_key = :method if field_name == :http_method && class_name == "Request"
  269. # Get type information
  270. 1458 type_info = extract_type_info(prop_info)
  271. # Add to fields
  272. 1458 fields[field_key] = type_info
  273. end
  274. # Add to result
  275. 108 result[class_name] = {fields: fields}
  276. end
  277. 9 result
  278. end
# Translates one Sorbet prop definition into a language-neutral type-info
# hash ({optional:, type:, ...}) used by the TypeScript/JSON exporters.
# The if/elsif branch ORDER below is load-bearing: more specific checks
# (TypedHash, single enum values, unions) must run before the generic
# substring checks near the bottom.
sig { params(prop_info: T::Hash[Symbol, T.untyped]).returns(T::Hash[Symbol, T.untyped]) }
def extract_type_info(prop_info)
  # Extract type information from prop_info
  type_obj = prop_info[:type]
  type_str = type_obj.to_s
  # Check for TypedHash specifically (handles metadata field correctly)
  if type_obj.is_a?(T::Types::TypedHash) || type_obj.instance_of?(::T::Types::TypedHash)
    # :_tnilable is Sorbet's internal nilability flag on the prop
    return {optional: prop_info[:_tnilable] || false, type: "object"}
  end
  # Check if this is optional (nilable) — substring check on the rendered type
  is_optional = type_str.include?("T.nilable")
  # Basic type information
  result = {optional: is_optional}
  # Check for direct enum values (single value restriction case)
  # For example: const :source, Source::Job, default: T.let(Source::Job, Source::Job)
  if type_obj.is_a?(T::Enum) || type_obj.class&.ancestors&.include?(T::Enum)
    # This is a direct reference to a specific enum value (not a type)
    # Extract the enum class and the specific value
    enum_class = type_obj.class
    enum_name = enum_class.name.to_s.split("::").last
    # Get the enum value name by finding which constant in the enum class has this value
    enum_value_name = T.let(nil, T.nilable(String))
    enum_class.constants.each do |const_name|
      if enum_class.const_get(const_name) == type_obj
        enum_value_name = const_name.to_s
        break
      end
    end
    # For example: LogStruct::Source::Job => { type: "enum_single", base_enum: "Source", enum_value: "Job" }
    result[:type] = "enum_single"
    result[:base_enum] = enum_name
    result[:enum_value] = enum_value_name
    return result
  # Check for T::Types::TEnum with a specific enum value
  elsif type_obj.is_a?(T::Types::TEnum) && type_str.include?("::") && !type_str.include?("T.nilable")
    # Handle specific enum types like LogStruct::Source::Job
    # The type string will look like "LogStruct::Source::Job"
    parts = type_str.split("::")
    if parts.size >= 3
      # Extract the enum name and specific value
      enum_name = parts[-2]
      enum_value_name = parts[-1]
      # For example: LogStruct::Source::Job => { type: "enum_single", base_enum: "Source", enum_value: "Job" }
      result[:type] = "enum_single"
      result[:base_enum] = enum_name
      result[:enum_value] = enum_value_name
      return result
    end
  # Detect union types (T.any) or type aliases
  elsif type_str.include?("T.any(") || type_str.include?("LogStruct::Log::")
    # First, try to extract the base enum type (Event, Level, Source)
    base_enum = nil
    enum_values = []
    # Check if it's a Event union type
    if type_str.include?("Event::")
      base_enum = "Event"
      enum_module = LogStruct::Event
    elsif type_str.include?("Level::")
      base_enum = "Level"
      enum_module = LogStruct::Level
    elsif type_str.include?("Source::")
      base_enum = "Source"
      enum_module = LogStruct::Source
    end
    if base_enum
      result[:type] = "enum_union"
      result[:base_enum] = base_enum
      # Try to parse values from the T.any(...) format for direct T.any usage
      if type_str =~ /T\.any\(([^)]+)\)/
        values_str = $1
        # Regex to extract enum constants like Event::IPSpoof
        values_str.scan(/#{base_enum}::([A-Za-z0-9_]+)/) do |match|
          enum_values << match.first
        end
      end
      # For type aliases like SecurityEvent, try to resolve the alias
      if enum_values.empty? && type_str =~ /LogStruct::Log::([A-Za-z0-9_]+)::([A-Za-z0-9_]+Event)/
        log_class_name = $1
        type_alias_name = $2
        # Try to get the log class; nil if the constant doesn't resolve
        log_class = begin
          Object.const_get("LogStruct::Log::#{log_class_name}")
        rescue
          nil
        end
        if log_class&.const_defined?(type_alias_name)
          # Try to resolve the type alias through the class hierarchy
          begin
            # Check each enum constant for a textual appearance in the
            # rendered type (LogStruct's aliases are defined via T.any())
            enum_module.constants.each do |const_name|
              # Check if this constant is used in the type definition at all
              potential_match = "#{base_enum}::#{const_name}"
              if type_str.include?(potential_match)
                enum_values << const_name.to_s
              end
            end
          rescue => e
            # Log the error for debugging but continue with what we have
            puts "Error resolving type alias #{type_alias_name}: #{e.message}" if ENV["DEBUG"]
          end
        end
      end
      result[:enum_values] = enum_values unless enum_values.empty?
    else
      # Handle other types of unions that aren't enum-based
      result[:type] = "any"
    end
  # Standard type handling for simple types
  elsif type_str.include?("LogStruct::Level")
    result[:type] = "enum"
    result[:values] = "Level"
  elsif type_str.include?("LogStruct::Source")
    result[:type] = "enum"
    result[:values] = "Source"
  elsif type_str.include?("LogStruct::Event")
    result[:type] = "enum"
    result[:values] = "Event"
  elsif type_str.include?("T::Array") || type_str.include?("TypedArray") || (type_str == "T::Array[String]") || prop_info.key?(:array)
    result[:type] = "array"
    # Get array item type if available
    if prop_info[:array]
      item_type = prop_info[:array].to_s
      result[:item_type] = if item_type.include?("String")
        "string"
      elsif item_type.include?("Integer")
        "integer"
      elsif item_type.include?("Float")
        "number"
      elsif item_type.include?("Boolean") || item_type.include?("TrueClass") || item_type.include?("FalseClass")
        "boolean"
      else
        "any"
      end
    end
  elsif type_str.include?("String")
    result[:type] = "string"
  elsif type_str.include?("Integer")
    result[:type] = "integer"
  elsif type_str.include?("Float")
    result[:type] = "number"
  elsif type_str.include?("Boolean") || type_str.include?("TrueClass") || type_str.include?("FalseClass")
    result[:type] = "boolean"
  elsif type_str.include?("Time")
    # Times export as ISO-style strings on the TypeScript side
    result[:type] = "string"
    result[:format] = "date-time"
  elsif type_str.include?("T::Hash")
    result[:type] = "object"
    # Could extract key/value types here if needed
  else
    result[:type] = "any"
  end
  result
end
  444. 2 sig { params(field_info: T::Hash[Symbol, T.untyped]).returns(String) }
  445. 1 def typescript_type_for(field_info)
  446. 1141 case field_info[:type]
  447. when "enum"
  448. 106 field_info[:values]
  449. when "enum_single"
  450. # Handle single enum value restriction
  451. # (e.g., const :source, Source::Job, default: T.let(Source::Job, Source::Job))
  452. 99 if field_info[:base_enum] && field_info[:enum_value]
  453. # Create a specific enum value reference like: Source.JOB
  454. 99 "#{field_info[:base_enum]}.#{field_info[:enum_value].upcase}"
  455. else
  456. # Fallback to the base enum if we couldn't extract the specific value
  457. field_info[:base_enum] || "any"
  458. end
  459. when "enum_union"
  460. # Handle union of enum values
  461. 50 if field_info[:base_enum] && field_info[:enum_values]
  462. # Create a union type like: Event.IP_SPOOF | Event.CSRF_VIOLATION | Event.BLOCKED_HOST
  463. 50 field_info[:enum_values].map do |value|
  464. # Get the Ruby enum object for the given value name (e.g., Event::IPSpoof)
  465. 248 enum_class = case field_info[:base_enum]
  466. 248 when "Event" then LogStruct::Event
  467. when "Level" then LogStruct::Level
  468. when "Source" then LogStruct::Source
  469. end
  470. 248 if enum_class
  471. # Look up the actual enum value to get its serialized form
  472. enum_value = begin
  473. 248 enum_class.const_get(value)
  474. rescue NameError
  475. nil
  476. end
  477. # Convert to TypeScript enum constant (serialized value -> uppercase)
  478. 248 serialized = enum_value&.serialize&.upcase || value.upcase
  479. 248 "#{field_info[:base_enum]}.#{serialized}"
  480. else
  481. # Fallback if we can't find the enum class
  482. "#{field_info[:base_enum]}.#{value.upcase}"
  483. end
  484. end.join(" | ")
  485. else
  486. # Fallback to the base enum if we couldn't extract specific values
  487. field_info[:base_enum] || "any"
  488. end
  489. when "string"
  490. 519 if field_info[:format] == "date-time"
  491. end
  492. 519 "string"
  493. when "integer", "number"
  494. 154 "number"
  495. when "boolean"
  496. 7 "boolean"
  497. when "array"
  498. 50 if field_info[:item_type]
  499. 29 "#{field_info[:item_type]}[]"
  500. else
  501. 21 "any[]"
  502. end
  503. when "object"
  504. 113 "Record<string, any>"
  505. else
  506. 43 "any"
  507. end
  508. end
  509. end
  510. end
  511. end
  512. # rubocop:enable Sorbet/ConstantsFromStrings