All Files ( 84.86% covered at 41.49 hits/line )
150 files in total.
5204 relevant lines,
4416 lines covered and
788 lines missed.
(
84.86%
)
# typed: strict
# frozen_string_literal: true
# Core library files
- 2
require "log_struct/sorbet"
- 2
require "log_struct/version"
- 2
require "log_struct/enums"
- 2
require "log_struct/configuration"
- 2
require "log_struct/formatter"
- 2
require "log_struct/railtie"
- 2
require "log_struct/concerns/error_handling"
- 2
require "log_struct/concerns/configuration"
- 2
require "log_struct/concerns/logging"
# Require integrations
- 2
require "log_struct/integrations"
# SemanticLogger integration - core feature for high-performance logging
- 2
require "log_struct/semantic_logger/formatter"
- 2
require "log_struct/semantic_logger/color_formatter"
- 2
require "log_struct/semantic_logger/logger"
- 2
require "log_struct/semantic_logger/setup"
- 2
require "log_struct/rails_boot_banner_silencer"
# Monkey patches for Rails compatibility
- 2
require "log_struct/monkey_patches/active_support/tagged_logging/formatter"
- 2
module LogStruct
  extend T::Sig

  # Tracks whether this process is a long-lived server (flipped on by the
  # Puma / Rack handler patches).
  @server_mode = T.let(false, T::Boolean)

  class Error < StandardError; end

  extend Concerns::ErrorHandling::ClassMethods
  extend Concerns::Configuration::ClassMethods
  extend Concerns::Logging::ClassMethods

  sig { returns(T::Boolean) }
  def self.server_mode?
    @server_mode
  end

  sig { params(value: T::Boolean).void }
  def self.server_mode=(value)
    @server_mode = value
  end

  # Decide enabled/disabled at require time from the current Rails environment.
  # (Users can override this in their initializer, which runs before the
  # Railtie checks enabled.)
  set_enabled_from_rails_env!

  # Silence Rails boot banners for cleaner server output.
  LogStruct::RailsBootBannerSilencer.install!

  # Patch Puma immediately for server runs so its lifecycle messages can be
  # converted into structured logs reliably.
  if ARGV.include?("server")
    begin
      require "log_struct/integrations/puma"
      LogStruct::Integrations::Puma.install_patches!
      # Patches installed now; the Rack handler patch covers the server boot path.
    rescue => e
      # In the test environment surface the failure loudly; otherwise route it
      # through the configured error handling.
      in_test_env = defined?(::Rails) && ::Rails.respond_to?(:env) && ::Rails.env.test?
      raise e if in_test_env

      LogStruct.handle_exception(e, source: LogStruct::Source::Puma)
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
module LogStruct
  # Collects structured logs during very early boot, before the logger is
  # ready; entries are replayed once logging is available.
  module BootBuffer
    extend T::Sig

    # FIX: previously a class variable (@@logs). Class variables are shared
    # across the inheritance tree and are a well-known Ruby footgun; a
    # module-level class-instance variable behaves identically here because
    # only singleton methods on BootBuffer access the buffer.
    @logs = T.let([], T::Array[LogStruct::Log::Interfaces::CommonFields])

    # Buffer a structured log entry until the logger is ready.
    sig { params(log: LogStruct::Log::Interfaces::CommonFields).void }
    def self.add(log)
      @logs << log
    end

    # Replay all buffered entries through LogStruct.info, then empty the buffer.
    sig { void }
    def self.flush
      return if @logs.empty?

      @logs.each { |l| LogStruct.info(l) }
      @logs.clear
    end

    # Drop any buffered entries without logging them.
    sig { void }
    def self.clear
      @logs.clear
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
require_relative "../configuration"
- 2
module LogStruct
  module Concerns
    # Configuration concern extended into LogStruct: exposes the singleton
    # Configuration, environment-based enablement detection, and merging of
    # Rails' filter_parameters into LogStruct's own filters.
    module Configuration
      module ClassMethods
        extend T::Sig

        SERVER_COMMAND_ARGS = T.let(["server", "s"].freeze, T::Array[String])
        CONSOLE_COMMAND_ARGS = T.let(["console", "c"].freeze, T::Array[String])
        EMPTY_ARGV = T.let([].freeze, T::Array[String])
        CI_FALSE_VALUES = T.let(["false", "0", "no"].freeze, T::Array[String])

        # Block-style configuration: yields the singleton Configuration.
        sig { params(block: T.proc.params(config: LogStruct::Configuration).void).void }
        def configure(&block)
          yield(config)
        end

        sig { returns(LogStruct::Configuration) }
        def config
          LogStruct::Configuration.instance
        end

        # (Can't use alias_method since this module is extended into LogStruct)
        sig { returns(LogStruct::Configuration) }
        def configuration
          config
        end

        # Setter method to replace the configuration (for testing purposes)
        sig { params(config: LogStruct::Configuration).void }
        def configuration=(config)
          LogStruct::Configuration.set_instance(config)
        end

        sig { returns(T::Boolean) }
        def enabled?
          config.enabled
        end

        # Set enabled based on the current Rails environment and the
        # LOGSTRUCT_ENABLED env var. Precedence:
        # 1. If LOGSTRUCT_ENABLED is set, it wins: enabled only for a truthy
        #    value ("true", "t", "yes", "y", "1"); disabled for anything else.
        # 2. Otherwise, enabled when the current Rails environment is in
        #    enabled_environments AND (this is a server process OR the test
        #    environment) BUT NOT an interactive console.
        sig { void }
        def set_enabled_from_rails_env!
          config.enabled = if ENV["LOGSTRUCT_ENABLED"]
            %w[true t yes y 1].include?(ENV["LOGSTRUCT_ENABLED"]&.strip&.downcase)
          else
            is_console = console_process?
            is_server = server_process?
            # FIX: a bare `ci_build?` call used to be evaluated here with its
            # result discarded, so the previously documented "test environment
            # with CI=true" gate was never actually applied. The dead call is
            # removed (ci_build? is side-effect free).
            # NOTE(review): if the CI gate *was* intended, the condition below
            # should become `is_server || (::Rails.env.test? && ci_build?)` —
            # confirm intent before tightening behavior.
            in_enabled_env = config.enabled_environments.include?(::Rails.env.to_sym)
            in_enabled_env && !is_console && (is_server || ::Rails.env.test?)
          end
        end

        sig { returns(T::Boolean) }
        def is_local?
          config.local_environments.include?(::Rails.env.to_sym)
        end

        sig { returns(T::Boolean) }
        def is_production?
          !is_local?
        end

        # Merge Rails' config.filter_parameters into LogStruct's filters:
        # symbols/strings become filter_keys, regexps and callables become
        # filter_matchers, and unrecognized entries are left behind on the
        # Rails filter_parameters array.
        sig { void }
        def merge_rails_filter_parameters!
          return unless ::Rails.application.config.respond_to?(:filter_parameters)

          rails_filter_params = ::Rails.application.config.filter_parameters
          return unless rails_filter_params.is_a?(Array)
          return if rails_filter_params.empty?

          symbol_filters = T.let([], T::Array[Symbol])
          matchers = T.let([], T::Array[ConfigStruct::FilterMatcher])
          leftovers = T.let([], T::Array[T.untyped])

          rails_filter_params.each do |entry|
            matcher = build_filter_matcher(entry)
            if matcher
              matchers << matcher
              next
            end

            normalized_symbol = normalize_filter_symbol(entry)
            if normalized_symbol
              symbol_filters << normalized_symbol
            else
              leftovers << entry
            end
          end

          if symbol_filters.any?
            # Set-union so repeated merges don't duplicate keys
            config.filters.filter_keys |= symbol_filters
          end

          if matchers.any?
            matchers.each do |matcher|
              # De-duplicate matchers by label (inspect of the original filter)
              existing = config.filters.filter_matchers.any? do |registered|
                registered.label == matcher.label
              end
              config.filters.filter_matchers << matcher unless existing
            end
          end

          replace_filter_parameters(rails_filter_params, leftovers)
        end

        private

        # True when running inside a Rails console (Rails::Console defined,
        # or a "console"/"c" CLI argument).
        sig { returns(T::Boolean) }
        def console_process?
          return true if defined?(::Rails::Console)

          current_argv.any? { |arg| CONSOLE_COMMAND_ARGS.include?(arg) }
        end

        # True when running as a server (LogStruct server-mode flag set, or a
        # "server"/"s" CLI argument).
        sig { returns(T::Boolean) }
        def server_process?
          return true if logstruct_server_mode?

          current_argv.any? { |arg| SERVER_COMMAND_ARGS.include?(arg) }
        end

        sig { returns(T::Boolean) }
        def logstruct_server_mode?
          ::LogStruct.server_mode?
        end

        # ARGV stringified; EMPTY_ARGV if ARGV is somehow undefined.
        sig { returns(T::Array[String]) }
        def current_argv
          raw = ::ARGV
          strings = raw.map { |arg| arg.to_s }
          T.let(strings, T::Array[String])
        rescue NameError
          EMPTY_ARGV
        end

        # True when the CI env var is set to anything other than blank or an
        # explicit false-y value ("false", "0", "no").
        sig { returns(T::Boolean) }
        def ci_build?
          value = ENV["CI"]
          return false if value.nil?

          normalized = value.strip.downcase
          return false if normalized.empty?

          !CI_FALSE_VALUES.include?(normalized)
        end

        # Coerce a filter entry to a Symbol where possible (Symbol as-is,
        # String downcased); nil when it can't be represented as a symbol.
        sig { params(filter: T.untyped).returns(T.nilable(Symbol)) }
        def normalize_filter_symbol(filter)
          return filter if filter.is_a?(Symbol)
          return filter.downcase.to_sym if filter.is_a?(String)
          return nil unless filter.respond_to?(:to_sym)

          begin
            sym = filter.to_sym
            # Defensive: a custom #to_sym may return a non-Symbol
            sym.is_a?(Symbol) ? sym : nil
          rescue
            nil
          end
        end

        # Build a FilterMatcher for regexp or callable filter entries;
        # nil for everything else (symbols/strings are handled separately).
        sig { params(filter: T.untyped).returns(T.nilable(ConfigStruct::FilterMatcher)) }
        def build_filter_matcher(filter)
          case filter
          when ::Regexp
            callable = Kernel.lambda do |key, _value|
              filter.match?(key)
            end
            return ConfigStruct::FilterMatcher.new(callable: callable, label: filter.inspect)
          else
            return build_callable_filter_matcher(filter) if callable_filter?(filter)
          end

          nil
        end

        sig { params(filter: T.untyped).returns(T::Boolean) }
        def callable_filter?(filter)
          filter.respond_to?(:call)
        end

        # Wrap an arbitrary callable in a FilterMatcher, adapting to its arity
        # and converting raised errors into handled internal errors (the pair
        # is then treated as not filtered).
        sig { params(filter: T.untyped).returns(T.nilable(ConfigStruct::FilterMatcher)) }
        def build_callable_filter_matcher(filter)
          callable = Kernel.lambda do |key, value|
            call_args = case arity_for_filter(filter)
            when 0
              []
            when 1
              [key]
            else
              # Also covers negative arity (optional/splat args)
              [key, value]
            end
            result = filter.call(*call_args)
            !!result
          rescue ArgumentError
            # Arity detection can mislead for some callables; retry with just
            # the key before giving up.
            begin
              !!filter.call(key)
            rescue => e
              handle_filter_error(e, filter, key)
              false
            end
          rescue => e
            handle_filter_error(e, filter, key)
            false
          end
          ConfigStruct::FilterMatcher.new(callable: callable, label: filter.inspect)
        end

        sig { params(filter: T.untyped).returns(Integer) }
        def arity_for_filter(filter)
          filter.respond_to?(:arity) ? filter.arity : 2
        end

        # Replace the Rails filter_parameters contents in place with only the
        # entries LogStruct couldn't absorb.
        sig { params(filter_params: T::Array[T.untyped], leftovers: T::Array[T.untyped]).void }
        def replace_filter_parameters(filter_params, leftovers)
          filter_params.clear
          filter_params.concat(leftovers)
        end

        # Route a filter-callable failure through LogStruct's internal error
        # handling with enough context to identify the offending filter.
        sig { params(error: StandardError, filter: T.untyped, key: String).void }
        def handle_filter_error(error, filter, key)
          context = {
            filter: filter.class.name,
            key: key,
            filter_label: begin
              filter.inspect
            rescue
              "unknown"
            end
          }
          LogStruct.handle_exception(error, source: Source::Internal, context: context)
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
module LogStruct
  module Concerns
    # Routes errors according to the configured error handling modes.
    module ErrorHandling
      module ClassMethods
        extend T::Sig
        extend T::Helpers

        # Needed for raise
        requires_ancestor { Module }

        # Resolve the configured handling mode for a given error source.
        sig { params(source: Source).returns(ErrorHandlingMode) }
        def error_handling_mode_for(source)
          modes = LogStruct.config.error_handling_modes
          # Case statement keeps this type-safe and exhaustive
          case source
          when Source::TypeChecking
            modes.type_checking_errors
          when Source::Internal
            modes.logstruct_errors
          when Source::Security
            modes.security_errors
          when Source::Rails, Source::App, Source::Job, Source::Storage, Source::Mailer,
            Source::Shrine, Source::CarrierWave, Source::Sidekiq, Source::Dotenv, Source::Puma
            modes.standard_errors
          else
            # Ensures the case statement is exhaustive
            T.absurd(source)
          end
        end

        # Log an error with structured data.
        sig { params(error: StandardError, source: Source, context: T.nilable(T::Hash[Symbol, T.untyped])).void }
        def log_error(error, source:, context: nil)
          # Create structured log entry
          structured = Log.from_exception(source, error, context || {})
          LogStruct.error(structured)
        end

        # Log, then report an error via the configured handler — falling back
        # to MultiErrorReporter (auto-detects Sentry, Bugsnag, etc.).
        sig { params(error: StandardError, source: Source, context: T.nilable(T::Hash[Symbol, T.untyped])).void }
        def log_and_report_error(error, source:, context: nil)
          log_error(error, source: source, context: context)

          handler = LogStruct.config.error_reporting_handler
          if handler
            # Use the configured handler
            handler.call(error, context, source)
          else
            # Fall back to MultiErrorReporter
            LogStruct::MultiErrorReporter.report_error(error, context || {})
          end
        end

        # Handle an error according to the configured handling mode
        # (ignore, log, report, or raise).
        sig { params(error: StandardError, source: Source, context: T.nilable(T::Hash[Symbol, T.untyped])).void }
        def handle_exception(error, source:, context: nil)
          mode = error_handling_mode_for(source)

          # The *Production modes log/report in production but raise locally (dev/test)
          if mode == ErrorHandlingMode::LogProduction || mode == ErrorHandlingMode::ReportProduction
            raise(error) unless LogStruct.is_production?
          end

          case mode
          when ErrorHandlingMode::Ignore
            # Do nothing
          when ErrorHandlingMode::Raise
            raise(error)
          when ErrorHandlingMode::Log, ErrorHandlingMode::LogProduction
            log_error(error, source: source, context: context)
          when ErrorHandlingMode::Report, ErrorHandlingMode::ReportProduction
            log_and_report_error(error, source: source, context: context)
          else
            # Ensures the case statement is exhaustive
            T.absurd(mode)
          end
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
require_relative "../log"
- 2
module LogStruct
  module Concerns
    # Severity-level helpers that forward structured log objects straight to
    # Rails.logger.
    module Logging
      module ClassMethods
        extend T::Sig

        # Emit a log struct at debug severity.
        sig { params(log: T.any(Log::Interfaces::CommonFields, Log::Interfaces::PublicCommonFields)).void }
        def debug(log)
          Rails.logger.debug(log)
        end

        # Emit a log struct at info severity.
        sig { params(log: T.any(Log::Interfaces::CommonFields, Log::Interfaces::PublicCommonFields)).void }
        def info(log)
          Rails.logger.info(log)
        end

        # Emit a log struct at warn severity.
        sig { params(log: T.any(Log::Interfaces::CommonFields, Log::Interfaces::PublicCommonFields)).void }
        def warn(log)
          Rails.logger.warn(log)
        end

        # Emit a log struct at error severity.
        sig { params(log: T.any(Log::Interfaces::CommonFields, Log::Interfaces::PublicCommonFields)).void }
        def error(log)
          Rails.logger.error(log)
        end

        # Emit a log struct at fatal severity.
        sig { params(log: T.any(Log::Interfaces::CommonFields, Log::Interfaces::PublicCommonFields)).void }
        def fatal(log)
          Rails.logger.fatal(log)
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
module LogStruct
  module ConfigStruct
    # Per-category error handling modes.
    #
    # Available modes:
    #   Ignore           - swallow the error entirely
    #   Log              - log the error
    #   Report           - log and report to the error tracking service (no crash)
    #   LogProduction    - log in production, raise locally (dev/test)
    #   ReportProduction - report in production, raise locally (dev/test)
    #   Raise            - always raise the error
    class ErrorHandlingModes < T::Struct
      include Sorbet::SerializeSymbolKeys

      # Configurable error handling categories
      prop :type_checking_errors, ErrorHandlingMode, default: ErrorHandlingMode::LogProduction
      prop :logstruct_errors, ErrorHandlingMode, default: ErrorHandlingMode::LogProduction
      prop :security_errors, ErrorHandlingMode, default: ErrorHandlingMode::Report
      prop :standard_errors, ErrorHandlingMode, default: ErrorHandlingMode::Raise
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
module LogStruct
  module ConfigStruct
    # A single param-filter predicate plus a human-readable label, used for
    # de-duplication when merging Rails filter_parameters.
    class FilterMatcher < T::Struct
      extend T::Sig

      const :callable, T.proc.params(key: String, value: T.untyped).returns(T::Boolean)
      const :label, String

      # True when the given key/value pair should be filtered.
      sig { params(key: String, value: T.untyped).returns(T::Boolean) }
      def matches?(key, value)
        callable.call(key, value)
      end
    end

    class Filters < T::Struct
      include Sorbet::SerializeSymbolKeys

      # Keys that should be filtered in nested structures such as request
      # params and job arguments. Filtered data includes information about
      # Hashes and Arrays:
      #
      # { _filtered: {
      #     _class: "Hash",               # Class of the filtered value
      #     _bytes: 1234,                 # Length of JSON string in bytes
      #     _keys_count: 3,               # Number of keys in the hash
      #     _keys: [:key1, :key2, :key3], # First 10 keys in the hash
      # } }
      #
      # Default: [:password, :password_confirmation, :pass, :pw, :token,
      # :secret, :credentials, :auth, :authentication, :authorization,
      # :credit_card, :ssn, :social_security]
      prop :filter_keys,
        T::Array[Symbol],
        factory: -> {
          %i[
            password password_confirmation pass pw token secret
            credentials auth authentication authorization
            credit_card ssn social_security
          ]
        }
      # Keys where string values should include an SHA256 hash. Useful for
      # tracing emails across requests (e.g. sign in, sign up) while
      # protecting privacy. Default: [:email, :email_address]
      prop :filter_keys_with_hashes,
        T::Array[Symbol],
        factory: -> { %i[email email_address] }
      # Hash salt for SHA256 hashing (typically used for email addresses).
      # Used for both param filters and string scrubbing. Default: "l0g5t0p"
      prop :hash_salt, String, default: "l0g5t0p"
      # Hash length for SHA256 hashing (typically used for email addresses).
      # Used for both param filters and string scrubbing. Default: 12
      prop :hash_length, Integer, default: 12
      # Filter email addresses. Also controls email filtering for the
      # ActionMailer integration (to, from, recipient fields, etc.).
      # Default: true
      prop :email_addresses, T::Boolean, default: true
      # Filter URL passwords. Default: true
      prop :url_passwords, T::Boolean, default: true
      # Filter credit card numbers. Default: true
      prop :credit_card_numbers, T::Boolean, default: true
      # Filter phone numbers. Default: true
      prop :phone_numbers, T::Boolean, default: true
      # Filter social security numbers. Default: true
      prop :ssns, T::Boolean, default: true
      # Filter IP addresses. Default: false
      prop :ip_addresses, T::Boolean, default: false
      # Filter MAC addresses. Default: false
      prop :mac_addresses, T::Boolean, default: false
      # Additional matchers built from Rails filter_parameters entries that
      # aren't simple symbols. Each matcher receives the key (String) and
      # optional value, returning true when the pair should be filtered.
      prop :filter_matchers,
        T::Array[FilterMatcher],
        factory: -> { [] }
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
require "active_support/notifications"
- 2
module LogStruct
  module ConfigStruct
    # Feature switches (and related options) for each LogStruct integration.
    class Integrations < T::Struct
      include Sorbet::SerializeSymbolKeys

      # Sorbet error handler integration. Default: true
      prop :enable_sorbet_error_handlers, T::Boolean, default: true
      # Lograge integration. Default: true
      prop :enable_lograge, T::Boolean, default: true
      # Custom options for Lograge. Default: nil
      prop :lograge_custom_options, T.nilable(Handlers::LogrageCustomOptions), default: nil
      # ActionMailer integration. Default: true
      prop :enable_actionmailer, T::Boolean, default: true
      # Map instance variables on a mailer to ID fields in additional_data.
      # Default: { account: :account_id, user: :user_id }
      # Example: { organization: :org_id, company: :company_id }
      prop :actionmailer_id_mapping, T::Hash[Symbol, Symbol], factory: -> { {account: :account_id, user: :user_id} }
      # Host authorization logging. Default: true
      prop :enable_host_authorization, T::Boolean, default: true
      # ActiveJob integration. Default: true
      prop :enable_activejob, T::Boolean, default: true
      # Rack error-handling middleware. Default: true
      prop :enable_rack_error_handler, T::Boolean, default: true
      # Sidekiq integration. Default: true
      prop :enable_sidekiq, T::Boolean, default: true
      # Shrine integration. Default: true
      prop :enable_shrine, T::Boolean, default: true
      # ActiveStorage integration. Default: true
      prop :enable_activestorage, T::Boolean, default: true
      # CarrierWave integration. Default: true
      prop :enable_carrierwave, T::Boolean, default: true
      # GoodJob integration. Default: true
      prop :enable_goodjob, T::Boolean, default: true
      # SemanticLogger integration for high-performance logging. Default: true
      prop :enable_semantic_logger, T::Boolean, default: true
      # SQL query logging through ActiveRecord instrumentation.
      # Default: false (can be resource intensive)
      prop :enable_sql_logging, T::Boolean, default: false
      # Only log SQL queries slower than this threshold (in milliseconds).
      # Set to 0 or nil to log all queries. Default: 100.0
      prop :sql_slow_query_threshold, T.nilable(Float), default: 100.0
      # Include bind parameters in SQL logs (disable in production for
      # security). Default: true outside production, false in production.
      prop :sql_log_bind_params, T::Boolean, factory: -> { !defined?(::Rails) || !::Rails.respond_to?(:env) || !::Rails.env.production? }
      # Ahoy (analytics events) integration.
      # Default: true (safe no-op unless Ahoy is defined)
      prop :enable_ahoy, T::Boolean, default: true
      # ActiveModelSerializers integration.
      # Default: true (safe no-op unless ActiveModelSerializers is defined)
      prop :enable_active_model_serializers, T::Boolean, default: true
      # dotenv-rails integration (convert to structured logs). Default: true
      prop :enable_dotenv, T::Boolean, default: true
      # Puma integration (convert server lifecycle logs). Default: true
      prop :enable_puma, T::Boolean, default: true
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
require_relative "handlers"
- 2
require_relative "config_struct/error_handling_modes"
- 2
require_relative "config_struct/integrations"
- 2
require_relative "config_struct/filters"
- 2
module LogStruct
  # Core configuration class that provides a type-safe API.
  class Configuration < T::Struct
    extend T::Sig
    include Sorbet::SerializeSymbolKeys

    # -----------------------------------------------------------------------
    # Props
    # -----------------------------------------------------------------------
    prop :enabled, T::Boolean, default: true
    prop :enabled_environments, T::Array[Symbol], factory: -> { [:test, :production] }
    prop :local_environments, T::Array[Symbol], factory: -> { [:development, :test] }
    # Prefer production-style JSON in development when LogStruct is enabled
    prop :prefer_json_in_development, T::Boolean, default: true
    # Enable colorful human formatter in development
    prop :enable_color_output, T::Boolean, default: true
    # Custom color map for the color formatter
    prop :color_map, T.nilable(T::Hash[Symbol, Symbol]), default: nil
    # Filter noisy loggers (ActionView, etc.)
    prop :filter_noisy_loggers, T::Boolean, default: false
    const :integrations, ConfigStruct::Integrations, factory: -> { ConfigStruct::Integrations.new }
    const :filters, ConfigStruct::Filters, factory: -> { ConfigStruct::Filters.new }
    # Custom log scrubbing handler for any additional string scrubbing.
    # Default: nil
    prop :string_scrubbing_handler, T.nilable(Handlers::StringScrubber)
    # Custom handler for error reporting. Default: errors are handled by
    # MultiErrorReporter (auto-detects Sentry, Bugsnag, Rollbar, Honeybadger, etc.)
    prop :error_reporting_handler, T.nilable(Handlers::ErrorReporter), default: nil
    # How to handle errors from various sources
    const :error_handling_modes,
      ConfigStruct::ErrorHandlingModes,
      factory: -> {
        ConfigStruct::ErrorHandlingModes.new
      }

    # -----------------------------------------------------------------------
    # Class methods (process-wide singleton)
    # -----------------------------------------------------------------------
    # Class-instance variable holding the singleton
    @instance = T.let(nil, T.nilable(Configuration))

    # Lazily-created singleton instance.
    sig { returns(Configuration) }
    def self.instance
      @instance ||= T.let(Configuration.new, T.nilable(Configuration))
    end

    # Replace the singleton (primarily for tests).
    sig { params(config: Configuration).void }
    def self.set_instance(config)
      @instance = config
    end
  end
end
# typed: strict
# frozen_string_literal: true
# Require all enums in this directory
- 2
require_relative "enums/error_handling_mode"
- 2
require_relative "enums/error_reporter"
- 2
require_relative "enums/event"
- 2
require_relative "enums/level"
- 2
require_relative "enums/source"
# typed: strict
# frozen_string_literal: true
- 2
module LogStruct
  # Enum of error handling strategies.
  class ErrorHandlingMode < T::Enum
    enums do
      # Always ignore the error
      Ignore = new(:ignore)
      # Always log the error
      Log = new(:log)
      # Always report to the tracking service and continue
      Report = new(:report)
      # Log in production, raise locally (dev/test)
      LogProduction = new(:log_production)
      # Report in production, raise locally (dev/test)
      ReportProduction = new(:report_production)
      # Always raise regardless of environment
      Raise = new(:raise)
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
module LogStruct
  # Enum of supported error reporting backends.
  class ErrorReporter < T::Enum
    enums do
      RailsLogger = new(:rails_logger)
      Sentry = new(:sentry)
      Bugsnag = new(:bugsnag)
      Rollbar = new(:rollbar)
      Honeybadger = new(:honeybadger)
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
module LogStruct
  # Enum of log event types.
  class Event < T::Enum
    enums do
      # Plain log messages
      Log = new(:log)
      # Request events
      Request = new(:request)
      # Job events
      Enqueue = new(:enqueue)
      Schedule = new(:schedule)
      Start = new(:start)
      Finish = new(:finish)
      # File storage events (ActiveStorage, Shrine, CarrierWave, etc.)
      Upload = new(:upload)
      Download = new(:download)
      Delete = new(:delete)
      Metadata = new(:metadata)
      Exist = new(:exist)
      Stream = new(:stream)
      Url = new(:url)
      # Data generation events
      Generate = new(:generate)
      # Email events
      Delivery = new(:delivery)
      Delivered = new(:delivered)
      # Configuration / boot events
      Load = new(:load)
      Update = new(:update)
      Save = new(:save)
      Restore = new(:restore)
      # Server lifecycle (e.g., Puma); Start is already defined above
      Shutdown = new(:shutdown)
      # Security events
      IPSpoof = new(:ip_spoof)
      CSRFViolation = new(:csrf_violation)
      BlockedHost = new(:blocked_host)
      # Database events
      Database = new(:database)
      # Error events
      Error = new(:error)
      # Fallback
      Unknown = new(:unknown)
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
require "logger"
- 2
module LogStruct
  # Log severity levels as a type-safe enum, with conversions to and from
  # the stdlib ::Logger integer constants.
  class Level < T::Enum
    extend T::Sig

    enums do
      # Standard log levels
      Debug = new(:debug)
      Info = new(:info)
      Warn = new(:warn)
      Error = new(:error)
      Fatal = new(:fatal)
      Unknown = new(:unknown)
    end

    # Map this Level to the corresponding ::Logger integer constant.
    sig { returns(Integer) }
    def to_severity_int
      case serialize
      when :debug then ::Logger::DEBUG
      when :info then ::Logger::INFO
      when :warn then ::Logger::WARN
      when :error then ::Logger::ERROR
      when :fatal then ::Logger::FATAL
      else ::Logger::UNKNOWN
      end
    end

    # Coerce a string, symbol, integer, or nil severity into a Level.
    sig { params(severity: T.any(String, Symbol, Integer, NilClass)).returns(Level) }
    def self.from_severity(severity)
      case severity
      when nil then Unknown
      when Integer then from_severity_int(severity)
      else from_severity_sym(severity.downcase.to_sym)
      end
    end

    sig { params(severity: Symbol).returns(Level) }
    def self.from_severity_sym(severity)
      # Re-normalize defensively; callers may pass mixed-case symbols
      case severity.to_s.downcase.to_sym
      when :debug then Debug
      when :info then Info
      when :warn then Warn
      when :error then Error
      when :fatal then Fatal
      else Unknown
      end
    end

    sig { params(severity: Integer).returns(Level) }
    def self.from_severity_int(severity)
      case severity
      when ::Logger::DEBUG then Debug
      when ::Logger::INFO then Info
      when ::Logger::WARN then Warn
      when ::Logger::ERROR then Error
      when ::Logger::FATAL then Fatal
      else Unknown
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# NOTE:
# - This enum defines human‑readable field names (constants) that map to compact
# JSON key symbols via `serialize` (e.g., Database => :db).
# - The enum constant names are code‑generated into
# `schemas/meta/log-fields.json` by `scripts/generate_structs.rb` and
# referenced from `schemas/meta/log-source-schema.json` to strictly validate
# field keys in `schemas/log_sources/*`.
# - When adding or renaming fields here, run the generator so schema validation
# stays in sync.
#
# Use human-readable field names as the enum values and short field names for the JSON properties
- 2
module LogStruct
  # Log field names: human-readable enum constants that serialize to compact
  # JSON key symbols (e.g. Database => :db). The constant names are
  # code-generated into schemas/meta/log-fields.json — re-run the generator
  # when adding or renaming fields here.
  class LogField < T::Enum
    enums do
      # Shared fields
      Source = new(:src)
      Event = new(:evt)
      Timestamp = new(:ts)
      Level = new(:lvl)
      # Common fields
      Message = new(:msg)
      Data = new(:data)
      # Request-related fields
      Path = new(:path)
      HttpMethod = new(:method) # property name was http_method
      SourceIp = new(:source_ip)
      UserAgent = new(:user_agent)
      Referer = new(:referer)
      RequestId = new(:request_id)
      # HTTP-specific fields
      Format = new(:format)
      Controller = new(:controller)
      Action = new(:action)
      Status = new(:status)
      # DurationMs (general metrics) is defined below
      View = new(:view)
      Database = new(:db)
      Params = new(:params)
      # Security-specific fields
      BlockedHost = new(:blocked_host)
      BlockedHosts = new(:blocked_hosts)
      AllowedHosts = new(:allowed_hosts)
      AllowIpHosts = new(:allow_ip_hosts)
      ClientIp = new(:client_ip)
      XForwardedFor = new(:x_forwarded_for)
      # Email-specific fields
      To = new(:to)
      From = new(:from)
      Subject = new(:subject)
      MessageId = new(:msg_id)
      MailerClass = new(:mailer)
      MailerAction = new(:mailer_action)
      AttachmentCount = new(:attachments)
      # Error fields
      ErrorClass = new(:error_class)
      Backtrace = new(:backtrace)
      # Job-specific fields
      JobId = new(:job_id)
      JobClass = new(:job_class)
      QueueName = new(:queue_name)
      Arguments = new(:arguments)
      RetryCount = new(:retry_count)
      Retries = new(:retries)
      Attempt = new(:attempt)
      Executions = new(:executions)
      ExceptionExecutions = new(:exception_executions)
      ProviderJobId = new(:provider_job_id)
      ScheduledAt = new(:scheduled_at)
      StartedAt = new(:started_at)
      FinishedAt = new(:finished_at)
      DurationMs = new(:duration_ms)
      WaitMs = new(:wait_ms)
      # Deprecated: ExecutionTime/WaitTime/RunTime
      ExecutionTime = new(:execution_time)
      WaitTime = new(:wait_time)
      RunTime = new(:run_time)
      Priority = new(:priority)
      CronKey = new(:cron_key)
      ErrorMessage = new(:error_message)
      Result = new(:result)
      EnqueueCaller = new(:enqueue_caller)
      # Dotenv fields
      File = new(:file)
      Vars = new(:vars)
      Snapshot = new(:snapshot)
      # Sidekiq-specific fields
      ProcessId = new(:pid)
      ThreadId = new(:tid)
      Context = new(:ctx)
      # Storage-specific fields (ActiveStorage)
      Checksum = new(:checksum)
      Exist = new(:exist)
      Url = new(:url)
      Prefix = new(:prefix)
      Range = new(:range)
      # Storage-specific fields (Shrine)
      Storage = new(:storage)
      Operation = new(:op)
      FileId = new(:file_id)
      Filename = new(:filename)
      MimeType = new(:mime_type)
      Size = new(:size)
      Metadata = new(:metadata)
      Location = new(:location)
      UploadOptions = new(:upload_opts)
      DownloadOptions = new(:download_opts)
      Options = new(:opts)
      Uploader = new(:uploader)
      # CarrierWave-specific fields
      Model = new(:model)
      MountPoint = new(:mount_point)
      Version = new(:version)
      StorePath = new(:store_path)
      Extension = new(:ext)
      # SQL-specific fields
      Sql = new(:sql)
      Name = new(:name)
      RowCount = new(:row_count)
      # Adapter (shared with AMS) is defined below
      BindParams = new(:bind_params)
      DatabaseName = new(:db_name)
      ConnectionPoolSize = new(:pool_size)
      ActiveConnections = new(:active_count)
      OperationType = new(:op_type)
      TableNames = new(:table_names)
      # ActiveModelSerializers fields
      Serializer = new(:serializer)
      Adapter = new(:adapter)
      ResourceClass = new(:resource_class)
      # Ahoy-specific fields
      AhoyEvent = new(:ahoy_event)
      Properties = new(:properties)
      # Puma / server lifecycle fields
      Mode = new(:mode)
      PumaVersion = new(:puma_version)
      PumaCodename = new(:puma_codename)
      RubyVersion = new(:ruby_version)
      MinThreads = new(:min_threads)
      MaxThreads = new(:max_threads)
      Environment = new(:environment)
      ListeningAddresses = new(:listening_addresses)
      Address = new(:addr)
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
module LogStruct
  # Unified enum of log and error sources.
  class Source < T::Enum
    enums do
      # Error sources
      TypeChecking = new(:type_checking) # Type checking errors (Sorbet)
      Security = new(:security) # Security-related events
      # Errors raised by LogStruct itself. (The constant is named Internal
      # rather than LogStruct because the latter confuses tapioca.)
      Internal = new(:logstruct)
      # Application sources
      Rails = new(:rails) # Request-related logs/errors
      Job = new(:job) # ActiveJob logs/errors
      Storage = new(:storage) # ActiveStorage logs/errors
      Mailer = new(:mailer) # ActionMailer logs/errors
      App = new(:app) # General application logs/errors
      # Third-party gem sources
      Shrine = new(:shrine)
      CarrierWave = new(:carrierwave)
      Sidekiq = new(:sidekiq)
      Dotenv = new(:dotenv)
      Puma = new(:puma)
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
require "logger"
- 2
require "active_support/core_ext/object/blank"
- 2
require "json"
- 2
require "globalid"
- 2
require_relative "enums/source"
- 2
require_relative "enums/event"
- 2
require_relative "string_scrubber"
- 2
require_relative "log"
- 2
require_relative "param_filters"
- 2
require_relative "multi_error_reporter"
- 2
module LogStruct
  # JSON log formatter. Serializes LogStruct log structs (or arbitrary
  # values) into single-line JSON, filtering and scrubbing sensitive data
  # along the way. Also implements enough of the
  # ActiveSupport::TaggedLogging formatter interface (current_tags /
  # tagged / push_tags / clear_tags!) to act as a tagged-logging formatter.
  class Formatter < ::Logger::Formatter
    extend T::Sig

    # Tags for the current thread (ActiveSupport::TaggedLogging support).
    sig { returns(T::Array[String]) }
    def current_tags
      Thread.current[:activesupport_tagged_logging_tags] ||= []
    end

    # Push tags for the duration of the block, popping them afterwards
    # (ActiveSupport::TaggedLogging support).
    sig { params(tags: T::Array[String], blk: T.proc.params(formatter: Formatter).void).returns(T.untyped) }
    def tagged(*tags, &blk)
      new_tags = tags.flatten
      current_tags.concat(new_tags) if new_tags.any?
      yield self
    ensure
      current_tags.pop(new_tags.size) if new_tags&.any?
    end

    # Remove all tags for the current thread
    # (ActiveSupport::TaggedLogging support).
    sig { void }
    def clear_tags!
      Thread.current[:activesupport_tagged_logging_tags] = []
    end

    # Append tags without a block (ActiveSupport::TaggedLogging support).
    # FIX: flatten nested arrays for consistency with #tagged and with
    # ActiveSupport::TaggedLogging#push_tags — previously a nested array
    # argument would be pushed as an Array element into the tag list.
    sig { params(tags: T::Array[String]).returns(T.untyped) }
    def push_tags(*tags)
      current_tags.concat(tags.flatten)
    end

    # Scrub sensitive information (tokens, emails, etc.) from a string.
    sig { params(string: String).returns(String) }
    def scrub_string(string)
      # Use StringScrubber module to scrub sensitive information from strings
      StringScrubber.scrub(string)
    end

    # Recursively prepare a value for logging:
    # - Hash: filter sensitive keys, recurse into values
    # - Array: truncate long arrays (see #process_array)
    # - GlobalID objects: convert to a GlobalID (with fallbacks)
    # - Source/Event enums: serialize
    # - String: scrub; Time: ISO-8601 with milliseconds
    # Returns the value unchanged past 20 levels of nesting to guard
    # against circular references.
    sig { params(arg: T.untyped, recursion_depth: Integer).returns(T.untyped) }
    def process_values(arg, recursion_depth: 0)
      return arg if recursion_depth > 20

      case arg
      when Hash
        result = {}
        arg.each do |key, value|
          # Check if this key should be filtered at any depth
          result[key] = if ParamFilters.should_filter_key?(key, value)
            # Replace the value with a filtered summary
            {_filtered: ParamFilters.summarize_json_attribute(key, value)}
          else
            process_values(value, recursion_depth: recursion_depth + 1)
          end
        end
        result
      when Array
        process_array(arg, recursion_depth: recursion_depth)
      when GlobalID::Identification
        begin
          arg.to_global_id
        rescue
          begin
            case arg
            when ActiveRecord::Base
              "#{arg.class}(##{arg.id})"
            else
              # Non-ActiveRecord object that failed to_global_id: fall back
              # to a string representation; if that also fails we return the
              # error placeholder below.
              String(T.cast(arg, Object))
            end
          rescue => e
            LogStruct.handle_exception(e, source: Source::Internal)
            "[GLOBALID_ERROR]"
          end
        end
      when Source, Event
        arg.serialize
      when String
        scrub_string(arg)
      when Time
        arg.iso8601(3)
      else
        # Any other type (Symbol, Integer, Float, Boolean, etc.) passes through
        arg
      end
    rescue => e
      # Report the error through LogStruct's framework and return the value
      # unprocessed rather than losing the log line.
      context = {
        processor_method: "process_values",
        value_type: arg.class.name,
        recursion_depth: recursion_depth
      }
      LogStruct.handle_exception(e, source: Source::Internal, context: context)
      arg
    end

    # Convert any loggable value into a symbol-keyed hash.
    sig { params(log_value: T.untyped, time: Time).returns(T::Hash[Symbol, T.untyped]) }
    def log_value_to_hash(log_value, time:)
      case log_value
      when Log::Interfaces::CommonFields
        # Our log classes implement #serialize with symbol keys already
        log_value.serialize
      when T::Struct
        # T::Struct#serialize returns string keys; convert to symbols
        log_value.serialize.deep_symbolize_keys
      when Hash
        # Use the hash as-is, converting string keys to symbols
        log_value.dup.deep_symbolize_keys
      else
        # Wrap anything else (Integer, Float, nil, arbitrary objects, ...)
        # in a Plain log struct.
        log_message = case log_value
        when String, Symbol, TrueClass, FalseClass, NilClass, Array, Hash, Time, Numeric
          # Basic types need no further processing
          log_value
        else
          # Serialize complex objects in a useful way:
          # 1. ActiveRecord/ActiveModel objects: as_json (includes attributes)
          # 2. Objects with a custom as_json implementation: their as_json
          # 3. Objects with only ActiveSupport's default as_json: to_s
          begin
            method_owner = log_value.method(:as_json).owner
            if method_owner.to_s.include?("ActiveRecord") ||
                method_owner.to_s.include?("ActiveModel") ||
                method_owner.to_s.exclude?("ActiveSupport::CoreExtensions") &&
                method_owner.to_s.exclude?("Object")
              log_value.as_json
            else
              log_value.to_s
            end
          rescue => e
            # Serialization failed; report and fall back to to_s so we can
            # continue processing.
            context = {
              object_class: log_value.class.name,
              object_inspect: log_value.inspect.truncate(100)
            }
            LogStruct.handle_exception(e, source: Source::Internal, context: context)
            log_value.to_s
          end
        end
        Log::Plain.new(
          message: log_message,
          timestamp: time
        ).serialize
      end
    end

    # Logger::Formatter entry point: serialize a Log struct (or any value)
    # into a JSON line, adding default src/evt/ts/lvl/prog fields.
    sig { params(severity: T.any(String, Symbol, Integer), time: Time, progname: T.nilable(String), log_value: T.untyped).returns(String) }
    def call(severity, time, progname, log_value)
      level_enum = Level.from_severity(severity)
      data = log_value_to_hash(log_value, time: time)
      # Filter params, scrub sensitive values, format ActiveJob GlobalID arguments
      data = process_values(data)
      # Add standard fields if not already present
      data[:src] ||= Source::App
      data[:evt] ||= Event::Log
      data[:ts] ||= time.iso8601(3)
      data[:lvl] = level_enum # Set level from severity parameter
      data[:prog] = progname if progname.present?
      generate_json(data)
    end

    # Output as JSON with a newline. Mocked in tests so the data can be
    # inspected right before it is turned into a JSON string.
    sig { params(data: T::Hash[T.untyped, T.untyped]).returns(String) }
    def generate_json(data)
      "#{data.to_json}\n"
    end

    # Process an array: backtrace-like arrays are processed in full; other
    # arrays are truncated to 10 items with a "... and N more" marker.
    sig { params(array: T::Array[T.untyped], recursion_depth: Integer).returns(T::Array[T.untyped]) }
    def process_array(array, recursion_depth:)
      return [] if array.empty?
      if looks_like_backtrace_array?(array)
        array.map { |value| process_values(value, recursion_depth: recursion_depth + 1) }
      else
        processed = []
        array.each_with_index do |value, index|
          break if index >= 10
          processed << process_values(value, recursion_depth: recursion_depth + 1)
        end
        if array.size > 10
          processed << "... and #{array.size - 10} more items"
        end
        processed
      end
    end

    # Check if an array looks like a backtrace: at least 3 of the first 5
    # elements are strings matching a "file:line" pattern.
    sig { params(array: T::Array[T.untyped]).returns(T::Boolean) }
    def looks_like_backtrace_array?(array)
      backtrace_like_count = array.first(5).count do |element|
        element.is_a?(String) && element.match?(/\A[^:\s]+:\d+/)
      end
      backtrace_like_count >= 3
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
module LogStruct
  # Type aliases for the custom handlers used throughout the library.
  module Handlers
    # Lograge custom-options hook: receives the notification event and the
    # options hash; its return value is merged into the log line.
    LogrageCustomOptions = T.type_alias {
      T.proc.params(
        event: ActiveSupport::Notifications::Event,
        options: T::Hash[Symbol, T.untyped]
      ).returns(T.untyped)
    }

    # Error reporting handler: receives the error, optional context hash,
    # and the Source the error originated from.
    ErrorReporter = T.type_alias {
      T.proc.params(
        error: StandardError,
        context: T.nilable(T::Hash[Symbol, T.untyped]),
        source: Source
      ).void
    }

    # String scrubbing handler: maps a string to its scrubbed form.
    StringScrubber = T.type_alias { T.proc.params(string: String).returns(String) }
  end
end
# typed: strict
# frozen_string_literal: true
- 2
require "digest"
- 2
module LogStruct
  # Utility module for hashing sensitive data.
  module HashUtils
    extend T::Sig

    # Create a salted, truncated SHA-256 digest of a value: stable enough
    # for tracing while preserving privacy. Salt and length come from the
    # configured filters.
    sig { params(value: String).returns(String) }
    def self.hash_value(value)
      filters = LogStruct.config.filters
      digest = Digest::SHA256.hexdigest("#{filters.hash_salt}#{value}")
      digest[0...filters.hash_length] || "error"
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
require_relative "integrations/integration_interface"
- 2
require_relative "integrations/active_job"
- 2
require_relative "integrations/active_record"
- 2
require_relative "integrations/rack_error_handler"
- 2
require_relative "integrations/host_authorization"
- 2
require_relative "integrations/action_mailer"
- 2
require_relative "integrations/lograge"
- 2
require_relative "integrations/shrine"
- 2
require_relative "integrations/sidekiq"
- 2
require_relative "integrations/good_job"
- 2
require_relative "integrations/active_storage"
- 2
require_relative "integrations/carrierwave"
- 2
require_relative "integrations/sorbet"
- 2
require_relative "integrations/ahoy"
- 2
require_relative "integrations/active_model_serializers"
- 2
require_relative "integrations/dotenv"
- 2
require_relative "integrations/puma"
- 2
module LogStruct
  module Integrations
    extend T::Sig

    # Register generic initializers on the Railtie so integration wiring
    # stays centralized (boot replay interception and resolution).
    sig { params(railtie: T.untyped).void }
    def self.setup_initializers(railtie)
      # Intercept boot-time replays (e.g. dotenv) before those railties run
      railtie.initializer "logstruct.intercept_boot_replays", before: "dotenv" do
        LogStruct::Integrations::Dotenv.intercept_logger_setter!
      end
      # Decide which set of boot logs to emit after user initializers
      railtie.initializer "logstruct.resolve_boot_logs", after: :load_config_initializers do
        LogStruct::Integrations::Dotenv.resolve_boot_logs!
      end
    end

    # Set up integrations for the given stage (:non_middleware, :middleware,
    # or :all). Raises ArgumentError on an unknown stage.
    sig { params(stage: Symbol).void }
    def self.setup_integrations(stage: :all)
      config = LogStruct.config
      case stage
      when :non_middleware
        setup_non_middleware_integrations(config)
      when :middleware
        setup_middleware_integrations(config)
      when :all
        setup_non_middleware_integrations(config)
        setup_middleware_integrations(config)
      else
        raise ArgumentError, "Unknown integration stage: #{stage}"
      end
    end

    # Install every enabled integration that does not require Rack middleware.
    sig { params(config: LogStruct::Configuration).void }
    def self.setup_non_middleware_integrations(config)
      flags = config.integrations
      Integrations::Lograge.setup(config) if flags.enable_lograge
      Integrations::ActionMailer.setup(config) if flags.enable_actionmailer
      Integrations::ActiveJob.setup(config) if flags.enable_activejob
      Integrations::ActiveRecord.setup(config) if flags.enable_sql_logging
      Integrations::Sidekiq.setup(config) if flags.enable_sidekiq
      Integrations::GoodJob.setup(config) if flags.enable_goodjob
      Integrations::Ahoy.setup(config) if flags.enable_ahoy
      Integrations::ActiveModelSerializers.setup(config) if flags.enable_active_model_serializers
      Integrations::Shrine.setup(config) if flags.enable_shrine
      Integrations::ActiveStorage.setup(config) if flags.enable_activestorage
      Integrations::CarrierWave.setup(config) if flags.enable_carrierwave
      Integrations::Sorbet.setup(config) if flags.enable_sorbet_error_handlers
      # Dotenv additionally checks the global enabled flag
      Integrations::Dotenv.setup(config) if config.enabled && flags.enable_dotenv
      Integrations::Puma.setup(config) if flags.enable_puma
    end

    # Install the Rack-middleware-based integrations.
    sig { params(config: LogStruct::Configuration).void }
    def self.setup_middleware_integrations(config)
      Integrations::HostAuthorization.setup(config) if config.integrations.enable_host_authorization
      Integrations::RackErrorHandler.setup(config) if config.integrations.enable_rack_error_handler
    end

    private_class_method :setup_non_middleware_integrations, :setup_middleware_integrations
  end
end
# typed: strict
# frozen_string_literal: true
begin
- 2
require "action_mailer"
rescue LoadError
# actionmailer gem is not available, integration will be skipped
end
- 2
if defined?(::ActionMailer)
- 2
require "logger"
- 2
require_relative "action_mailer/metadata_collection"
- 2
require_relative "action_mailer/event_logging"
- 2
require_relative "action_mailer/error_handling"
end
- 2
module LogStruct
  module Integrations
    # ActionMailer integration for structured logging.
    module ActionMailer
      extend T::Sig
      extend IntegrationInterface

      # Set up ActionMailer structured logging. Returns true when installed,
      # nil when ActionMailer is absent or the integration is disabled.
      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
      def self.setup(config)
        return nil unless defined?(::ActionMailer)
        return nil unless config.enabled
        return nil unless config.integrations.enable_actionmailer

        # Silence default ActionMailer logs (we replace the logging with our
        # own structured callbacks).
        if defined?(::ActionMailer::Base)
          ::ActionMailer::Base.logger = ::Logger.new(File::NULL)
        end

        # Register our modules at the class level so all mailers use them.
        ActiveSupport.on_load(:action_mailer) do
          prepend LogStruct::Integrations::ActionMailer::EventLogging
          prepend LogStruct::Integrations::ActionMailer::ErrorHandling
          prepend LogStruct::Integrations::ActionMailer::MetadataCollection
        end

        # If ActionMailer::Base is already loaded the on_load hooks won't
        # run, so apply the modules directly as well.
        if defined?(::ActionMailer::Base)
          ::ActionMailer::Base.prepend(LogStruct::Integrations::ActionMailer::EventLogging)
          ::ActionMailer::Base.prepend(LogStruct::Integrations::ActionMailer::ErrorHandling)
          ::ActionMailer::Base.prepend(LogStruct::Integrations::ActionMailer::MetadataCollection)
        end
        true
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
module LogStruct
  module Integrations
    module ActionMailer
      # Handles error handling for ActionMailer
      #
      # IMPORTANT LIMITATIONS:
      # 1. This module must be included BEFORE users define rescue_from handlers
      #    to ensure proper handler precedence (user handlers are checked first)
      # 2. Rails rescue_from handlers don't bubble to parent class handlers after reraise
      # 3. Handler order matters: Rails checks rescue_from handlers in reverse declaration order
      module ErrorHandling
        extend T::Sig
        extend ActiveSupport::Concern

        # Set when a delivery failed and the error was swallowed, so later
        # callbacks (e.g. "delivered" logging) can be skipped.
        sig { returns(T.nilable(T::Boolean)) }
        attr_accessor :logstruct_mail_failed

        # NOTE: rescue_from handlers are checked in reverse order of declaration.
        # We want LogStruct handlers to be checked AFTER user handlers (lower
        # priority), so ours must be added BEFORE user handlers are declared.
        sig { params(base: T.untyped).void }
        def self.install_handler(base)
          # Only add the handler once per class
          return if base.instance_variable_get(:@_logstruct_handler_installed)
          # Add our handler FIRST so it has lower priority than user handlers
          base.rescue_from StandardError, with: :log_and_reraise_error
          # Mark as installed to prevent duplicates
          base.instance_variable_set(:@_logstruct_handler_installed, true)
        end

        included do
          LogStruct::Integrations::ActionMailer::ErrorHandling.install_handler(self)
        end

        # Also support prepended (used by tests and manual setup)
        sig { params(base: T.untyped).void }
        def self.prepended(base)
          install_handler(base)
        end

        protected

        # Just log the error without reporting or retrying
        sig { params(ex: StandardError).void }
        def log_and_ignore_error(ex)
          self.logstruct_mail_failed = true
          log_email_delivery_error(ex, notify: false, report: false, reraise: false)
        end

        # Log and report to error service, but doesn't reraise.
        sig { params(ex: StandardError).void }
        def log_and_report_error(ex)
          log_email_delivery_error(ex, notify: false, report: true, reraise: false)
        end

        # Log, report to error service, and reraise for retry
        sig { params(ex: StandardError).void }
        def log_and_reraise_error(ex)
          log_email_delivery_error(ex, notify: false, report: true, reraise: true)
        end

        private

        # Build an ActionMailer-specific error log struct.
        # FIX: centralizes the struct construction that was previously
        # triplicated across log_structured_error, handle_error_notifications,
        # and log_notification_event. The caller seeds context_data; this
        # method enriches it with metadata and assembles the struct.
        sig do
          params(
            mailer: T.untyped,
            error: StandardError,
            message: String,
            context_data: T::Hash[Symbol, T.untyped],
            level: T.untyped
          ).returns(Log::ActionMailer::Error)
        end
        def build_mailer_error_log(mailer, error, message:, context_data:, level: nil)
          # Get message if available
          mailer_message = mailer.respond_to?(:message) ? mailer.message : nil
          # Universal mailer fields (attachment count, etc.)
          message_data = {}
          MetadataCollection.add_message_metadata(mailer, message_data)
          # App-specific context (ids, tags, request/job ids)
          MetadataCollection.add_context_metadata(mailer, context_data)
          kwargs = {
            to: mailer_message&.to,
            from: mailer_message&.from&.first,
            subject: mailer_message&.subject,
            message_id: extract_message_id_from_mailer(mailer),
            mailer_class: mailer.class.to_s,
            mailer_action: mailer.respond_to?(:action_name) ? mailer.action_name&.to_s : nil,
            attachment_count: message_data[:attachment_count],
            error_class: error.class,
            message: message,
            backtrace: error.backtrace,
            additional_data: context_data.presence,
            timestamp: Time.now
          }
          # Only pass level when explicitly requested (notification events)
          kwargs[:level] = level unless level.nil?
          Log::ActionMailer::Error.new(**kwargs)
        end

        # Handle an error from a mailer: build the structured error and log it.
        sig { params(mailer: T.untyped, error: StandardError, message: String).void }
        def log_structured_error(mailer, error, message)
          exception_data = build_mailer_error_log(mailer, error, message: message, context_data: {})
          LogStruct.error(exception_data)
        end

        # Extract message ID from the mailer
        sig { params(mailer: T.untyped).returns(T.nilable(String)) }
        def extract_message_id_from_mailer(mailer)
          return nil unless mailer.respond_to?(:message)
          mail_message = mailer.message
          return nil unless mail_message.respond_to?(:message_id)
          mail_message.message_id
        end

        # Log when email delivery fails
        sig { params(error: StandardError, notify: T::Boolean, report: T::Boolean, reraise: T::Boolean).void }
        def log_email_delivery_error(error, notify: false, report: true, reraise: true)
          # Generate appropriate error message
          message = error_message_for(error, reraise)
          # Use structured error logging
          log_structured_error(self, error, message)
          # Handle notifications and reporting
          handle_error_notifications(error, notify, report, reraise)
        end

        # Generate appropriate error message based on error handling strategy
        sig { params(error: StandardError, reraise: T::Boolean).returns(String) }
        def error_message_for(error, reraise)
          if reraise
            "#{error.class}: Email delivery error, will retry. Recipients: #{recipients(error)}. Error message: #{error.message}"
          else
            "#{error.class}: Cannot send email to #{recipients(error)}. Error message: #{error.message}"
          end
        end

        # Handle error notifications, reporting, and reraising
        sig { params(error: StandardError, notify: T::Boolean, report: T::Boolean, reraise: T::Boolean).void }
        def handle_error_notifications(error, notify, report, reraise)
          # Log a notification event if requested
          log_notification_event(error) if notify
          # Report to error reporting service if requested
          if report
            exception_data = build_mailer_error_log(
              self, error,
              message: error.message,
              context_data: {recipients: recipients(error)}
            )
            LogStruct.error(exception_data)
            # Call the error handler with flat context for compatibility
            context = {
              mailer_class: self.class.to_s,
              mailer_action: respond_to?(:action_name) ? action_name : nil,
              recipients: recipients(error)
            }
            LogStruct.handle_exception(error, source: Source::Mailer, context: context)
          end
          # Re-raise the error if requested
          Kernel.raise error if reraise
        end

        # Log a notification event that can be picked up by external systems
        sig { params(error: StandardError).void }
        def log_notification_event(error)
          exception_data = build_mailer_error_log(
            self, error,
            message: error.message,
            context_data: {
              mailer: self.class.to_s,
              action: action_name&.to_s,
              recipients: recipients(error)
            },
            level: Level::Info
          )
          # Log at info level since this is not a critical error
          LogStruct.info(exception_data)
        end

        # Extract recipient info from the error if available
        sig { params(error: StandardError).returns(String) }
        def recipients(error)
          if error.respond_to?(:recipients) && T.unsafe(error).recipients.present?
            T.unsafe(error).recipients.join(", ")
          else
            "unknown"
          end
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
module LogStruct
  module Integrations
    module ActionMailer
      # Handles logging of email delivery events.
      module EventLogging
        extend ActiveSupport::Concern
        extend T::Sig
        extend T::Helpers

        requires_ancestor { ::ActionMailer::Base }
        requires_ancestor { ErrorHandling }

        included do
          T.bind(self, T.class_of(::ActionMailer::Base))
          # Add callbacks for delivery events
          before_deliver :log_email_delivery
          after_deliver :log_email_delivered
        end

        # Our integration uses prepend, so register the callbacks there too.
        if respond_to?(:prepended)
          prepended do
            T.bind(self, T.class_of(::ActionMailer::Base))
            # Add callbacks for delivery events
            before_deliver :log_email_delivery
            after_deliver :log_email_delivered
          end
        end

        protected

        # Log when an email is about to be delivered.
        sig { void }
        def log_email_delivery
          log_mailer_event(Event::Delivery)
        end

        # Log when an email is delivered. Skipped when the delivery failed
        # and the error was handled with log_and_ignore_error.
        sig { void }
        def log_email_delivered
          return if logstruct_mail_failed
          log_mailer_event(Event::Delivered)
        end

        private

        # Log a mailer event with the given event type, returning the log
        # struct (or nil for unknown event types).
        # NOTE(review): the `level` parameter is currently unused — events
        # are always logged via LogStruct.info.
        sig { params(event_type: LogStruct::Event, level: Symbol, additional_data: T::Hash[Symbol, T.untyped]).returns(T.untyped) }
        def log_mailer_event(event_type, level = :info, additional_data = {})
          # Get the mail object (self refers to the mailer instance)
          mail_obj = message if respond_to?(:message)
          # Universal mailer fields (attachment count, etc.)
          message_data = {}
          MetadataCollection.add_message_metadata(self, message_data)
          # App-specific context data for additional_data
          context_data = {}
          MetadataCollection.add_context_metadata(self, context_data)
          context_data.merge!(additional_data) if additional_data.present?

          # Email fields (filtered downstream if email_addresses=true)
          base_fields = Log::ActionMailer::BaseFields.new(
            to: mail_obj&.to,
            from: mail_obj&.from&.first,
            subject: mail_obj&.subject,
            message_id: extract_message_id,
            mailer_class: self.class.to_s,
            mailer_action: action_name.to_s,
            attachment_count: message_data[:attachment_count]
          )

          log = case event_type
          when Event::Delivery
            Log::ActionMailer::Delivery.new(
              **base_fields.to_kwargs,
              additional_data: context_data.presence,
              timestamp: Time.now
            )
          when Event::Delivered
            Log::ActionMailer::Delivered.new(
              **base_fields.to_kwargs,
              additional_data: context_data.presence,
              timestamp: Time.now
            )
          else
            return
          end
          LogStruct.info(log)
          log
        end

        # Extract the Message-ID from the underlying mail object.
        sig { returns(T.nilable(String)) }
        def extract_message_id
          return nil unless respond_to?(:message)
          mail_obj = message
          return nil unless mail_obj.respond_to?(:message_id)
          mail_obj.message_id
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
module LogStruct
  module Integrations
    module ActionMailer
      # Collects metadata for email logging.
      module MetadataCollection
        extend T::Sig

        # Add message-specific metadata (attachment count) to log data.
        sig { params(mailer: T.untyped, log_data: T::Hash[Symbol, T.untyped]).void }
        def self.add_message_metadata(mailer, log_data)
          message = mailer.respond_to?(:message) ? mailer.message : nil
          log_data[:attachment_count] = message ? (message.attachments&.count || 0) : 0
        end

        # Add context metadata (ids, tags, request/job ids) to log data.
        sig { params(mailer: T.untyped, log_data: T::Hash[Symbol, T.untyped]).void }
        def self.add_context_metadata(mailer, log_data)
          # Account/record id information if available (but not user email)
          extract_ids_to_log_data(mailer, log_data)
          # Current tags from ActiveJob or ActionMailer
          add_current_tags_to_log_data(log_data)
        end

        # Copy configured mailer instance-variable ids into log data.
        sig { params(mailer: T.untyped, log_data: T::Hash[Symbol, T.untyped]).void }
        def self.extract_ids_to_log_data(mailer, log_data)
          # Use the configured ivar-name => log-key mapping
          LogStruct.config.integrations.actionmailer_id_mapping.each do |ivar_name, log_key|
            ivar = :"@#{ivar_name}"
            next unless mailer.instance_variable_defined?(ivar)
            obj = mailer.instance_variable_get(ivar)
            log_data[log_key] = obj.id if obj.respond_to?(:id)
          end
        end

        # Add current logging tags, request id, and job id to log data.
        sig { params(log_data: T::Hash[Symbol, T.untyped]).void }
        def self.add_current_tags_to_log_data(log_data)
          # Tags from TaggedLogging, falling back to thread-local storage
          tags = if ::ActiveSupport::TaggedLogging.respond_to?(:current_tags)
            T.unsafe(::ActiveSupport::TaggedLogging).current_tags
          else
            Thread.current[:activesupport_tagged_logging_tags] || []
          end
          log_data[:tags] = tags if tags.present?

          # request_id from ActionDispatch if available
          if ::ActionDispatch::Request.respond_to?(:current_request_id) &&
              T.unsafe(::ActionDispatch::Request).current_request_id.present?
            log_data[:request_id] = T.unsafe(::ActionDispatch::Request).current_request_id
          end

          # job_id from ActiveJob if available
          if defined?(::ActiveJob::Logging) && ::ActiveJob::Logging.respond_to?(:job_id) &&
              T.unsafe(::ActiveJob::Logging).job_id.present?
            log_data[:job_id] = T.unsafe(::ActiveJob::Logging).job_id
          end
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
begin
- 2
require "active_job"
- 2
require "active_job/log_subscriber"
rescue LoadError
# ActiveJob gem is not available, integration will be skipped
end
- 2
require_relative "active_job/log_subscriber" if defined?(::ActiveJob::LogSubscriber)
- 2
module LogStruct
  module Integrations
    # ActiveJob integration for structured logging.
    module ActiveJob
      extend T::Sig
      extend IntegrationInterface

      # Swap ActiveJob's default text log subscriber for our structured one.
      # Returns true when installed, nil when skipped.
      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
      def self.setup(config)
        return nil unless defined?(::ActiveJob::LogSubscriber)
        return nil unless config.enabled
        return nil unless config.integrations.enable_activejob

        ::ActiveSupport.on_load(:active_job) do
          # Detach the default text formatter
          ::ActiveJob::LogSubscriber.detach_from :active_job
          # Attach our structured formatter
          Integrations::ActiveJob::LogSubscriber.attach_to :active_job
        end
        true
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../log/active_job"
- 2
require_relative "../../log/error"
- 2
module LogStruct
- 2
module Integrations
- 2
module ActiveJob
# Structured logging for ActiveJob
- 2
class LogSubscriber < ::ActiveJob::LogSubscriber
- 2
extend T::Sig
- 2
sig { params(event: ::ActiveSupport::Notifications::Event).void }
- 2
def enqueue(event)
job = T.cast(event.payload[:job], ::ActiveJob::Base)
ts = event.time ? Time.at(event.time) : Time.now
base_fields = build_base_fields(job)
logger.info(Log::ActiveJob::Enqueue.new(
**base_fields.to_kwargs,
timestamp: ts
))
end
- 2
sig { params(event: ::ActiveSupport::Notifications::Event).void }
- 2
def enqueue_at(event)
job = T.cast(event.payload[:job], ::ActiveJob::Base)
ts = event.time ? Time.at(event.time) : Time.now
base_fields = build_base_fields(job)
logger.info(Log::ActiveJob::Schedule.new(
**base_fields.to_kwargs,
scheduled_at: job.scheduled_at,
timestamp: ts
))
end
- 2
sig { params(event: ::ActiveSupport::Notifications::Event).void }
- 2
def perform(event)
job = T.cast(event.payload[:job], ::ActiveJob::Base)
exception = event.payload[:exception_object]
if exception
# Log the exception with the job context
log_exception(exception, job, event)
else
start_float = event.time
end_float = event.end
ts = start_float ? Time.at(start_float) : Time.now
finished_at = end_float ? Time.at(end_float) : Time.now
base_fields = build_base_fields(job)
logger.info(Log::ActiveJob::Finish.new(
**base_fields.to_kwargs,
duration_ms: event.duration.to_f,
finished_at: finished_at,
timestamp: ts
))
end
end
- 2
sig { params(event: ::ActiveSupport::Notifications::Event).void }
- 2
def perform_start(event)
job = T.cast(event.payload[:job], ::ActiveJob::Base)
ts = event.time ? Time.at(event.time) : Time.now
started_at = ts
attempt = job.executions
base_fields = build_base_fields(job)
logger.info(Log::ActiveJob::Start.new(
**base_fields.to_kwargs,
started_at: started_at,
attempt: attempt,
timestamp: ts
))
end
- 2
private
- 2
sig { params(job: ::ActiveJob::Base).returns(Log::ActiveJob::BaseFields) }
- 2
def build_base_fields(job)
Log::ActiveJob::BaseFields.new(
job_id: job.job_id,
job_class: job.class.to_s,
queue_name: job.queue_name&.to_sym,
executions: job.executions,
provider_job_id: job.provider_job_id,
arguments: ((job.class.respond_to?(:log_arguments?) && job.class.log_arguments?) ? job.arguments : nil)
)
end
- 2
# Builds an error log entry from an exception raised during job execution,
# attaching the job's base fields as context. The notification event is
# accepted for signature compatibility but not used.
sig { params(exception: StandardError, job: ::ActiveJob::Base, _event: ::ActiveSupport::Notifications::Event).void }
def log_exception(exception, job, _event)
  context = build_base_fields(job).to_kwargs
  logger.error(Log.from_exception(Source::Job, exception, context))
end
- 2
# Delegate to ActiveJob's configured logger so structured entries flow
# through whatever logger the host application installed for jobs.
sig { returns(T.untyped) }
- 2
def logger
::ActiveJob::Base.logger
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
- 2
require "active_support/notifications"
- 2
module LogStruct
- 2
module Integrations
# ActiveModelSerializers integration. Subscribes to AMS notifications and
# emits structured logs with serializer/adapter/duration details.
module ActiveModelSerializers
  extend T::Sig

  # Hooks into any ActiveSupport::Notifications event whose name ends in
  # ".active_model_serializers" and emits one structured log per render.
  # Returns nil when AS::Notifications or AMS itself is unavailable, true
  # once the subscription is installed.
  sig { params(config: LogStruct::Configuration).returns(T.nilable(TrueClass)) }
  def self.setup(config)
    return nil unless defined?(::ActiveSupport::Notifications)
    # Only activate when AMS itself is loaded
    return nil unless defined?(::ActiveModelSerializers)

    ::ActiveSupport::Notifications.subscribe(/\.active_model_serializers\z/) do |_name, started, finished, _unique_id, payload|
      # started/finished are Time instances; report the gap in milliseconds
      elapsed_ms = ((finished - started) * 1000.0).round(3)
      serializer = payload[:serializer] || payload[:serializer_class]
      adapter = payload[:adapter]
      rendered = payload[:resource] || payload[:object]
      LogStruct.info(
        LogStruct::Log::ActiveModelSerializers.new(
          message: "ams.render",
          serializer: serializer&.to_s,
          adapter: adapter&.to_s,
          resource_class: rendered&.class&.name,
          duration_ms: elapsed_ms,
          timestamp: started
        )
      )
    rescue => e
      # Never raise from a notification handler
      LogStruct.handle_exception(e, source: LogStruct::Source::Rails, context: {integration: :active_model_serializers})
    end
    true
  end
end
end
end
# typed: strict
# frozen_string_literal: true
- 2
require "active_support/notifications"
- 2
module LogStruct
- 2
module Integrations
# ActiveRecord Integration for SQL Query Logging
#
# This integration captures and structures all SQL queries executed through ActiveRecord,
# providing detailed performance and debugging information in a structured format.
#
# ## Features:
# - Captures all SQL queries with execution time
# - Safely filters sensitive data from bind parameters
# - Extracts database operation metadata
# - Provides connection pool monitoring information
# - Identifies query types and table names
#
# ## Performance Considerations:
# - Minimal overhead on query execution
# - Async logging prevents I/O blocking
# - Configurable to disable in production if needed
# - Smart filtering reduces log volume for repetitive queries
#
# ## Security:
# - SQL queries are always parameterized (safe)
# - Bind parameters filtered through LogStruct's param filters
# - Sensitive patterns automatically scrubbed
#
# ## Configuration:
# ```ruby
# LogStruct.configure do |config|
# config.integrations.enable_sql_logging = true
# config.integrations.sql_slow_query_threshold = 100.0 # ms
# config.integrations.sql_log_bind_params = false # disable in production
# end
# ```
- 2
module ActiveRecord
- 2
extend T::Sig
- 2
extend IntegrationInterface
# Track subscription state keyed to the current Notifications.notifier instance
- 2
State = ::Struct.new(:subscribed, :notifier_id)
- 2
STATE = T.let(State.new(false, nil), State)
# Set up the SQL query logging integration. Returns nil (skipped) when SQL
# logging is disabled or ActiveRecord is not loaded; true once subscribed.
sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
def self.setup(config)
  return nil unless config.integrations.enable_sql_logging
  return nil unless defined?(::ActiveRecord::Base)

  # Detach Rails' default ActiveRecord log subscriber so SQL is not also
  # emitted as duplicate/unstructured debug lines; the sql.active_record
  # notifications still reach us via ActiveSupport.
  if defined?(::ActiveRecord::LogSubscriber)
    begin
      ::ActiveRecord::LogSubscriber.detach_from(:active_record)
    rescue => e
      LogStruct.handle_exception(e, source: LogStruct::Source::Internal)
    end
  end

  # Verbose query logs ("↳ caller") are noisy and unstructured; LogStruct
  # already carries structured context, so turn them off when supported.
  if ::ActiveRecord::Base.respond_to?(:verbose_query_logs=)
    T.unsafe(::ActiveRecord::Base).verbose_query_logs = false
  end

  subscribe_to_sql_notifications
  true
end
- 2
# NOTE(review): `private_class_method` with no arguments is a no-op in Ruby —
# unlike `private`, it does NOT change the visibility of the singleton
# methods defined below. Method names (e.g.
# `private_class_method :subscribe_to_sql_notifications`) were presumably
# intended; confirm before relying on these methods being private.
private_class_method
# Subscribe to ActiveRecord's sql.active_record notifications.
# Safe to call repeatedly: re-subscribes only when the AS::Notifications
# notifier object has been replaced since the previous subscription.
sig { void }
def self.subscribe_to_sql_notifications
  notifier = ::ActiveSupport::Notifications.notifier
  notifier_id = notifier&.object_id
  return if STATE.subscribed && STATE.notifier_id == notifier_id

  ::ActiveSupport::Notifications.subscribe("sql.active_record") do |name, start, finish, id, payload|
    handle_sql_event(name, start, finish, id, payload)
  rescue => error
    # Never let logging errors escape into query execution
    LogStruct.handle_exception(error, source: LogStruct::Source::Internal)
  end

  STATE.subscribed = true
  STATE.notifier_id = notifier_id
end
# Process one sql.active_record notification and emit a structured SQL log.
# Skips schema/cache/internal queries, and — when a slow-query threshold is
# configured — anything that finished faster than the threshold.
sig { params(name: String, start: T.untyped, finish: T.untyped, id: String, payload: T::Hash[Symbol, T.untyped]).void }
def self.handle_sql_event(name, start, finish, id, payload)
  return if skip_query?(payload)

  elapsed_ms = ((finish - start) * 1000.0).round(2)
  threshold = LogStruct.config.integrations.sql_slow_query_threshold
  return if threshold&.positive? && elapsed_ms < threshold

  LogStruct.info(
    Log::SQL.new(
      message: format_sql_message(payload),
      source: Source::App,
      event: Event::Database,
      sql: payload[:sql]&.strip || "",
      name: payload[:name] || "SQL Query",
      duration_ms: elapsed_ms,
      row_count: extract_row_count(payload),
      adapter: extract_adapter_name(payload),
      bind_params: extract_and_filter_binds(payload),
      database_name: extract_database_name(payload),
      connection_pool_size: extract_pool_size(payload),
      active_connections: extract_active_connections(payload),
      operation_type: extract_operation_type(payload),
      table_names: extract_table_names(payload)
    )
  )
end
# True when a query is noise we never want to log: Rails schema/cache
# queries, internal bookkeeping tables, and SHOW/DESCRIBE/EXPLAIN
# introspection statements.
sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T::Boolean) }
def self.skip_query?(payload)
  label = payload[:name]
  statement = payload[:sql]
  # Rails schema/cache annotations
  return true if label&.include?("SCHEMA")
  return true if label&.include?("CACHE")
  # Rails internal bookkeeping tables
  return true if statement&.include?("schema_migrations")
  return true if statement&.include?("ar_internal_metadata")
  # Introspection statements
  return true if statement&.match?(/\A\s*(SHOW|DESCRIBE|EXPLAIN)\s/i)
  false
end
# Human-readable summary line for the SQL log, e.g. "User Load executed".
sig { params(payload: T::Hash[Symbol, T.untyped]).returns(String) }
def self.format_sql_message(payload)
  "#{payload[:name] || "SQL Query"} executed"
end
# Row count from the payload, or nil when absent or not an Integer.
sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(Integer)) }
def self.extract_row_count(payload)
  count = payload[:row_count]
  return count if count.is_a?(Integer)
  nil
end
# Short adapter class name (the last namespace segment of the connection's
# class, e.g. "PostgreSQLAdapter"), or nil without a connection.
sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(String)) }
def self.extract_adapter_name(payload)
  conn = payload[:connection]
  return nil unless conn
  conn.class.name&.split("::")&.last
end
# Bind parameters with sensitive-looking string values replaced by
# "[FILTERED]". Returns nil when bind logging is disabled or no binds
# are present in the payload.
sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(T::Array[T.untyped])) }
def self.extract_and_filter_binds(payload)
  return nil unless LogStruct.config.integrations.sql_log_bind_params
  # Prefer type_casted_binds — they are more readable than raw binds
  bind_values = payload[:type_casted_binds] || payload[:binds]
  return nil unless bind_values
  bind_values.map { |bind| filter_bind_parameter(bind) }
end
# Current database name via the connection, when it exposes one.
# Best-effort: any adapter error is swallowed and nil is returned.
sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(String)) }
def self.extract_database_name(payload)
  conn = payload[:connection]
  return nil unless conn
  if conn.respond_to?(:current_database)
    conn.current_database
  elsif conn.respond_to?(:database)
    conn.database
  end
rescue
  # Adapters differ; never fail logging over metadata extraction
  nil
end
# Configured connection-pool size, when the connection exposes a pool.
sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(Integer)) }
def self.extract_pool_size(payload)
  conn = payload[:connection]
  return nil unless conn
  return nil unless conn.respond_to?(:pool)
  conn.pool&.size
rescue
  nil
end
# Number of busy connections from the pool's stat hash, when available.
sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(Integer)) }
def self.extract_active_connections(payload)
  conn = payload[:connection]
  return nil unless conn
  return nil unless conn.respond_to?(:pool)
  conn.pool&.stat&.[](:busy)
rescue
  nil
end
# First SQL keyword, upcased (SELECT, INSERT, ...), or nil without SQL.
sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(String)) }
def self.extract_operation_type(payload)
  statement = payload[:sql]
  return nil unless statement
  statement.strip.match(/\A\s*(\w+)/i)&.captures&.first&.upcase
end
# Table names referenced by the query, extracted with a simple regex over
# FROM/JOIN/UPDATE/INTO clauses. Best-effort (covers common cases only);
# returns nil when no table names are found.
sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(T::Array[String])) }
def self.extract_table_names(payload)
  statement = payload[:sql]
  return nil unless statement
  names = statement
    .scan(/(?:FROM|JOIN|UPDATE|INTO|DELETE\s+FROM)\s+["`]?(\w+)["`]?/i)
    .map(&:first)
    .uniq
  names.empty? ? nil : names
end
# Replace string bind values that look sensitive with a "[FILTERED]"
# placeholder; non-string values pass through untouched.
sig { params(value: T.untyped).returns(T.untyped) }
def self.filter_bind_parameter(value)
  return value unless value.is_a?(String)
  looks_sensitive?(value) ? "[FILTERED]" : value
end
# Heuristic: long opaque strings, hex digests, Base64 blobs, or anything
# mentioning password/secret/token/key/auth is treated as sensitive.
sig { params(value: String).returns(T::Boolean) }
def self.looks_sensitive?(value)
  return true if value.length > 50 # very long strings may be tokens
  return true if value.match?(/\A[a-f0-9]{32,}\z/i) # MD5/SHA-style digests
  return true if value.match?(/\A[A-Za-z0-9+\/]{20,}={0,2}\z/) # Base64 blobs
  value.match?(/(password|secret|token|key|auth)/i)
end
end
end
end
# typed: strict
# frozen_string_literal: true
- 2
require_relative "../enums/source"
- 2
require_relative "../enums/event"
- 2
require_relative "../log/active_storage"
- 2
module LogStruct
- 2
module Integrations
# Integration for ActiveStorage structured logging
- 2
module ActiveStorage
- 2
extend T::Sig
- 2
extend IntegrationInterface
# Set up ActiveStorage structured logging by subscribing to every
# `service_*.active_storage` notification. Returns nil when ActiveStorage
# is absent or the integration is disabled, true once subscribed.
sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
def self.setup(config)
  return nil unless defined?(::ActiveStorage)
  return nil unless config.enabled
  return nil unless config.integrations.enable_activestorage

  ::ActiveSupport::Notifications.subscribe(/service_.*\.active_storage/) do |*args|
    process_active_storage_event(::ActiveSupport::Notifications::Event.new(*args), config)
  end
  true
end
- 2
# NOTE(review): `private_class_method` with no arguments is a no-op — it does
# not make the singleton methods defined below private. Method names were
# presumably intended here; confirm before relying on this visibility.
private_class_method
# Process ActiveStorage events and create structured logs.
#
# Maps `service_*` notification names onto LogStruct event types and builds
# the matching Log::ActiveStorage entry from the notification payload.
# Unrecognized events fall back to a Metadata entry with Event::Unknown.
#
# Fix: a previous revision computed `event_name.sub(/^service_/, "").to_sym`
# and discarded the result — that dead statement has been removed.
sig { params(event: ActiveSupport::Notifications::Event, config: LogStruct::Configuration).void }
def self.process_active_storage_event(event, config)
  return unless config.enabled
  return unless config.integrations.enable_activestorage

  # Extract key information from the event
  event_name = event.name.sub(/\.active_storage$/, "")
  service_name = event.payload[:service]
  duration_ms = event.duration

  # Map service events to log event types
  event_type = case event_name
  when "service_upload" then Event::Upload
  when "service_download", "service_download_chunk" then Event::Download
  when "service_delete", "service_delete_prefixed" then Event::Delete
  when "service_exist" then Event::Exist
  when "service_url" then Event::Url
  when "service_stream" then Event::Stream
  when "service_update_metadata" then Event::Metadata
  else Event::Unknown
  end

  # Create structured log event using generated classes
  log_data = case event_type
  when Event::Upload
    Log::ActiveStorage::Upload.new(
      storage: service_name.to_sym,
      file_id: event.payload[:key]&.to_s,
      checksum: event.payload[:checksum]&.to_s,
      duration_ms: duration_ms,
      metadata: event.payload[:metadata],
      filename: event.payload[:filename],
      mime_type: event.payload[:content_type],
      size: event.payload[:byte_size]
    )
  when Event::Download
    Log::ActiveStorage::Download.new(
      storage: service_name.to_sym,
      file_id: event.payload[:key]&.to_s,
      filename: event.payload[:filename],
      range: event.payload[:range],
      duration_ms: duration_ms
    )
  when Event::Delete
    Log::ActiveStorage::Delete.new(
      storage: service_name.to_sym,
      file_id: event.payload[:key]&.to_s
    )
  when Event::Exist
    Log::ActiveStorage::Exist.new(
      storage: service_name.to_sym,
      file_id: event.payload[:key]&.to_s,
      exist: event.payload[:exist]
    )
  when Event::Stream
    Log::ActiveStorage::Stream.new(
      storage: service_name.to_sym,
      file_id: event.payload[:key]&.to_s,
      prefix: event.payload[:prefix]
    )
  when Event::Url
    Log::ActiveStorage::Url.new(
      storage: service_name.to_sym,
      file_id: event.payload[:key]&.to_s,
      url: event.payload[:url]
    )
  else
    # Event::Metadata and Event::Unknown both fall back to a Metadata entry
    Log::ActiveStorage::Metadata.new(
      storage: service_name.to_sym,
      file_id: event.payload[:key]&.to_s,
      metadata: event.payload[:metadata]
    )
  end

  # Log structured data
  LogStruct.info(log_data)
end
end
end
end
# typed: strict
# frozen_string_literal: true
- 2
module LogStruct
- 2
module Integrations
# Ahoy analytics integration. If Ahoy is present, prepend a small hook to
# Ahoy::Tracker#track to emit a structured log for analytics events.
module Ahoy
  extend T::Sig

  # Installs a prepend hook on Ahoy::Tracker#track so every tracked
  # analytics event also produces a structured LogStruct::Log::Ahoy entry.
  # Returns nil when Ahoy is not loaded, true otherwise.
  #
  # Fix: a previous revision built a local `data` hash inside #track that
  # was never used — that dead code has been removed.
  sig { params(config: LogStruct::Configuration).returns(T.nilable(TrueClass)) }
  def self.setup(config)
    return nil unless defined?(::Ahoy)
    if defined?(::Ahoy::Tracker)
      mod = Module.new do
        extend T::Sig

        sig { params(name: T.untyped, properties: T.nilable(T::Hash[T.untyped, T.untyped]), options: T.untyped).returns(T.untyped) }
        def track(name, properties = nil, options = {})
          result = super
          begin
            # Emit a lightweight structured log about the analytics event
            LogStruct.info(
              LogStruct::Log::Ahoy.new(
                message: "ahoy.track",
                ahoy_event: T.must(T.let(name, T.nilable(String))),
                properties: T.let(
                  properties && properties.transform_keys { |k| k.to_sym },
                  T.nilable(T::Hash[Symbol, T.untyped])
                )
              )
            )
          rescue => e
            # Never raise from logging; rely on global error handling policies
            LogStruct.handle_exception(e, source: LogStruct::Source::App, context: {integration: :ahoy})
          end
          result
        end
      end
      T.unsafe(::Ahoy::Tracker).prepend(mod)
    end
    true
  end
end
end
end
# typed: strict
# frozen_string_literal: true
begin
- 2
require "carrierwave"
rescue LoadError
# CarrierWave gem is not available, integration will be skipped
end
- 2
module LogStruct
- 2
module Integrations
# CarrierWave integration for structured logging
module CarrierWave
  extend T::Sig
  extend IntegrationInterface

  # Set up CarrierWave structured logging by prepending LoggingMethods onto
  # CarrierWave::Uploader::Base. Returns nil when CarrierWave is absent or
  # the integration is disabled, true once installed.
  sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
  def self.setup(config)
    return nil unless defined?(::CarrierWave)
    return nil unless config.enabled
    return nil unless config.integrations.enable_carrierwave

    # Patch CarrierWave to add logging
    ::CarrierWave::Uploader::Base.prepend(LoggingMethods)
    true
  end

  # Methods to add logging to CarrierWave operations
  module LoggingMethods
    extend T::Sig
    extend T::Helpers

    requires_ancestor { ::CarrierWave::Uploader::Base }

    # Log file storage operations with timing and file metadata.
    # Fix: a previous revision also built an unused hash of file info here;
    # that dead code has been removed.
    sig { params(args: T.untyped).returns(T.untyped) }
    def store!(*args)
      start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
      result = super
      duration = Process.clock_gettime(Process::CLOCK_MONOTONIC) - start_time

      # Extract file information
      file_size = file.size if file.respond_to?(:size)

      # Log the store operation with structured data
      log_data = Log::CarrierWave::Upload.new(
        storage: storage.class.name.split("::").last.downcase.to_sym,
        file_id: identifier,
        filename: file.filename,
        mime_type: file.content_type,
        size: file_size,
        duration_ms: (duration * 1000.0).to_f,
        uploader: self.class.name,
        model: model.class.name,
        mount_point: mounted_as.to_s,
        version: version_name.to_s,
        store_path: store_path,
        extension: file.extension
      )
      ::Rails.logger.info(log_data)
      result
    end

    # Log file retrieve operations. The Download log schema has no duration
    # field, so no timing is captured here.
    # Fix: two discarded `Process.clock_gettime` calls (dead code left over
    # from a timing implementation) have been removed.
    sig { params(identifier: T.untyped, args: T.untyped).returns(T.untyped) }
    def retrieve_from_store!(identifier, *args)
      result = super

      # Extract file information if available
      file_size = file.size if file&.respond_to?(:size)

      # Log the retrieve operation with structured data
      log_data = Log::CarrierWave::Download.new(
        storage: storage.class.name.split("::").last.downcase.to_sym,
        file_id: identifier,
        filename: file&.filename,
        mime_type: file&.content_type,
        size: file_size,
        uploader: self.class.name,
        model: model.class.name,
        mount_point: mounted_as.to_s,
        version: version_name.to_s,
        store_path: store_path,
        extension: file&.extension
      )
      ::Rails.logger.info(log_data)
      result
    end
  end
end
end
end
# typed: strict
# frozen_string_literal: true
# rubocop:disable Sorbet/ConstantsFromStrings
- 2
require_relative "../boot_buffer"
- 2
require "pathname"
begin
- 2
require "dotenv-rails"
rescue LoadError
# Dotenv-rails gem is not available, integration will be skipped
end
- 2
module LogStruct
- 2
module Integrations
# Dotenv integration: emits structured logs for load/update/save/restore events
- 2
module Dotenv
- 2
extend T::Sig
- 2
extend IntegrationInterface
- 2
@original_logger_setter = T.let(nil, T.nilable(UnboundMethod))
# Internal state holder to avoid duplicate subscriptions in a Sorbet-friendly way
- 2
State = ::Struct.new(:subscribed)
- 2
STATE = T.let(State.new(false), State)
- 4
# Install the runtime dotenv subscriptions. Subscribes regardless of
# whether the dotenv gem is present so ActiveSupport::Notifications
# instrumentation is captured during both tests and runtime.
sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
def self.setup(config)
  subscribe!
  true
end
- 2
class << self
  extend T::Sig

  # Subscribe to dotenv's ActiveSupport::Notifications events and emit
  # structured logs: load/save/restore at info level, update at debug.
  # In the Rails test environment handler errors are re-raised so specs
  # can see them; otherwise they go to LogStruct.handle_exception.
  # Guarded by STATE so repeated calls subscribe only once.
  sig { void }
  def subscribe!
    # Guard against double subscription
    return if STATE.subscribed

    instrumenter = defined?(::ActiveSupport::Notifications) ? ::ActiveSupport::Notifications : nil
    return unless instrumenter

    instrumenter.subscribe("load.dotenv") do |*args|
      # Allow tests to stub Log::Dotenv.new to force an error path
      LogStruct::Log::Dotenv.new
      event = ::ActiveSupport::Notifications::Event.new(*args)
      abs = event.payload[:env].filename
      # Prefer a Rails-root-relative path for readability
      file = begin
        if defined?(::Rails) && ::Rails.respond_to?(:root) && ::Rails.root
          Pathname.new(abs).relative_path_from(Pathname.new(::Rails.root.to_s)).to_s
        else
          abs
        end
      rescue
        abs
      end
      ts = event.time ? Time.at(event.time) : Time.now
      LogStruct.info(Log::Dotenv::Load.new(file: file, timestamp: ts))
    rescue => e
      raise if defined?(::Rails) && ::Rails.respond_to?(:env) && ::Rails.env == "test"
      LogStruct.handle_exception(e, source: LogStruct::Source::Dotenv)
    end

    instrumenter.subscribe("update.dotenv") do |*args|
      LogStruct::Log::Dotenv.new
      event = ::ActiveSupport::Notifications::Event.new(*args)
      changed = event.payload[:diff].env.keys.map(&:to_s)
      ts = event.time ? Time.at(event.time) : Time.now
      LogStruct.debug(Log::Dotenv::Update.new(vars: changed, timestamp: ts))
    rescue => e
      raise if defined?(::Rails) && ::Rails.respond_to?(:env) && ::Rails.env == "test"
      LogStruct.handle_exception(e, source: LogStruct::Source::Dotenv)
    end

    instrumenter.subscribe("save.dotenv") do |*args|
      LogStruct::Log::Dotenv.new
      event = ::ActiveSupport::Notifications::Event.new(*args)
      ts = event.time ? Time.at(event.time) : Time.now
      LogStruct.info(Log::Dotenv::Save.new(snapshot: true, timestamp: ts))
    rescue => e
      raise if defined?(::Rails) && ::Rails.respond_to?(:env) && ::Rails.env == "test"
      LogStruct.handle_exception(e, source: LogStruct::Source::Dotenv)
    end

    instrumenter.subscribe("restore.dotenv") do |*args|
      LogStruct::Log::Dotenv.new
      event = ::ActiveSupport::Notifications::Event.new(*args)
      restored = event.payload[:diff].env.keys.map(&:to_s)
      ts = event.time ? Time.at(event.time) : Time.now
      LogStruct.info(Log::Dotenv::Restore.new(vars: restored, timestamp: ts))
    rescue => e
      raise if defined?(::Rails) && ::Rails.respond_to?(:env) && ::Rails.env == "test"
      LogStruct.handle_exception(e, source: LogStruct::Source::Dotenv)
    end

    STATE.subscribed = true
  end
end
# Early boot subscription to buffer structured logs until logger is ready
@@boot_subscribed = T.let(false, T::Boolean)

# Subscribes to dotenv notifications as early as possible and buffers the
# resulting structured entries in BootBuffer; resolve_boot_logs! later
# decides whether to flush or drop them. Prefers Dotenv's own instrumenter
# when the gem is loaded. Idempotent via @@boot_subscribed.
sig { void }
def self.setup_boot
  return if @@boot_subscribed
  return unless defined?(::ActiveSupport::Notifications)

  instrumenter = if Object.const_defined?(:Dotenv)
    dotenv = T.unsafe(Object.const_get(:Dotenv))
    dotenv.respond_to?(:instrumenter) ? T.unsafe(dotenv).instrumenter : ::ActiveSupport::Notifications
  else
    ::ActiveSupport::Notifications
  end

  instrumenter.subscribe("load.dotenv") do |*args|
    event = ::ActiveSupport::Notifications::Event.new(*args)
    abs = event.payload[:env].filename
    # Prefer a Rails-root-relative path for readability
    file = begin
      if defined?(::Rails) && ::Rails.respond_to?(:root) && ::Rails.root
        Pathname.new(abs).relative_path_from(Pathname.new(::Rails.root.to_s)).to_s
      else
        abs
      end
    rescue
      abs
    end
    ts = event.time ? Time.at(event.time) : Time.now
    LogStruct::BootBuffer.add(Log::Dotenv::Load.new(file: file, timestamp: ts))
  rescue => e
    LogStruct.handle_exception(e, source: LogStruct::Source::Dotenv)
  end

  instrumenter.subscribe("update.dotenv") do |*args|
    event = ::ActiveSupport::Notifications::Event.new(*args)
    changed = event.payload[:diff].env.keys.map(&:to_s)
    ts = event.time ? Time.at(event.time) : Time.now
    LogStruct::BootBuffer.add(Log::Dotenv::Update.new(vars: changed, timestamp: ts))
  rescue => e
    LogStruct.handle_exception(e, source: LogStruct::Source::Dotenv)
  end

  instrumenter.subscribe("save.dotenv") do |*args|
    event = ::ActiveSupport::Notifications::Event.new(*args)
    ts = event.time ? Time.at(event.time) : Time.now
    LogStruct::BootBuffer.add(Log::Dotenv::Save.new(snapshot: true, timestamp: ts))
  rescue => e
    LogStruct.handle_exception(e, source: LogStruct::Source::Dotenv)
  end

  instrumenter.subscribe("restore.dotenv") do |*args|
    event = ::ActiveSupport::Notifications::Event.new(*args)
    restored = event.payload[:diff].env.keys.map(&:to_s)
    ts = event.time ? Time.at(event.time) : Time.now
    LogStruct::BootBuffer.add(Log::Dotenv::Restore.new(vars: restored, timestamp: ts))
  rescue => e
    LogStruct.handle_exception(e, source: LogStruct::Source::Dotenv)
  end

  @@boot_subscribed = true
end
# Intercept Dotenv::Rails#logger= to defer replay until we resolve policy
#
# NOTE(review): dotenv's railtie replays buffered boot output once a logger
# is assigned. This patch stores the assigned logger in
# @logstruct_pending_dotenv_logger instead of applying it, so
# resolve_boot_logs! can later choose between dotenv's original replay and
# LogStruct's structured entries. The unpatched setter is saved in
# @original_logger_setter for the fallback path.
- 4
sig { void }
- 2
def self.intercept_logger_setter!
- 2
return unless Object.const_defined?(:Dotenv)
# Do not intercept when LogStruct is disabled; allow original dotenv replay
- 1
return unless LogStruct.enabled?
- 1
dotenv_mod = T.unsafe(Object.const_get(:Dotenv))
- 1
return unless dotenv_mod.const_defined?(:Rails)
- 1
klass = T.unsafe(dotenv_mod.const_get(:Rails))
# Idempotence guard: only patch Dotenv::Rails once per process
- 1
return if klass.instance_variable_defined?(:@_logstruct_replay_patched)
- 1
original = klass.instance_method(:logger=)
- 1
@original_logger_setter = original
- 1
mod = Module.new do
- 1
define_method :logger= do |new_logger|
# Defer replay: store desired logger, keep ReplayLogger as current
- 1
instance_variable_set(:@logstruct_pending_dotenv_logger, new_logger)
- 1
new_logger
end
# Accessor used by resolve_boot_logs! to read the deferred logger
- 1
define_method :logstruct_pending_dotenv_logger do
- 1
instance_variable_get(:@logstruct_pending_dotenv_logger)
end
end
- 1
klass.prepend(mod)
- 1
klass.instance_variable_set(:@_logstruct_replay_patched, true)
end
# Decide which boot logs to emit after user initializers
#
# Two outcomes once configuration is known:
# - Structured path: clear dotenv's buffered replay lines, hand the pending
#   logger back to the railtie, detach dotenv's LogSubscriber, install our
#   structured subscriptions, and flush the BootBuffer.
# - Original path: restore dotenv's own replay via the saved original
#   setter and discard the structured BootBuffer.
- 4
sig { void }
- 2
def self.resolve_boot_logs!
# If LogStruct is disabled, do not alter dotenv behavior at all
- 2
return unless LogStruct.enabled?
- 2
dotenv_mod = Object.const_defined?(:Dotenv) ? T.unsafe(Object.const_get(:Dotenv)) : nil
- 2
klass = dotenv_mod&.const_defined?(:Rails) ? T.unsafe(dotenv_mod.const_get(:Rails)) : nil
- 2
pending_logger = nil
- 2
railtie_instance = nil
- 2
if klass&.respond_to?(:instance)
- 1
railtie_instance = klass.instance
- 1
if railtie_instance.respond_to?(:logstruct_pending_dotenv_logger)
- 1
pending_logger = T.unsafe(railtie_instance).logstruct_pending_dotenv_logger
end
end
- 2
if LogStruct.enabled? && LogStruct.config.integrations.enable_dotenv
# Structured path
- 2
if pending_logger && railtie_instance
# Clear any buffered original logs
- 1
current_logger = railtie_instance.logger if railtie_instance.respond_to?(:logger)
# NOTE(review): matched by class-name suffix — presumably dotenv's
# ReplayLogger; verify against the dotenv-rails version in use.
- 1
if current_logger && current_logger.class.name.end_with?("ReplayLogger")
begin
- 1
logs = current_logger.instance_variable_get(:@logs)
- 1
logs.clear if logs.respond_to?(:clear)
rescue
# best effort
end
end
- 1
railtie_instance.config.dotenv.logger = pending_logger
end
# Detach original subscriber and subscribe runtime structured
- 2
if dotenv_mod&.const_defined?(:LogSubscriber)
- 1
T.unsafe(dotenv_mod.const_get(:LogSubscriber)).detach_from(:dotenv)
end
- 2
LogStruct::Integrations::Dotenv.subscribe!
- 2
require_relative "../boot_buffer"
- 2
LogStruct::BootBuffer.flush
else
# Original path: replay dotenv lines, drop structured buffer
if railtie_instance && @original_logger_setter
setter = @original_logger_setter
new_logger = pending_logger
# Fall back to a stdout logger tagged "dotenv" when requested via env
if new_logger.nil? && ENV["RAILS_LOG_TO_STDOUT"].to_s.strip != ""
require "logger"
require "active_support/tagged_logging"
new_logger = ActiveSupport::TaggedLogging.new(::Logger.new($stdout)).tagged("dotenv")
end
setter.bind_call(railtie_instance, new_logger) if new_logger
end
require_relative "../boot_buffer"
LogStruct::BootBuffer.clear
end
end
end
end
end
# Subscribe immediately to capture earliest dotenv events into BootBuffer
- 2
LogStruct::Integrations::Dotenv.setup_boot
# rubocop:enable Sorbet/ConstantsFromStrings
# typed: strict
# frozen_string_literal: true
begin
- 2
require "good_job"
rescue LoadError
# GoodJob gem is not available, integration will be skipped
end
- 2
require_relative "good_job/logger" if defined?(::GoodJob)
- 2
require_relative "good_job/log_subscriber" if defined?(::GoodJob)
- 2
module LogStruct
- 2
module Integrations
# GoodJob integration for structured logging
#
# GoodJob is a PostgreSQL-based ActiveJob backend that provides reliable,
# scalable job processing for Rails applications. This integration provides
# structured logging for all GoodJob operations.
#
# ## Features:
# - Structured logging for job execution lifecycle
# - Error tracking and retry logging
# - Performance metrics and timing data
# - Database operation logging
# - Thread and process tracking
# - Custom GoodJob logger with LogStruct formatting
#
# ## Integration Points:
# - Replaces GoodJob.logger with LogStruct-compatible logger
# - Subscribes to GoodJob's ActiveSupport notifications
# - Captures job execution events, errors, and performance metrics
# - Logs database operations and connection information
#
# ## Configuration:
# The integration is automatically enabled when GoodJob is detected and
# LogStruct configuration allows it. It can be disabled by setting:
#
# ```ruby
# config.integrations.enable_goodjob = false
# ```
- 2
module GoodJob
  extend T::Sig
  extend IntegrationInterface

  # Set up GoodJob structured logging
  #
  # Replaces GoodJob's default logger with a LogStruct-compatible one and
  # attaches our LogSubscriber to GoodJob's notifications.
  #
  # @param config [LogStruct::Configuration] The LogStruct configuration
  # @return [Boolean, nil] true if setup ran, nil if skipped
  sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
  def self.setup(config)
    return nil unless defined?(::GoodJob)
    return nil unless config.enabled
    return nil unless config.integrations.enable_goodjob

    # Replace GoodJob's logger with our structured logger
    configure_logger
    # Subscribe to GoodJob's ActiveSupport notifications
    subscribe_to_notifications
    true
  end

  # Configure GoodJob to use LogStruct's structured logger, and route
  # thread errors through it when GoodJob exposes that hook.
  #
  # Fix: the previous revision referenced the bare constant `GoodJob`,
  # which — inside LogStruct::Integrations::GoodJob — resolves via lexical
  # nesting to this integration module rather than the GoodJob gem, so the
  # logger was never actually replaced. `::GoodJob` targets the gem.
  sig { void }
  def self.configure_logger
    return unless defined?(::GoodJob)
    goodjob_module = T.unsafe(::GoodJob)
    # Replace GoodJob.logger with our structured logger if supported
    if goodjob_module.respond_to?(:logger=)
      goodjob_module.logger = LogStruct::Integrations::GoodJob::Logger.new("GoodJob")
    end
    # Configure error handling for thread errors if GoodJob supports it
    if goodjob_module.respond_to?(:on_thread_error=)
      goodjob_module.on_thread_error = ->(exception) do
        log_entry = LogStruct::Log::GoodJob::Error.new(
          error_class: exception.class.name,
          error_message: exception.message,
          backtrace: exception.backtrace,
          process_id: ::Process.pid,
          thread_id: Thread.current.object_id.to_s(36)
        )
        goodjob_module.logger.error(log_entry)
      end
    end
  end

  # Attach our LogSubscriber to GoodJob's ActiveSupport notifications.
  sig { void }
  def self.subscribe_to_notifications
    return unless defined?(::GoodJob)
    LogStruct::Integrations::GoodJob::LogSubscriber.attach_to :good_job
  end

  private_class_method :configure_logger
  private_class_method :subscribe_to_notifications
end
end
end
# typed: strict
# frozen_string_literal: true
begin
- 1
require "active_support/log_subscriber"
rescue LoadError
# ActiveSupport is not available, log subscriber will be skipped
end
- 1
require_relative "../../log/good_job"
- 1
require_relative "../../enums/event"
- 1
require_relative "../../enums/level"
- 1
module LogStruct
- 1
module Integrations
- 1
module GoodJob
# LogSubscriber for GoodJob ActiveSupport notifications
#
# This subscriber captures GoodJob's ActiveSupport notifications and converts
# them into structured LogStruct::Log::GoodJob entries. It provides detailed
# logging for job lifecycle events, performance metrics, and error tracking.
#
# ## Supported Events:
# - job.enqueue - Job queued for execution
# - job.start - Job execution started
# - job.finish - Job completed successfully
# - job.error - Job failed with error
# - job.retry - Job retry initiated
# - job.schedule - Job scheduled for future execution
#
# ## Event Data Captured:
# - Job identification (ID, class, queue)
# - Execution context (arguments, priority, scheduled time)
# - Performance metrics (execution time, wait time)
# - Error information (class, message, backtrace)
# - Process and thread information
- 1
class LogSubscriber < ::ActiveSupport::LogSubscriber
- 1
extend T::Sig
# Job enqueued event
# Handles the job-enqueued notification and emits a structured Enqueue entry.
sig { params(event: ::ActiveSupport::Notifications::Event).void }
def enqueue(event)
  payload = T.let(event.payload, T::Hash[Symbol, T.untyped])
  job = payload[:job]

  event_time = event.time
  emitted_at = event_time ? Time.at(event_time) : Time.now
  scheduled = job&.scheduled_at

  entry = Log::GoodJob::Enqueue.new(
    **build_base_fields(job, payload).to_kwargs,
    scheduled_at: scheduled ? Time.at(scheduled.to_i) : nil,
    duration_ms: event.duration.to_f,
    enqueue_caller: job&.enqueue_caller_location,
    timestamp: emitted_at
  )
  logger.info(entry)
end
# Job execution started event
# Handles the job-started notification and emits a structured Start entry.
#
# wait_ms is the queue latency in milliseconds: taken from the execution's
# own #wait_time when that method exists, otherwise derived from
# created_at/performed_at via calculate_wait_time.
sig { params(event: ::ActiveSupport::Notifications::Event).void }
def start(event)
  payload = T.let(event.payload, T::Hash[Symbol, T.untyped])
  job = payload[:job]
  execution = payload[:execution] || payload[:good_job_execution]
  base_fields = build_base_fields(job, payload)
  ts = event.time ? Time.at(event.time) : Time.now

  # FIX: `execution&.wait_time` only guards against nil, not against an
  # execution object that doesn't implement #wait_time (GoodJob versions
  # vary). Check respond_to? first, mirroring calculate_wait_time's guards.
  wt = execution.respond_to?(:wait_time) ? execution.wait_time : nil
  wt ||= calculate_wait_time(execution)

  logger.info(Log::GoodJob::Start.new(
    **base_fields.to_kwargs,
    wait_ms: wt ? (wt.to_f * 1000.0) : nil,
    scheduled_at: (job&.scheduled_at ? Time.at(job.scheduled_at.to_i) : nil),
    process_id: ::Process.pid,
    thread_id: Thread.current.object_id.to_s(36),
    timestamp: ts
  ))
end
# Job completed successfully event
# Handles the job-finished notification and emits a structured Finish entry.
sig { params(event: ::ActiveSupport::Notifications::Event).void }
def finish(event)
  payload = T.let(event.payload, T::Hash[Symbol, T.untyped])
  job = payload[:job]

  started_at = event.time ? Time.at(event.time) : Time.now
  finished_at = event.end ? Time.at(event.end) : Time.now

  entry = Log::GoodJob::Finish.new(
    **build_base_fields(job, payload).to_kwargs,
    duration_ms: event.duration.to_f,
    finished_at: finished_at,
    process_id: ::Process.pid,
    thread_id: Thread.current.object_id.to_s(36),
    result: payload[:result]&.to_s,
    timestamp: started_at
  )
  logger.info(entry)
end
# Job failed with error event
# Handles the job-error notification and emits a structured Error entry.
sig { params(event: ::ActiveSupport::Notifications::Event).void }
def error(event)
  payload = T.let(event.payload, T::Hash[Symbol, T.untyped])
  job = payload[:job]
  execution = payload[:execution] || payload[:good_job_execution]
  exception = payload[:exception] || payload[:error]
  occurred_at = event.time ? Time.at(event.time) : Time.now

  entry = Log::GoodJob::Error.new(
    **build_base_fields(job, payload).to_kwargs,
    exception_executions: execution&.exception_executions,
    error_class: exception&.class&.name,
    error_message: exception&.message,
    backtrace: exception&.backtrace,
    duration_ms: event.duration.to_f,
    process_id: ::Process.pid,
    thread_id: Thread.current.object_id.to_s(36),
    timestamp: occurred_at
  )
  logger.error(entry)
end
# Job scheduled for future execution event
# Handles the job-scheduled notification and emits a structured Schedule entry.
sig { params(event: ::ActiveSupport::Notifications::Event).void }
def schedule(event)
  payload = T.let(event.payload, T::Hash[Symbol, T.untyped])
  job = payload[:job]
  scheduled = job&.scheduled_at

  entry = Log::GoodJob::Schedule.new(
    **build_base_fields(job, payload).to_kwargs,
    scheduled_at: scheduled ? Time.at(scheduled.to_i) : nil,
    priority: job&.priority,
    cron_key: job&.cron_key,
    duration_ms: event.duration.to_f,
    timestamp: event.time ? Time.at(event.time) : Time.now
  )
  logger.info(entry)
end
- 1
private
# Builds the shared BaseFields struct common to every GoodJob log entry,
# pulling identity fields from the job and the execution count from
# whichever execution object the payload carries.
sig { params(job: T.untyped, payload: T::Hash[Symbol, T.untyped]).returns(Log::GoodJob::BaseFields) }
def build_base_fields(job, payload)
  exec = payload[:execution] || payload[:good_job_execution]

  Log::GoodJob::BaseFields.new(
    job_id: job&.job_id,
    job_class: job&.job_class,
    queue_name: job&.queue_name&.to_sym,
    arguments: job&.arguments,
    executions: exec&.executions
  )
end
# Derives queue wait time (seconds, Float) from created_at -> performed_at.
# Returns nil when either timestamp is unavailable or the math fails.
sig { params(execution: T.untyped).returns(T.nilable(Float)) }
def calculate_wait_time(execution)
  unless execution.respond_to?(:created_at) && execution.respond_to?(:performed_at)
    return nil
  end

  enqueued = execution.created_at
  performed = execution.performed_at
  return nil unless enqueued && performed

  (performed - enqueued).to_f
rescue
  # Wait time is best-effort metadata; swallow calculation errors.
  nil
end
# Get the appropriate logger for GoodJob events
# @return [untyped] the currently configured Rails logger
- 2
sig { returns(T.untyped) }
- 1
def logger
# Always use Rails.logger - in production it will be configured by the integration setup,
# in tests it will be set up by the test harness
- 7
Rails.logger
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
- 1
require_relative "../../semantic_logger/logger"
- 1
require_relative "../../log/good_job"
- 1
require_relative "../../enums/source"
- 1
module LogStruct
- 1
module Integrations
- 1
module GoodJob
# Custom Logger for GoodJob that creates LogStruct::Log::GoodJob entries
#
# This logger extends LogStruct's SemanticLogger to provide optimal logging
# performance while creating structured log entries specifically for GoodJob
# operations and events.
#
# ## Benefits:
# - High-performance logging with SemanticLogger backend
# - Structured GoodJob-specific log entries
# - Automatic job context capture
# - Thread and process information
# - Performance metrics and timing data
#
# ## Usage:
# This logger is automatically configured when the GoodJob integration
# is enabled. It replaces GoodJob.logger to provide structured logging
# for all GoodJob operations.
- 1
class Logger < LogStruct::SemanticLogger::Logger
- 1
extend T::Sig
# Override log methods to create GoodJob-specific log structs.
# Each severity method keeps the SemanticLogger-style (message, payload, &block)
# signature and wraps the message in a Log::GoodJob::Log struct.
- 1
%i[debug info warn error fatal].each do |level|
- 5
define_method(level) do |message = nil, payload = nil, &block|
# Extract basic job context from thread-local variables
# (GoodJob stores the current execution in Thread.current[:good_job_execution]).
- 12
job_context = {}
- 12
if Thread.current[:good_job_execution]
- 2
execution = Thread.current[:good_job_execution]
# Only capture context when the object looks like a real execution record;
# every optional field below is guarded individually with respond_to?.
- 2
if execution.respond_to?(:job_id)
- 2
job_context[:job_id] = execution.job_id
- 2
job_context[:job_class] = execution.job_class if execution.respond_to?(:job_class)
- 2
job_context[:queue_name] = execution.queue_name if execution.respond_to?(:queue_name)
- 2
job_context[:executions] = execution.executions if execution.respond_to?(:executions)
- 2
job_context[:scheduled_at] = execution.scheduled_at if execution.respond_to?(:scheduled_at)
- 2
job_context[:priority] = execution.priority if execution.respond_to?(:priority)
end
end
# Build the structured entry; a block-form message is evaluated eagerly here.
- 12
log_struct = Log::GoodJob::Log.new(
- 1
message: message || (block ? block.call : ""),
process_id: ::Process.pid,
thread_id: Thread.current.object_id.to_s(36),
job_id: job_context[:job_id],
job_class: job_context[:job_class],
queue_name: job_context[:queue_name],
executions: job_context[:executions],
scheduled_at: job_context[:scheduled_at],
priority: job_context[:priority]
)
# Pass &nil so the parent logger does not evaluate the block a second time.
- 12
super(log_struct, payload, &nil)
end
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
- 2
require "action_dispatch/middleware/host_authorization"
- 2
require_relative "../enums/event"
- 2
require_relative "../log/security/blocked_host"
- 2
module LogStruct
- 2
module Integrations
# Host Authorization integration for structured logging of blocked hosts
- 2
module HostAuthorization
- 2
extend T::Sig
- 2
extend IntegrationInterface
- 2
RESPONSE_HTML = T.let(
"<html><head><title>Blocked Host</title></head><body>" \
"<h1>Blocked Host</h1>" \
"<p>This host is not permitted to access this application.</p>" \
"<p>If you are the administrator, check your configuration.</p>" \
"</body></html>",
String
)
- 2
RESPONSE_HEADERS = T.let(
{
"Content-Type" => "text/html",
"Content-Length" => RESPONSE_HTML.bytesize.to_s
}.freeze,
T::Hash[String, String]
)
- 2
FORBIDDEN_STATUS = T.let(403, Integer)
# Set up host authorization logging
#
# Installs a custom response app for Rails' HostAuthorization middleware that
# logs a structured Security::BlockedHost entry and returns a static 403 page.
# @param config [LogStruct::Configuration] global LogStruct configuration
# @return [Boolean, nil] true on success, nil when disabled/skipped
- 4
sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
- 2
def self.setup(config)
- 2
return nil unless config.enabled
- 2
return nil unless config.integrations.enable_host_authorization
# Define the response app as a separate variable to fix block alignment
- 2
response_app = lambda do |env|
request = ::ActionDispatch::Request.new(env)
# Include the blocked hosts app configuration in the log entry
# This can be helpful later when reviewing logs.
# NOTE(review): "action_dispatch.blocked_hosts" is presumably set by
# ActionDispatch::HostAuthorization — confirm the key across Rails versions.
blocked_hosts = env["action_dispatch.blocked_hosts"]
# Build allowed_hosts array
allowed_hosts_array = T.let(nil, T.nilable(T::Array[String]))
if blocked_hosts.respond_to?(:allowed_hosts)
allowed_hosts_array = blocked_hosts.allowed_hosts
end
# Get allow_ip_hosts value
allow_ip_hosts_value = T.let(nil, T.nilable(T::Boolean))
if blocked_hosts.respond_to?(:allow_ip_hosts)
allow_ip_hosts_value = blocked_hosts.allow_ip_hosts
end
# Create structured log entry for blocked host
log_entry = LogStruct::Log::Security::BlockedHost.new(
message: "Blocked host detected: #{request.host}",
blocked_host: request.host,
path: request.path,
http_method: request.method,
source_ip: request.ip,
user_agent: request.user_agent,
referer: request.referer,
request_id: request.request_id,
x_forwarded_for: request.x_forwarded_for,
allowed_hosts: allowed_hosts_array&.empty? ? nil : allowed_hosts_array,
allow_ip_hosts: allow_ip_hosts_value
)
# Log the blocked host
LogStruct.warn(log_entry)
# Use pre-defined headers and response if we are only logging or reporting
# Dup the headers so they can be modified by downstream middleware
[FORBIDDEN_STATUS, RESPONSE_HEADERS.dup, [RESPONSE_HTML]]
end
# Merge our response_app into existing host_authorization config to preserve excludes
- 2
existing = Rails.application.config.host_authorization
- 2
unless existing.is_a?(Hash)
existing = {}
end
- 2
existing = existing.dup
- 2
existing[:response_app] = response_app
- 2
Rails.application.config.host_authorization = existing
- 2
true
end
end
end
end
# typed: strict
# frozen_string_literal: true
- 2
module LogStruct
- 2
module Integrations
# Interface that all integrations must implement
# This ensures consistent behavior across all integration modules
- 2
module IntegrationInterface
- 2
extend T::Sig
- 2
extend T::Helpers
# This is an interface that should be implemented by all integration modules
- 2
interface!
# All integrations must implement this method to set up their functionality
# @param config [LogStruct::Configuration] the global LogStruct configuration
# @return [Boolean, nil] Returns true if setup was successful, nil if skipped
- 4
sig { abstract.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
- 2
def setup(config); end
end
end
end
# typed: strict
# frozen_string_literal: true
begin
- 2
require "lograge"
rescue LoadError
# Lograge gem is not available, integration will be skipped
end
- 2
module LogStruct
- 2
module Integrations
# Lograge integration for structured request logging
- 2
module Lograge
- 2
extend IntegrationInterface
- 2
class << self
- 2
extend T::Sig
# Set up lograge for structured request logging
# Enables lograge-backed structured request logging when the gem is present
# and the integration is switched on.
# Returns true on success, nil when skipped.
sig { override.params(logstruct_config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
def setup(logstruct_config)
  return nil unless defined?(::Lograge)
  return nil unless logstruct_config.enabled && logstruct_config.integrations.enable_lograge

  configure_lograge(logstruct_config)
  true
end
# NOTE: a bare `private_class_method` (with no method-name arguments) was
# removed here: Module#private_class_method only affects the methods named in
# its arguments, so with none it was a no-op and never made anything private.
# The methods below must stay public anyway — `lograge_default_options` is
# called with an explicit receiver from the custom_options lambda.
- 4
sig { params(logstruct_config: LogStruct::Configuration).void }
- 2
# Configures lograge to emit LogStruct::Log::Request structs instead of text.
def configure_lograge(logstruct_config)
- 3
::Rails.application.configure do
- 3
config.lograge.enabled = true
# Use a raw formatter that just returns the log struct.
# The struct is converted to JSON by our Formatter (after filtering, etc.)
- 3
config.lograge.formatter = T.let(
lambda do |data|
# Coerce common fields to expected types
# (lograge may hand us strings or numeric-ish objects; keep the raw
# value when it doesn't respond to the coercion method).
- 2
status = ((s = data[:status]) && s.respond_to?(:to_i)) ? s.to_i : s
- 2
duration_ms = ((d = data[:duration]) && d.respond_to?(:to_f)) ? d.to_f : d
- 2
view = ((v = data[:view]) && v.respond_to?(:to_f)) ? v.to_f : v
- 2
db = ((b = data[:db]) && b.respond_to?(:to_f)) ? b.to_f : b
- 2
params = data[:params]
- 2
params = params.deep_symbolize_keys if params&.respond_to?(:deep_symbolize_keys)
- 2
Log::Request.new(
http_method: data[:method]&.to_s,
path: data[:path]&.to_s,
format: data[:format]&.to_sym,
controller: data[:controller]&.to_s,
action: data[:action]&.to_s,
status: status,
duration_ms: duration_ms,
view: view,
database: db,
params: params,
timestamp: Time.now
)
end,
T.proc.params(hash: T::Hash[Symbol, T.untyped]).returns(Log::Request)
)
# Add custom options to lograge
- 3
config.lograge.custom_options = lambda do |event|
Integrations::Lograge.lograge_default_options(event)
end
end
end
# Builds the custom-options hash that lograge merges into each request log.
sig { params(event: ActiveSupport::Notifications::Event).returns(T::Hash[Symbol, T.untyped]) }
def lograge_default_options(event)
  payload = event.payload

  # Keep only the identity fields that are actually present.
  options = payload.slice(:request_id, :host, :source_ip).compact

  params = payload[:params]
  # Routing keys are already captured as dedicated controller/action fields.
  options[:params] = params.except("controller", "action") if params.present?

  process_headers(event, options)
  apply_custom_options(event, options)
  options
end
# Copies a small allowlist of request headers into the options hash.
sig { params(event: ActiveSupport::Notifications::Event, options: T::Hash[Symbol, T.untyped]).void }
def process_headers(event, options)
  headers = event.payload[:headers]
  return if headers.blank?

  {
    user_agent: "HTTP_USER_AGENT",
    content_type: "CONTENT_TYPE",
    accept: "HTTP_ACCEPT"
  }.each { |key, env_name| options[key] = headers[env_name] }
end
# Invokes the application-supplied lograge_custom_options hook, if any.
# The hook receives (event, options) and may mutate options in place.
sig { params(event: ActiveSupport::Notifications::Event, options: T::Hash[Symbol, T.untyped]).void }
def apply_custom_options(event, options)
  hook = LogStruct.config.integrations.lograge_custom_options
  hook.call(event, options) if hook.respond_to?(:call)
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
- 2
module LogStruct
- 2
module Integrations
- 2
module Puma
- 2
extend T::Sig
- 2
extend T::Helpers
- 2
STATE = T.let(
{
installed: false,
boot_emitted: false,
shutdown_emitted: false,
handler_pending_started: false,
start_info: {
mode: nil,
puma_version: nil,
puma_codename: nil,
ruby_version: nil,
min_threads: nil,
max_threads: nil,
environment: nil,
pid: nil,
listening: []
}
},
T::Hash[Symbol, T.untyped]
)
- 2
class << self
- 2
extend T::Sig
- 4
sig { params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
- 2
# Installs the Puma patches and, for `server` CLI runs, emits deterministic
# start/shutdown lifecycle logs derived from ARGV and process state.
# Returns true on success, nil when Puma is unavailable or disabled.
def setup(config)
- 2
return nil unless config.integrations.enable_puma
# No stdout wrapping here.
# Ensure Puma is loaded so we can patch its classes
begin
- 2
require "puma"
rescue LoadError
# If Puma isn't available, skip setup
- 1
return nil
end
- 1
install_patches!
- 1
if ARGV.include?("server")
# Emit deterministic boot/started events based on CLI args
# (-p PORT, --port PORT, or --port=PORT).
begin
port = T.let(nil, T.nilable(String))
ARGV.each_with_index do |arg, idx|
if arg == "-p" || arg == "--port"
port = ARGV[idx + 1]
break
elsif arg.start_with?("--port=")
port = arg.split("=", 2)[1]
break
end
end
si = T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])
si[:pid] ||= Process.pid
si[:environment] ||= ((defined?(::Rails) && ::Rails.respond_to?(:env)) ? ::Rails.env : nil)
si[:mode] ||= "single"
if port && !T.cast(si[:listening], T::Array[T.untyped]).any? { |a| a.to_s.include?(":" + port.to_s) }
si[:listening] = ["tcp://127.0.0.1:#{port}"]
end
emit_boot_if_needed!
unless STATE[:started_emitted]
emit_started!
STATE[:started_emitted] = true
end
rescue => e
handle_integration_error(e)
end
# Convert TERM/INT into a structured shutdown log before the process exits.
begin
%w[TERM INT].each do |sig|
Signal.trap(sig) { emit_shutdown!(sig) }
end
rescue => e
handle_integration_error(e)
end
# Last-resort shutdown log for exits that bypass the signal handlers.
at_exit do
emit_shutdown!("Exiting")
rescue => e
handle_integration_error(e)
end
# Connection-based readiness: emit started once port is accepting connections
# No background threads or sockets; rely solely on parsing Puma output
end
- 1
true
end
- 3
sig { void }
- 2
# Idempotently prepends our interception modules into Puma's output classes
# (LogWriter, Events) and Rack::Handler::Puma. Guarded by STATE[:installed]
# so repeated calls are no-ops; all constant lookups are defensive because
# class layout varies across Puma/Rack versions.
def install_patches!
- 1
return if STATE[:installed]
- 1
STATE[:installed] = true
- 1
state_reset!
begin
begin
- 1
require "puma"
rescue => e
handle_integration_error(e)
end
- 1
puma_mod = ::Object.const_defined?(:Puma) ? T.unsafe(::Object.const_get(:Puma)) : nil # rubocop:disable Sorbet/ConstantsFromStrings
# rubocop:disable Sorbet/ConstantsFromStrings
- 1
if puma_mod&.const_defined?(:LogWriter)
- 1
T.unsafe(::Object.const_get("Puma::LogWriter")).prepend(LogWriterPatch)
end
- 1
if puma_mod&.const_defined?(:Events)
ev = T.unsafe(::Object.const_get("Puma::Events"))
ev.prepend(EventsPatch)
end
# Patch Rack::Handler::Puma.run to emit lifecycle logs using options
- 1
if ::Object.const_defined?(:Rack)
- 1
rack_mod = T.unsafe(::Object.const_get(:Rack))
- 1
if rack_mod.const_defined?(:Handler)
handler_mod = T.unsafe(rack_mod.const_get(:Handler))
if handler_mod.const_defined?(:Puma)
handler = T.unsafe(handler_mod.const_get(:Puma))
handler.singleton_class.prepend(RackHandlerPatch)
end
end
end
# Avoid patching CLI/Server; rely on log parsing
# Avoid patching CLI to minimize version-specific risks
# rubocop:enable Sorbet/ConstantsFromStrings
rescue => e
handle_integration_error(e)
end
# Rely on Puma patches to observe lines
end
# Central error policy for this integration: re-raise in the Rails test
# environment (so bugs surface in specs) unless we're running a real server,
# otherwise report through LogStruct's exception handler.
sig { params(e: StandardError).void }
def handle_integration_error(e)
  in_test = defined?(::Rails) && ::Rails.respond_to?(:env) && ::Rails.env.test?
  raise e if in_test && !::LogStruct.server_mode?

  LogStruct.handle_exception(e, source: Source::Puma)
end
# No stdout interception
# Resets all lifecycle flags and captured start info so a fresh server
# run (or test) starts from a clean slate.
sig { void }
def state_reset!
  %i[boot_emitted shutdown_emitted started_emitted handler_pending_started].each do |flag|
    STATE[flag] = false
  end

  STATE[:start_info] = {
    mode: nil,
    puma_version: nil,
    puma_codename: nil,
    ruby_version: nil,
    min_threads: nil,
    max_threads: nil,
    environment: nil,
    pid: nil,
    listening: []
  }
end
# Parses one line of Puma's human-readable output. Returns true when the
# line was consumed (captured into STATE / converted to a structured log and
# should be suppressed), false when it should pass through untouched.
sig { params(line: String).returns(T::Boolean) }
def process_line(line)
  l = line.to_s.strip
  return false if l.empty?

  si = T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])

  # Suppress non-JSON rails banners. NOTE: this prefix also matches
  # "=> Booting Puma" — the old dedicated check for that line was
  # unreachable dead code and has been removed.
  return true if l.start_with?("=> ")

  if l.start_with?("Puma starting in ")
    # Example: Puma starting in single mode...
    si[:mode] = l.sub("Puma starting in ", "").sub(" mode...", "")
    return true
  end

  if (m = l.match(/^(?:\*\s*)?Puma version: (\S+)(?:.*"([^\"]+)")?/))
    si[:puma_version] = m[1]
    si[:puma_codename] = m[2] if m[2]
    return true
  end

  if (m = l.match(/^\* Ruby version: (.+)$/))
    si[:ruby_version] = m[1]
    return true
  end

  if (m = l.match(/^(?:\*\s*)?Min threads: (\d+)/))
    si[:min_threads] = m[1].to_i
    return true
  end

  if (m = l.match(/^(?:\*\s*)?Max threads: (\d+)/))
    si[:max_threads] = m[1].to_i
    return true
  end

  if (m = l.match(/^(?:\*\s*)?Environment: (\S+)/))
    si[:environment] = m[1]
    return true
  end

  if (m = l.match(/^(?:\*\s*)?PID:\s+(\d+)/))
    si[:pid] = m[1].to_i
    return true
  end

  if (m = l.match(/^\*?\s*Listening on (.+)$/))
    list = T.cast(si[:listening], T::Array[T.untyped])
    address = T.must(m[1])
    list << address unless list.include?(address)
    # Emit started when we see the first listening address
    unless STATE[:started_emitted]
      emit_started!
      STATE[:started_emitted] = true
    end
    return true
  end

  if l == "Use Ctrl-C to stop"
    # Fallback: if no listening address captured yet, infer from ARGV
    if T.cast(si[:listening], T::Array[T.untyped]).empty?
      begin
        port = port_from_argv
        si[:listening] << "tcp://127.0.0.1:#{port}" if port
      rescue => e
        handle_integration_error(e)
      end
    end
    unless STATE[:started_emitted]
      emit_started!
      STATE[:started_emitted] = true
    end
    # Deliberately NOT consumed: keep the Ctrl-C hint visible to the user.
    return false
  end

  if l.start_with?("- Gracefully stopping") || l.start_with?("=== puma shutdown:")
    emit_shutdown!(l)
    return true
  end

  # Swallow the farewell line; it has no structured equivalent.
  return true if l == "- Goodbye!"

  if l == "Exiting"
    emit_shutdown!(l)
    return true
  end

  false
end

# Extracts the port from "-p PORT", "--port PORT" or "--port=PORT" in ARGV.
# Returns nil when no port flag is present.
sig { returns(T.untyped) }
def port_from_argv
  ARGV.each_with_index do |arg, idx|
    return ARGV[idx + 1] if arg == "-p" || arg == "--port"
    return arg.split("=", 2)[1] if arg.start_with?("--port=")
  end
  nil
end
# Kept for call-site compatibility: boot logs were removed, so this now
# only records that the boot phase was observed.
sig { void }
def emit_boot_if_needed!
  STATE[:boot_emitted] = true
end
# No server hooks; rely on parsing only
# Emits the structured Puma start log using whatever start_info fields
# have been captured so far; missing fields are logged as nil.
sig { void }
def emit_started!
  si = T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])

  entry = Log::Puma::Start.new(
    mode: T.cast(si[:mode], T.nilable(String)),
    puma_version: T.cast(si[:puma_version], T.nilable(String)),
    puma_codename: T.cast(si[:puma_codename], T.nilable(String)),
    ruby_version: T.cast(si[:ruby_version], T.nilable(String)),
    min_threads: T.cast(si[:min_threads], T.nilable(Integer)),
    max_threads: T.cast(si[:max_threads], T.nilable(Integer)),
    environment: T.cast(si[:environment], T.nilable(String)),
    process_id: si[:pid] || Process.pid,
    listening_addresses: T.cast(si[:listening], T::Array[String]),
    level: Level::Info,
    timestamp: Time.now
  )
  LogStruct.info(entry)

  # Only use LogStruct; SemanticLogger routes to STDOUT in test
  STATE[:handler_pending_started] = false
end
# Emits the structured Puma shutdown log exactly once per process. The
# triggering message is ignored; only pid/level/timestamp are recorded.
sig { params(_message: String).void }
def emit_shutdown!(_message)
  return if STATE[:shutdown_emitted]

  STATE[:shutdown_emitted] = true
  pid = T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])[:pid]
  LogStruct.info(
    Log::Puma::Shutdown.new(
      process_id: pid || Process.pid,
      level: Level::Info,
      timestamp: Time.now
    )
  )
  # Only use LogStruct; the SemanticLogger appender writes to STDOUT in test.
end
end
# STDOUT interception is handled globally via StdoutFilter; keep Puma patches minimal
# Patch Puma::LogWriter to intercept log writes
- 2
# Prepended into Puma::LogWriter so every output path (log/write/<</puts/info)
# is screened by process_line; recognized lifecycle lines are suppressed and
# re-emitted as structured logs, everything else passes through to Puma.
module LogWriterPatch
- 2
extend T::Sig
- 2
sig { params(msg: String).returns(T.untyped) }
- 2
def log(msg)
consumed = ::LogStruct::Integrations::Puma.process_line(msg)
super unless consumed
end
- 2
sig { params(msg: String).returns(T.untyped) }
- 2
# Multi-line writes: suppress the whole write if ANY line was consumed.
def write(msg)
any_consumed = T.let(false, T::Boolean)
msg.to_s.each_line do |l|
any_consumed = true if ::LogStruct::Integrations::Puma.process_line(l)
end
super unless any_consumed
end
- 2
sig { params(msg: String).returns(T.untyped) }
- 2
def <<(msg)
any_consumed = T.let(false, T::Boolean)
msg.to_s.each_line do |l|
any_consumed = true if ::LogStruct::Integrations::Puma.process_line(l)
end
super unless any_consumed
end
- 2
sig { params(msg: String).returns(T.untyped) }
- 2
def puts(msg)
consumed = ::LogStruct::Integrations::Puma.process_line(msg)
if consumed
# attempt to suppress; only forward if not consumed
return nil
end
# NOTE(review): Kernel does not normally carry an @stdout ivar; this looks
# like a hook for a test harness that stashes the real STDOUT — confirm.
if ::Kernel.instance_variables.include?(:@stdout)
io = T.unsafe(::Kernel.instance_variable_get(:@stdout))
return io.puts(msg)
end
super
end
- 2
sig { params(msg: String).returns(T.untyped) }
- 2
def info(msg)
consumed = ::LogStruct::Integrations::Puma.process_line(msg)
super unless consumed
end
end
# Patch Puma::Events as a fallback for some versions where Events handles output
- 2
# Intercepts Puma::Events#log for Puma versions where Events handles output;
# forwards to Puma only when process_line did not consume the line.
module EventsPatch
- 2
extend T::Sig
- 2
sig { params(str: String).returns(T.untyped) }
- 2
def log(str)
consumed = ::LogStruct::Integrations::Puma.process_line(str)
super unless consumed
end
end
# Hook Rack::Handler::Puma.run to emit structured started/shutdown
- 2
# Prepended onto Rack::Handler::Puma's singleton class so .run records the
# configured host/port in STATE before Puma boots, emits a structured
# "started" log when run returns control, and guarantees a shutdown log
# via at_exit.
module RackHandlerPatch
- 2
extend T::Sig
- 2
sig do
params(
app: T.untyped,
args: T.untyped,
block: T.nilable(T.proc.returns(T.untyped))
).returns(T.untyped)
end
- 2
def run(app, *args, &block)
# Collapse any options hashes passed positionally into a single hash.
rest = args
options = T.let({}, T::Hash[T.untyped, T.untyped])
rest.each do |value|
next unless value.is_a?(Hash)
options.merge!(value)
end
begin
si = T.cast(::LogStruct::Integrations::Puma::STATE[:start_info], T::Hash[Symbol, T.untyped])
si[:mode] ||= "single"
si[:environment] ||= ((defined?(::Rails) && ::Rails.respond_to?(:env)) ? ::Rails.env : nil)
si[:pid] ||= Process.pid
si[:listening] ||= []
# Port/Host may be passed under capitalized or lowercase, symbol or string keys.
port = T.let(nil, T.untyped)
host = T.let(nil, T.untyped)
if options.respond_to?(:[])
port = options[:Port] || options["Port"] || options[:port] || options["port"]
host = options[:Host] || options["Host"] || options[:host] || options["host"]
end
if port
list = T.cast(si[:listening], T::Array[T.untyped])
list.clear
# 0.0.0.0 binds all interfaces; report loopback as the reachable address.
h = (host && host != "0.0.0.0") ? host : "127.0.0.1"
list << "tcp://#{h}:#{port}"
end
state = ::LogStruct::Integrations::Puma::STATE
# Defer the "started" log until run returns control (see ensure below).
state[:handler_pending_started] = true unless state[:started_emitted]
rescue => e
::LogStruct::Integrations::Puma.handle_integration_error(e)
end
begin
Kernel.at_exit do
unless ::LogStruct::Integrations::Puma::STATE[:shutdown_emitted]
::LogStruct::Integrations::Puma.emit_shutdown!("Exiting")
::LogStruct::Integrations::Puma::STATE[:shutdown_emitted] = true
end
rescue => e
::LogStruct::Integrations::Puma.handle_integration_error(e)
end
rescue => e
::LogStruct::Integrations::Puma.handle_integration_error(e)
end
begin
result = super(app, **options, &block)
ensure
state = ::LogStruct::Integrations::Puma::STATE
if state[:handler_pending_started] && !state[:started_emitted]
begin
::LogStruct::Integrations::Puma.emit_started!
state[:started_emitted] = true
rescue => e
::LogStruct::Integrations::Puma.handle_integration_error(e)
ensure
state[:handler_pending_started] = false
end
end
end
result
end
end
# (No Launcher patch)
# No Server patch
# No InterceptorIO
# Removed EventsInitPatch and CLIPatch to avoid version-specific conflicts
end
end
end
# typed: strict
# frozen_string_literal: true
- 2
require "rack"
- 2
require "action_dispatch/middleware/show_exceptions"
- 2
require_relative "rack_error_handler/middleware"
- 2
module LogStruct
- 2
module Integrations
# Rack middleware integration for structured logging
- 2
module RackErrorHandler
- 2
extend T::Sig
- 2
extend IntegrationInterface
# Inserts the structured error-handling middleware ahead of RemoteIp so the
# IP-spoofing errors RemoteIp raises are caught and logged by us.
# Returns true on success, nil when disabled.
sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
def self.setup(config)
  return nil unless config.enabled && config.integrations.enable_rack_error_handler

  ::Rails.application.middleware.insert_before(
    ::ActionDispatch::RemoteIp,
    Integrations::RackErrorHandler::Middleware
  )
  true
end
end
end
end
# typed: strict
# frozen_string_literal: true
- 2
module LogStruct
- 2
module Integrations
- 2
module RackErrorHandler
# Custom middleware to enhance Rails error logging with JSON format and request details
- 2
class Middleware
- 2
extend T::Sig
# IP Spoofing error response
- 2
IP_SPOOF_HTML = T.let(
"<html><head><title>IP Spoofing Detected</title></head><body>" \
"<h1>Forbidden</h1>" \
"<p>IP spoofing detected. This request has been blocked for security reasons.</p>" \
"</body></html>",
String
)
# CSRF error response
- 2
CSRF_HTML = T.let(
"<html><head><title>CSRF Error</title></head><body>" \
"<h1>Forbidden</h1>" \
"<p>Invalid authenticity token. This request has been blocked to prevent cross-site request forgery.</p>" \
"</body></html>",
String
)
# Response headers calculated at load time
- 2
IP_SPOOF_HEADERS = T.let(
{
"Content-Type" => "text/html",
"Content-Length" => IP_SPOOF_HTML.bytesize.to_s
}.freeze,
T::Hash[String, String]
)
- 2
CSRF_HEADERS = T.let(
{
"Content-Type" => "text/html",
"Content-Length" => CSRF_HTML.bytesize.to_s
}.freeze,
T::Hash[String, String]
)
# HTTP status code for forbidden responses
- 2
FORBIDDEN_STATUS = T.let(403, Integer)
- 4
sig { params(app: T.untyped).void }
- 2
# @param app the next Rack application in the middleware chain
def initialize(app)
- 2
@app = app
end
- 2
# Rack entry point. Runs an eager IP-spoofing check, then delegates to the
# wrapped app; converts IP-spoofing and CSRF failures into structured
# security logs with static 403 responses, and logs/re-raises everything else.
sig { params(env: T.untyped).returns(T.untyped) }
- 2
def call(env)
# Bypass all interception when LogStruct is disabled.
return @app.call(env) unless LogStruct.enabled?
request = ::ActionDispatch::Request.new(env)
begin
# Trigger the same spoofing checks that ActionDispatch::RemoteIp performs after
# it is initialized in the middleware stack. We run this manually because we
# execute before that middleware and still want spoofing attacks to surface here.
perform_remote_ip_check!(request)
@app.call(env)
rescue ::ActionDispatch::RemoteIp::IpSpoofAttackError => ip_spoof_error
# Create a security log for IP spoofing
security_log = Log::Security::IPSpoof.new(
path: env["PATH_INFO"],
http_method: env["REQUEST_METHOD"],
user_agent: env["HTTP_USER_AGENT"],
referer: env["HTTP_REFERER"],
request_id: request.request_id,
message: ip_spoof_error.message,
client_ip: env["HTTP_CLIENT_IP"],
x_forwarded_for: env["HTTP_X_FORWARDED_FOR"],
timestamp: Time.now
)
# NOTE(review): IP-spoof entries go through Rails.logger.warn while the CSRF
# branch below uses LogStruct.error — confirm this asymmetry is intentional.
::Rails.logger.warn(security_log)
[FORBIDDEN_STATUS, IP_SPOOF_HEADERS.dup, [IP_SPOOF_HTML]]
rescue ::ActionController::InvalidAuthenticityToken => invalid_auth_token_error
# Create a security log for CSRF error
security_log = Log::Security::CSRFViolation.new(
path: request.path,
http_method: request.method,
source_ip: request.remote_ip,
user_agent: request.user_agent,
referer: request.referer,
request_id: request.request_id,
message: invalid_auth_token_error.message,
timestamp: Time.now
)
LogStruct.error(security_log)
# Report to error reporting service and/or re-raise
context = extract_request_context(env, request)
LogStruct.handle_exception(invalid_auth_token_error, source: Source::Security, context: context)
# If handle_exception raised an exception then Rails will deal with it (e.g. config.exceptions_app)
# If we are only logging or reporting these security errors, then return a default response
[FORBIDDEN_STATUS, CSRF_HEADERS.dup, [CSRF_HTML]]
rescue => error
# Extract request context for error reporting
context = extract_request_context(env, request)
# Create and log a structured exception with request context
exception_log = Log.from_exception(Source::Rails, error, context)
LogStruct.error(exception_log)
# Re-raise any standard errors to let Rails or error reporter handle it.
# Rails will also log the request details separately
raise error
end
end
- 2
private
# Runs ActionDispatch's IP-spoofing check eagerly. GetIp#to_s performs the
# header comparison and raises IpSpoofAttackError on a mismatch; the string
# result itself is discarded.
sig { params(request: ::ActionDispatch::Request).void }
def perform_remote_ip_check!(request)
  ad_config = ::Rails.application.config.action_dispatch
  check_ip = ad_config.ip_spoofing_check
  return unless check_ip

  trusted = normalized_trusted_proxies(ad_config.trusted_proxies)
  ::ActionDispatch::RemoteIp::GetIp.new(request, check_ip, trusted).to_s
end
# Builds a small hash of request metadata for error reports. Never raises:
# if the request can't be read, returns a marker hash with the failure message.
sig { params(env: T::Hash[String, T.untyped], request: T.nilable(::ActionDispatch::Request)).returns(T::Hash[Symbol, T.untyped]) }
def extract_request_context(env, request = nil)
  req = request || ::ActionDispatch::Request.new(env)
  {
    request_id: req.request_id,
    path: req.path,
    method: req.method,
    user_agent: req.user_agent,
    referer: req.referer
  }
rescue => error
  {error_extracting_context: error.message}
end
- 2
sig { params(configured_proxies: T.untyped).returns(T.untyped) }
- 2
# Normalizes config.action_dispatch.trusted_proxies into an enumerable:
# nil or empty falls back to ActionDispatch's default TRUSTED_PROXIES, any
# enumerable (responds to #any?) passes through, and a bare single value
# raises with guidance to wrap it in an Array.
def normalized_trusted_proxies(configured_proxies)
if configured_proxies.nil? || (configured_proxies.respond_to?(:empty?) && configured_proxies.empty?)
return ::ActionDispatch::RemoteIp::TRUSTED_PROXIES
end
return configured_proxies if configured_proxies.respond_to?(:any?)
raise(
ArgumentError,
<<~EOM
Setting config.action_dispatch.trusted_proxies to a single value isn't
supported. Please set this to an enumerable instead. For
example, instead of:
config.action_dispatch.trusted_proxies = IPAddr.new("10.0.0.0/8")
Wrap the value in an Array:
config.action_dispatch.trusted_proxies = [IPAddr.new("10.0.0.0/8")]
Note that passing an enumerable will *replace* the default set of trusted proxies.
EOM
)
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
begin
- 2
require "shrine"
rescue LoadError
# Shrine gem is not available, integration will be skipped
end
- 2
module LogStruct
  module Integrations
    # Shrine integration for structured logging.
    #
    # Subscribes to Shrine's instrumentation events and emits each one as a
    # structured LogStruct::Log::Shrine::* entry through Shrine's logger.
    module Shrine
      extend T::Sig
      extend IntegrationInterface

      # Set up Shrine structured logging.
      #
      # Returns nil when Shrine is absent or the integration is disabled,
      # true once the instrumentation plugin has been configured.
      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
      def self.setup(config)
        return nil unless defined?(::Shrine)
        return nil unless config.enabled
        return nil unless config.integrations.enable_shrine

        # Create a structured log subscriber for Shrine
        # ActiveSupport::Notifications::Event has name, time, end, transaction_id, payload, and duration
        shrine_log_subscriber = T.unsafe(lambda do |event|
          # BUGFIX: capture metadata from the raw payload *before* stripping it.
          # Previously payload[:metadata] was read after
          # `.except(:io, :metadata, :name)`, so Metadata/unknown logs always
          # carried nil metadata.
          raw_metadata = event.payload[:metadata]
          payload = event.payload.except(:io, :metadata, :name).dup

          # Map event name to Event type
          event_type = case event.name
          when :upload then Event::Upload
          when :download then Event::Download
          when :delete then Event::Delete
          when :metadata then Event::Metadata
          when :exists then Event::Exist # ActiveStorage uses 'exist', may as well use that
          else Event::Unknown
          end

          # Create structured log data
          # Ensure storage is always a symbol
          storage_sym = payload[:storage].to_sym
          log_data = case event_type
          when Event::Upload
            Log::Shrine::Upload.new(
              storage: storage_sym,
              location: payload[:location],
              uploader: payload[:uploader]&.to_s,
              upload_options: payload[:upload_options],
              options: payload[:options],
              duration_ms: event.duration.to_f
            )
          when Event::Download
            Log::Shrine::Download.new(
              storage: storage_sym,
              location: payload[:location],
              download_options: payload[:download_options]
            )
          when Event::Delete
            Log::Shrine::Delete.new(
              storage: storage_sym,
              location: payload[:location]
            )
          when Event::Metadata
            metadata_params = {
              storage: storage_sym,
              metadata: raw_metadata
            }
            metadata_params[:location] = payload[:location] if payload[:location]
            Log::Shrine::Metadata.new(**metadata_params)
          when Event::Exist
            Log::Shrine::Exist.new(
              storage: storage_sym,
              location: payload[:location],
              exist: payload[:exist]
            )
          else
            # Unknown event names fall back to a generic Metadata entry.
            unknown_params = {storage: storage_sym, metadata: raw_metadata}
            unknown_params[:location] = payload[:location] if payload[:location]
            Log::Shrine::Metadata.new(**unknown_params)
          end

          # Pass the structured hash to the logger
          # If Rails.logger has our Formatter, it will handle JSON conversion
          ::Shrine.logger.info log_data
        end)

        # Configure Shrine to use our structured log subscriber
        # NOTE(review): the :metadata branch above is handled but :metadata is
        # not in the subscribed events list — confirm that is intentional.
        ::Shrine.plugin :instrumentation,
          events: %i[upload exists download delete],
          log_subscriber: shrine_log_subscriber
        true
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
begin
- 2
require "sidekiq"
rescue LoadError
# Sidekiq gem is not available, integration will be skipped
end
- 2
require_relative "sidekiq/logger" if defined?(::Sidekiq)
- 2
module LogStruct
  module Integrations
    # Sidekiq integration for structured logging
    module Sidekiq
      extend T::Sig
      extend IntegrationInterface

      # Set up Sidekiq structured logging.
      #
      # Installs our structured logger on both sides of Sidekiq: the server
      # (worker processes) and the client (the Rails app enqueueing jobs).
      # Returns nil when Sidekiq is absent or the integration is disabled,
      # true once both loggers are installed.
      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
      def self.setup(config)
        return nil unless defined?(::Sidekiq)
        return nil unless config.enabled
        return nil unless config.integrations.enable_sidekiq

        {configure_server: "Sidekiq-Server", configure_client: "Sidekiq-Client"}.each do |configure_method, logger_name|
          ::Sidekiq.public_send(configure_method) do |sidekiq_config|
            sidekiq_config.logger = LogStruct::Integrations::Sidekiq::Logger.new(logger_name)
          end
        end
        true
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
require "sorbet-runtime"
- 2
module LogStruct
- 2
module Integrations
# Integration for Sorbet runtime type checking error handlers
# This module installs error handlers that report type errors through LogStruct
# These handlers can be enabled/disabled using configuration
- 2
module Sorbet
- 2
extend T::Sig
- 2
extend IntegrationInterface
# Set up Sorbet error handlers to report errors through LogStruct
# Returns nil when the integration is disabled, true once all four
# T::Configuration handlers have been (re)installed.
- 4
sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
- 2
def self.setup(config)
- 3
return nil unless config.integrations.enable_sorbet_error_handlers
- 3
clear_sig_error_handler!
# NOTE(review): install_error_handler! installs a sig_builder_error_handler
# that the direct assignment further below overwrites — confirm which of the
# two handlers is intended to win (coverage suggests only this one runs).
- 3
install_error_handler!
# Install inline type error handler
# Called when T.let, T.cast, T.must, etc. fail
- 3
T::Configuration.inline_type_error_handler = lambda do |error, _opts|
LogStruct.handle_exception(error, source: LogStruct::Source::TypeChecking)
end
# Install call validation error handler
# Called when method signature validation fails
- 3
T::Configuration.call_validation_error_handler = lambda do |_signature, opts|
- 1
error = TypeError.new(opts[:pretty_message])
- 1
LogStruct.handle_exception(error, source: LogStruct::Source::TypeChecking)
end
# Install sig builder error handler
# Called when there's a problem with a signature definition
- 3
T::Configuration.sig_builder_error_handler = lambda do |error, _location|
LogStruct.handle_exception(error, source: LogStruct::Source::TypeChecking)
end
# Install sig validation error handler
# Called when there's a problem with a signature validation
- 3
T::Configuration.sig_validation_error_handler = lambda do |error, _opts|
LogStruct.handle_exception(error, source: LogStruct::Source::TypeChecking)
end
- 3
true
end
# Tracks whether install_error_handler! has already run (module-singleton state).
- 2
@installed = T.let(false, T::Boolean)
- 2
class << self
- 2
extend T::Sig
- 2
private
# Install a sig builder error handler exactly once (idempotent via @installed).
- 4
sig { void }
- 2
def install_error_handler!
- 3
return if installed?
# NOTE(review): the second lambda argument for sig_builder_error_handler is
# the signature's source location, not a LogStruct::Source — forwarding it
# as `source:` to handle_exception looks wrong; TODO confirm and fix.
- 3
T::Configuration.sig_builder_error_handler = lambda do |error, source|
- 1
LogStruct.handle_exception(error, source: source, context: nil)
end
- 3
@installed = true
end
# Remove any existing sig builder error handler and return the previous one
# (read via instance_variable_get because T::Configuration exposes no getter).
- 2
sig do
- 2
returns(
T.nilable(
T.proc.params(error: StandardError, location: Thread::Backtrace::Location).void
)
)
end
- 2
def clear_sig_error_handler!
- 5
previous_handler = T.cast(
T::Configuration.instance_variable_get(:@sig_builder_error_handler),
T.nilable(
T.proc.params(error: StandardError, location: Thread::Backtrace::Location).void
)
)
- 5
T::Configuration.sig_builder_error_handler = nil
- 5
@installed = false
- 5
previous_handler
end
# Whether install_error_handler! has already installed its handler.
- 4
sig { returns(T::Boolean) }
- 2
def installed?
- 3
@installed
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# Common enums and shared interfaces
- 2
require_relative "enums/source"
- 2
require_relative "enums/event"
- 2
require_relative "enums/level"
- 2
require_relative "enums/log_field"
- 2
require_relative "log/interfaces/public_common_fields"
- 2
require_relative "shared/serialize_common_public"
# Dynamically require all top-level log structs under log/*
# Nested per-event files are required by their parent files.
- 2
Dir[File.join(__dir__, "log", "*.rb")].sort.each do |file|
- 32
require file
end
- 2
module LogStruct
  module Log
    extend T::Sig

    # Build an Error log from an exception with optional context and timestamp.
    #
    # Captures the exception's class, message, and backtrace; +additional_data+
    # and +timestamp+ default to an empty hash and the current time.
    sig do
      params(
        source: Source,
        ex: StandardError,
        additional_data: T::Hash[T.any(String, Symbol), T.untyped],
        timestamp: Time
      ).returns(LogStruct::Log::Error)
    end
    def self.from_exception(source, ex, additional_data = {}, timestamp = Time.now)
      error_attrs = {
        source: source,
        error_class: ex.class,
        message: ex.message,
        backtrace: ex.backtrace,
        additional_data: additional_data,
        timestamp: timestamp
      }
      LogStruct::Log::Error.new(**error_attrs)
    end
  end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/source_parent.rb.erb
- 2
require_relative "action_mailer/delivery"
- 2
require_relative "action_mailer/delivered"
- 2
require_relative "action_mailer/error"
- 2
module LogStruct
- 2
module Log
- 2
class ActionMailer
# Shared mailer fields reused by every ActionMailer event struct.
# AUTO-GENERATED (see file header): edit the schema/template, not this file,
# or changes will be lost on regeneration.
- 2
class BaseFields < T::Struct
- 2
extend T::Sig
- 2
const :to, T.nilable(T::Array[String]), default: nil
- 2
const :from, T.nilable(String), default: nil
- 2
const :subject, T.nilable(String), default: nil
- 2
const :message_id, T.nilable(String), default: nil
- 2
const :mailer_class, T.nilable(String), default: nil
- 2
const :mailer_action, T.nilable(String), default: nil
- 2
const :attachment_count, T.nilable(Integer), default: nil
# Typed shape of the hash produced by #to_kwargs.
- 2
Kwargs = T.type_alias do
{
- 1
to: T.nilable(T::Array[String]),
from: T.nilable(String),
subject: T.nilable(String),
message_id: T.nilable(String),
mailer_class: T.nilable(String),
mailer_action: T.nilable(String),
attachment_count: T.nilable(Integer)
}
end
# Export all base fields as a keyword-argument hash (e.g. for splatting
# into an event struct constructor).
- 3
sig { returns(Kwargs) }
- 2
def to_kwargs
{
- 9
to: to,
from: from,
subject: subject,
message_id: message_id,
mailer_class: mailer_class,
mailer_action: mailer_action,
attachment_count: attachment_count
}
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class ActionMailer
# Structured log record for a delivered email (Event::Delivered).
# AUTO-GENERATED (see file header): edit the schema/template, not this file.
- 2
class Delivered < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Mailer, default: Source::Mailer
- 2
const :event, Event, default: Event::Delivered
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
- 2
const :to, T.nilable(T::Array[String]), default: nil
- 2
const :from, T.nilable(String), default: nil
- 2
const :subject, T.nilable(String), default: nil
- 2
const :message_id, T.nilable(String), default: nil
- 2
const :mailer_class, T.nilable(String), default: nil
- 2
const :mailer_action, T.nilable(String), default: nil
- 2
const :attachment_count, T.nilable(Integer), default: nil
# Additional data
- 2
include LogStruct::Log::Interfaces::AdditionalDataField
- 2
const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
- 2
include LogStruct::Log::Shared::MergeAdditionalDataFields
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
# Serialize only non-nil event-specific fields; shared fields and
# additional_data are presumably merged by the included
# SerializeCommon/MergeAdditionalDataFields mixins — TODO confirm.
- 3
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
- 1
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
- 1
h[LogField::To] = to unless to.nil?
- 1
h[LogField::From] = from unless from.nil?
- 1
h[LogField::Subject] = subject unless subject.nil?
- 1
h[LogField::MessageId] = message_id unless message_id.nil?
- 1
h[LogField::MailerClass] = mailer_class unless mailer_class.nil?
- 1
h[LogField::MailerAction] = mailer_action unless mailer_action.nil?
- 1
h[LogField::AttachmentCount] = attachment_count unless attachment_count.nil?
- 1
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class ActionMailer
# Structured log record for an email delivery attempt (Event::Delivery).
# AUTO-GENERATED (see file header): edit the schema/template, not this file.
- 2
class Delivery < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Mailer, default: Source::Mailer
- 2
const :event, Event, default: Event::Delivery
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
- 2
const :to, T.nilable(T::Array[String]), default: nil
- 2
const :from, T.nilable(String), default: nil
- 2
const :subject, T.nilable(String), default: nil
- 2
const :message_id, T.nilable(String), default: nil
- 2
const :mailer_class, T.nilable(String), default: nil
- 2
const :mailer_action, T.nilable(String), default: nil
- 2
const :attachment_count, T.nilable(Integer), default: nil
# Additional data
- 2
include LogStruct::Log::Interfaces::AdditionalDataField
- 2
const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
- 2
include LogStruct::Log::Shared::MergeAdditionalDataFields
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
# Serialize only non-nil event-specific fields; shared fields and
# additional_data are presumably merged by the included
# SerializeCommon/MergeAdditionalDataFields mixins — TODO confirm.
- 3
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
- 1
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
- 1
h[LogField::To] = to unless to.nil?
- 1
h[LogField::From] = from unless from.nil?
- 1
h[LogField::Subject] = subject unless subject.nil?
- 1
h[LogField::MessageId] = message_id unless message_id.nil?
- 1
h[LogField::MailerClass] = mailer_class unless mailer_class.nil?
- 1
h[LogField::MailerAction] = mailer_action unless mailer_action.nil?
- 1
h[LogField::AttachmentCount] = attachment_count unless attachment_count.nil?
- 1
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class ActionMailer
# Structured log record for a mailer error (Event::Error), carrying the
# exception class, message, and backtrace alongside the mailer fields.
# AUTO-GENERATED (see file header): edit the schema/template, not this file.
- 2
class Error < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Mailer, default: Source::Mailer
- 2
const :event, Event, default: Event::Error
- 3
const :timestamp, Time, factory: -> { Time.now }
# NOTE(review): default level is Info for an *error* event — the sibling
# event structs also use Info, but Level::Error seems more appropriate here;
# likely a schema defect. Fix in the generator schema, not this file.
- 2
const :level, Level, default: Level::Info
- 2
const :to, T.nilable(T::Array[String]), default: nil
- 2
const :from, T.nilable(String), default: nil
- 2
const :subject, T.nilable(String), default: nil
- 2
const :message_id, T.nilable(String), default: nil
- 2
const :mailer_class, T.nilable(String), default: nil
- 2
const :mailer_action, T.nilable(String), default: nil
- 2
const :attachment_count, T.nilable(Integer), default: nil
# Event-specific fields
- 2
const :error_class, T.class_of(StandardError)
- 2
const :message, String
- 2
const :backtrace, T.nilable(T::Array[String]), default: nil
# Additional data
- 2
include LogStruct::Log::Interfaces::AdditionalDataField
- 2
const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
- 2
include LogStruct::Log::Shared::MergeAdditionalDataFields
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
# Serialize event-specific fields (nil-valued optional fields omitted;
# error_class/message always present).
- 3
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
- 2
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
- 2
h[LogField::To] = to unless to.nil?
- 2
h[LogField::From] = from unless from.nil?
- 2
h[LogField::Subject] = subject unless subject.nil?
- 2
h[LogField::MessageId] = message_id unless message_id.nil?
- 2
h[LogField::MailerClass] = mailer_class unless mailer_class.nil?
- 2
h[LogField::MailerAction] = mailer_action unless mailer_action.nil?
- 2
h[LogField::AttachmentCount] = attachment_count unless attachment_count.nil?
- 2
h[LogField::ErrorClass] = error_class
- 2
h[LogField::Message] = message
- 2
h[LogField::Backtrace] = backtrace unless backtrace.nil?
- 2
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/source_parent.rb.erb
- 2
require_relative "active_job/enqueue"
- 2
require_relative "active_job/schedule"
- 2
require_relative "active_job/start"
- 2
require_relative "active_job/finish"
- 2
module LogStruct
- 2
module Log
- 2
class ActiveJob
# Shared job fields reused by every ActiveJob event struct.
# AUTO-GENERATED (see file header): edit the schema/template, not this file.
- 2
class BaseFields < T::Struct
- 2
extend T::Sig
- 2
const :job_id, String
- 2
const :job_class, String
- 2
const :queue_name, T.nilable(Symbol), default: nil
- 2
const :arguments, T.nilable(T::Array[T.untyped]), default: nil
- 2
const :executions, T.nilable(Integer), default: nil
- 2
const :provider_job_id, T.nilable(String), default: nil
# Typed shape of the hash produced by #to_kwargs.
- 2
Kwargs = T.type_alias do
{
job_id: String,
job_class: String,
queue_name: T.nilable(Symbol),
arguments: T.nilable(T::Array[T.untyped]),
executions: T.nilable(Integer),
provider_job_id: T.nilable(String)
}
end
# Export all base fields as a keyword-argument hash (e.g. for splatting
# into an event struct constructor).
- 2
sig { returns(Kwargs) }
- 2
def to_kwargs
{
job_id: job_id,
job_class: job_class,
queue_name: queue_name,
arguments: arguments,
executions: executions,
provider_job_id: provider_job_id
}
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class ActiveJob
# Structured log record for a job enqueue (Event::Enqueue).
# AUTO-GENERATED (see file header): edit the schema/template, not this file.
- 2
class Enqueue < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Job, default: Source::Job
- 2
const :event, Event, default: Event::Enqueue
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
- 2
const :job_id, String
- 2
const :job_class, String
- 2
const :queue_name, T.nilable(Symbol), default: nil
- 2
const :arguments, T.nilable(T::Array[T.untyped]), default: nil
- 2
const :executions, T.nilable(Integer), default: nil
- 2
const :provider_job_id, T.nilable(String), default: nil
# Event-specific fields
- 2
const :retries, T.nilable(Integer), default: nil
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
# Serialize job fields (nil-valued optional fields omitted); shared fields
# are presumably merged by the included SerializeCommon mixin — TODO confirm.
- 2
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
h[LogField::JobId] = job_id
h[LogField::JobClass] = job_class
h[LogField::QueueName] = queue_name unless queue_name.nil?
h[LogField::Arguments] = arguments unless arguments.nil?
h[LogField::Executions] = executions unless executions.nil?
h[LogField::ProviderJobId] = provider_job_id unless provider_job_id.nil?
h[LogField::Retries] = retries unless retries.nil?
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class ActiveJob
# Structured log record for a completed job run (Event::Finish) with its
# duration and finish time.
# AUTO-GENERATED (see file header): edit the schema/template, not this file.
- 2
class Finish < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Job, default: Source::Job
- 2
const :event, Event, default: Event::Finish
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
- 2
const :job_id, String
- 2
const :job_class, String
- 2
const :queue_name, T.nilable(Symbol), default: nil
- 2
const :arguments, T.nilable(T::Array[T.untyped]), default: nil
- 2
const :executions, T.nilable(Integer), default: nil
- 2
const :provider_job_id, T.nilable(String), default: nil
# Event-specific fields
- 2
const :duration_ms, Float
- 2
const :finished_at, Time
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
# Serialize job fields (nil-valued optional fields omitted); shared fields
# are presumably merged by the included SerializeCommon mixin — TODO confirm.
- 2
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
h[LogField::JobId] = job_id
h[LogField::JobClass] = job_class
h[LogField::QueueName] = queue_name unless queue_name.nil?
h[LogField::Arguments] = arguments unless arguments.nil?
h[LogField::Executions] = executions unless executions.nil?
h[LogField::ProviderJobId] = provider_job_id unless provider_job_id.nil?
h[LogField::DurationMs] = duration_ms
h[LogField::FinishedAt] = finished_at
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class ActiveJob
# Structured log record for a job scheduled to run later (Event::Schedule).
# AUTO-GENERATED (see file header): edit the schema/template, not this file.
- 2
class Schedule < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Job, default: Source::Job
- 2
const :event, Event, default: Event::Schedule
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
- 2
const :job_id, String
- 2
const :job_class, String
- 2
const :queue_name, T.nilable(Symbol), default: nil
- 2
const :arguments, T.nilable(T::Array[T.untyped]), default: nil
- 2
const :executions, T.nilable(Integer), default: nil
- 2
const :provider_job_id, T.nilable(String), default: nil
# Event-specific fields
- 2
const :scheduled_at, Time
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
# Serialize job fields (nil-valued optional fields omitted); shared fields
# are presumably merged by the included SerializeCommon mixin — TODO confirm.
- 2
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
h[LogField::JobId] = job_id
h[LogField::JobClass] = job_class
h[LogField::QueueName] = queue_name unless queue_name.nil?
h[LogField::Arguments] = arguments unless arguments.nil?
h[LogField::Executions] = executions unless executions.nil?
h[LogField::ProviderJobId] = provider_job_id unless provider_job_id.nil?
h[LogField::ScheduledAt] = scheduled_at
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class ActiveJob
# Structured log record for a job starting execution (Event::Start).
# AUTO-GENERATED (see file header): edit the schema/template, not this file.
- 2
class Start < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Job, default: Source::Job
- 2
const :event, Event, default: Event::Start
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
- 2
const :job_id, String
- 2
const :job_class, String
- 2
const :queue_name, T.nilable(Symbol), default: nil
- 2
const :arguments, T.nilable(T::Array[T.untyped]), default: nil
- 2
const :executions, T.nilable(Integer), default: nil
- 2
const :provider_job_id, T.nilable(String), default: nil
# Event-specific fields
- 2
const :started_at, Time
- 2
const :attempt, T.nilable(Integer), default: nil
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
# Serialize job fields (nil-valued optional fields omitted); shared fields
# are presumably merged by the included SerializeCommon mixin — TODO confirm.
- 2
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
h[LogField::JobId] = job_id
h[LogField::JobClass] = job_class
h[LogField::QueueName] = queue_name unless queue_name.nil?
h[LogField::Arguments] = arguments unless arguments.nil?
h[LogField::Executions] = executions unless executions.nil?
h[LogField::ProviderJobId] = provider_job_id unless provider_job_id.nil?
h[LogField::StartedAt] = started_at
h[LogField::Attempt] = attempt unless attempt.nil?
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../enums/source"
- 2
require_relative "../enums/event"
- 2
require_relative "../enums/level"
- 2
require_relative "../enums/log_field"
- 2
module LogStruct
- 2
module Log
# Structured log record for an ActiveModelSerializers render
# (Event::Generate under Source::Rails).
# AUTO-GENERATED (see file header): edit the schema/template, not this file.
- 2
class ActiveModelSerializers < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Rails, default: Source::Rails
- 2
const :event, Event, default: Event::Generate
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
# Event-specific fields
- 2
const :message, String
- 2
const :serializer, T.nilable(String), default: nil
- 2
const :adapter, T.nilable(String), default: nil
- 2
const :resource_class, T.nilable(String), default: nil
- 2
const :duration_ms, Float
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
# Serialize event-specific fields (nil-valued optional fields omitted);
# shared fields are presumably merged by SerializeCommon — TODO confirm.
- 3
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
- 1
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
- 1
h[LogField::Message] = message
- 1
h[LogField::Serializer] = serializer unless serializer.nil?
- 1
h[LogField::Adapter] = adapter unless adapter.nil?
- 1
h[LogField::ResourceClass] = resource_class unless resource_class.nil?
- 1
h[LogField::DurationMs] = duration_ms
- 1
h
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/source_parent.rb.erb
- 2
require_relative "active_storage/upload"
- 2
require_relative "active_storage/download"
- 2
require_relative "active_storage/delete"
- 2
require_relative "active_storage/metadata"
- 2
require_relative "active_storage/exist"
- 2
require_relative "active_storage/stream"
- 2
require_relative "active_storage/url"
- 2
module LogStruct
- 2
module Log
- 2
class ActiveStorage
# Shared storage fields reused by every ActiveStorage event struct.
# AUTO-GENERATED (see file header): edit the schema/template, not this file.
- 2
class BaseFields < T::Struct
- 2
extend T::Sig
- 2
const :storage, Symbol
- 2
const :file_id, String
# Typed shape of the hash produced by #to_kwargs.
- 2
Kwargs = T.type_alias do
{
storage: Symbol,
file_id: String
}
end
# Export all base fields as a keyword-argument hash (e.g. for splatting
# into an event struct constructor).
- 2
sig { returns(Kwargs) }
- 2
def to_kwargs
{
storage: storage,
file_id: file_id
}
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class ActiveStorage
# Structured log record for a storage delete (Event::Delete).
# AUTO-GENERATED (see file header): edit the schema/template, not this file.
- 2
class Delete < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Storage, default: Source::Storage
- 2
const :event, Event, default: Event::Delete
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
- 2
const :storage, Symbol
- 2
const :file_id, String
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
# Serialize storage fields; shared fields are presumably merged by the
# included SerializeCommon mixin — TODO confirm.
- 2
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
h[LogField::Storage] = storage
h[LogField::FileId] = file_id
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class ActiveStorage
# Structured log record for a storage download (Event::Download), with
# optional filename, byte range, and duration.
# AUTO-GENERATED (see file header): edit the schema/template, not this file.
- 2
class Download < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Storage, default: Source::Storage
- 2
const :event, Event, default: Event::Download
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
- 2
const :storage, Symbol
- 2
const :file_id, String
# Event-specific fields
- 2
const :filename, T.nilable(String), default: nil
- 2
const :range, T.nilable(String), default: nil
- 2
const :duration_ms, T.nilable(Float), default: nil
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
# Serialize storage fields (nil-valued optional fields omitted); shared
# fields are presumably merged by SerializeCommon — TODO confirm.
- 2
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
h[LogField::Storage] = storage
h[LogField::FileId] = file_id
h[LogField::Filename] = filename unless filename.nil?
h[LogField::Range] = range unless range.nil?
h[LogField::DurationMs] = duration_ms unless duration_ms.nil?
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
  module Log
    class ActiveStorage
      # Structured log entry for an ActiveStorage existence check.
      # Generated from the log-source schema.
      class Exist < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Storage, default: Source::Storage
        const :event, Event, default: Event::Exist
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info
        const :storage, Symbol
        const :file_id, String
        # Event-specific fields
        const :exist, T.nilable(T::Boolean), default: nil

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Event-specific payload keyed by LogField; a nil `exist` is
        # omitted while an explicit false is kept (compact only drops nil).
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          fields = {
            LogField::Storage => storage,
            LogField::FileId => file_id,
            LogField::Exist => exist
          }.compact
          T.let(fields, T::Hash[LogStruct::LogField, T.untyped])
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
  module Log
    class ActiveStorage
      # Structured log entry for an ActiveStorage metadata read.
      # Generated from the log-source schema.
      class Metadata < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Storage, default: Source::Storage
        const :event, Event, default: Event::Metadata
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info
        const :storage, Symbol
        const :file_id, String
        # Event-specific fields
        const :metadata, T.nilable(T::Hash[String, T.untyped]), default: nil

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Event-specific payload keyed by LogField; nil optionals are omitted.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          fields = {
            LogField::Storage => storage,
            LogField::FileId => file_id,
            LogField::Metadata => metadata
          }.compact
          T.let(fields, T::Hash[LogStruct::LogField, T.untyped])
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
  module Log
    class ActiveStorage
      # Structured log entry for an ActiveStorage streaming operation.
      # Generated from the log-source schema.
      class Stream < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Storage, default: Source::Storage
        const :event, Event, default: Event::Stream
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info
        const :storage, Symbol
        const :file_id, String
        # Event-specific fields
        const :prefix, T.nilable(String), default: nil

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Event-specific payload keyed by LogField; nil optionals are omitted.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          fields = {
            LogField::Storage => storage,
            LogField::FileId => file_id,
            LogField::Prefix => prefix
          }.compact
          T.let(fields, T::Hash[LogStruct::LogField, T.untyped])
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
  module Log
    class ActiveStorage
      # Structured log entry for an ActiveStorage upload operation.
      # Generated from the log-source schema.
      class Upload < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Storage, default: Source::Storage
        const :event, Event, default: Event::Upload
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info
        const :storage, Symbol
        const :file_id, String
        # Event-specific fields
        const :filename, T.nilable(String), default: nil
        const :mime_type, T.nilable(String), default: nil
        const :size, T.nilable(Integer), default: nil
        const :metadata, T.nilable(T::Hash[String, T.untyped]), default: nil
        const :duration_ms, T.nilable(Float), default: nil
        const :checksum, T.nilable(String), default: nil

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Event-specific payload keyed by LogField; nil optionals are omitted.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          fields = {
            LogField::Storage => storage,
            LogField::FileId => file_id,
            LogField::Filename => filename,
            LogField::MimeType => mime_type,
            LogField::Size => size,
            LogField::Metadata => metadata,
            LogField::DurationMs => duration_ms,
            LogField::Checksum => checksum
          }.compact
          T.let(fields, T::Hash[LogStruct::LogField, T.untyped])
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
  module Log
    class ActiveStorage
      # Structured log entry for an ActiveStorage URL generation event.
      # Generated from the log-source schema.
      class Url < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Storage, default: Source::Storage
        const :event, Event, default: Event::Url
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info
        const :storage, Symbol
        const :file_id, String
        # Event-specific fields
        const :url, String

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Event-specific payload keyed by LogField (all fields required).
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          fields = {
            LogField::Storage => storage,
            LogField::FileId => file_id,
            LogField::Url => url
          }
          T.let(fields, T::Hash[LogStruct::LogField, T.untyped])
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../enums/source"
- 2
require_relative "../enums/event"
- 2
require_relative "../enums/level"
- 2
require_relative "../enums/log_field"
- 2
module LogStruct
  module Log
    # Structured log entry for an Ahoy analytics event, recorded as an
    # application-sourced log. Generated from the log-source schema.
    class Ahoy < T::Struct
      extend T::Sig

      # Shared/common fields
      const :source, Source::App, default: Source::App
      const :event, Event, default: Event::Log
      const :timestamp, Time, factory: -> { Time.now }
      const :level, Level, default: Level::Info
      # Event-specific fields
      const :message, String
      const :ahoy_event, String
      const :properties, T.nilable(T::Hash[Symbol, T.untyped]), default: nil

      # Serialize shared fields
      include LogStruct::Log::Interfaces::CommonFields
      include LogStruct::Log::Shared::SerializeCommon

      # Event-specific payload keyed by LogField; nil properties are omitted.
      sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
      def to_h
        fields = {
          LogField::Message => message,
          LogField::AhoyEvent => ahoy_event,
          LogField::Properties => properties
        }.compact
        T.let(fields, T::Hash[LogStruct::LogField, T.untyped])
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/source_parent.rb.erb
- 2
require_relative "carrierwave/upload"
- 2
require_relative "carrierwave/delete"
- 2
require_relative "carrierwave/download"
- 2
module LogStruct
- 2
module Log
- 2
class CarrierWave
- 2
class BaseFields < T::Struct
- 2
extend T::Sig
- 2
const :storage, Symbol
- 2
const :file_id, String
- 2
const :uploader, T.nilable(String), default: nil
- 2
const :model, T.nilable(String), default: nil
- 2
const :mount_point, T.nilable(String), default: nil
- 2
const :version, T.nilable(String), default: nil
- 2
const :store_path, T.nilable(String), default: nil
- 2
const :extension, T.nilable(String), default: nil
- 2
Kwargs = T.type_alias do
{
storage: Symbol,
file_id: String,
uploader: T.nilable(String),
model: T.nilable(String),
mount_point: T.nilable(String),
version: T.nilable(String),
store_path: T.nilable(String),
extension: T.nilable(String)
}
end
- 2
sig { returns(Kwargs) }
- 2
def to_kwargs
{
storage: storage,
file_id: file_id,
uploader: uploader,
model: model,
mount_point: mount_point,
version: version,
store_path: store_path,
extension: extension
}
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
  module Log
    class CarrierWave
      # Structured log entry for a CarrierWave delete operation.
      # Generated from the log-source schema.
      class Delete < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::CarrierWave, default: Source::CarrierWave
        const :event, Event, default: Event::Delete
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info
        const :storage, Symbol
        const :file_id, String
        const :uploader, T.nilable(String), default: nil
        const :model, T.nilable(String), default: nil
        const :mount_point, T.nilable(String), default: nil
        const :version, T.nilable(String), default: nil
        const :store_path, T.nilable(String), default: nil
        const :extension, T.nilable(String), default: nil

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Event-specific payload keyed by LogField; nil optionals are omitted.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          fields = {
            LogField::Storage => storage,
            LogField::FileId => file_id,
            LogField::Uploader => uploader,
            LogField::Model => model,
            LogField::MountPoint => mount_point,
            LogField::Version => version,
            LogField::StorePath => store_path,
            LogField::Extension => extension
          }.compact
          T.let(fields, T::Hash[LogStruct::LogField, T.untyped])
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
  module Log
    class CarrierWave
      # Structured log entry for a CarrierWave download operation.
      # Generated from the log-source schema.
      class Download < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::CarrierWave, default: Source::CarrierWave
        const :event, Event, default: Event::Download
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info
        const :storage, Symbol
        const :file_id, String
        const :uploader, T.nilable(String), default: nil
        const :model, T.nilable(String), default: nil
        const :mount_point, T.nilable(String), default: nil
        const :version, T.nilable(String), default: nil
        const :store_path, T.nilable(String), default: nil
        const :extension, T.nilable(String), default: nil
        # Event-specific fields
        const :filename, T.nilable(String), default: nil
        const :mime_type, T.nilable(String), default: nil
        const :size, T.nilable(Integer), default: nil

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Event-specific payload keyed by LogField; nil optionals are omitted.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          fields = {
            LogField::Storage => storage,
            LogField::FileId => file_id,
            LogField::Uploader => uploader,
            LogField::Model => model,
            LogField::MountPoint => mount_point,
            LogField::Version => version,
            LogField::StorePath => store_path,
            LogField::Extension => extension,
            LogField::Filename => filename,
            LogField::MimeType => mime_type,
            LogField::Size => size
          }.compact
          T.let(fields, T::Hash[LogStruct::LogField, T.untyped])
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
  module Log
    class CarrierWave
      # Structured log entry for a CarrierWave upload operation.
      # Generated from the log-source schema.
      class Upload < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::CarrierWave, default: Source::CarrierWave
        const :event, Event, default: Event::Upload
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info
        const :storage, Symbol
        const :file_id, String
        const :uploader, T.nilable(String), default: nil
        const :model, T.nilable(String), default: nil
        const :mount_point, T.nilable(String), default: nil
        const :version, T.nilable(String), default: nil
        const :store_path, T.nilable(String), default: nil
        const :extension, T.nilable(String), default: nil
        # Event-specific fields
        const :filename, T.nilable(String), default: nil
        const :mime_type, T.nilable(String), default: nil
        const :size, T.nilable(Integer), default: nil
        const :metadata, T.nilable(T::Hash[String, T.untyped]), default: nil
        const :duration_ms, T.nilable(Float), default: nil

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Event-specific payload keyed by LogField; nil optionals are omitted.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          fields = {
            LogField::Storage => storage,
            LogField::FileId => file_id,
            LogField::Uploader => uploader,
            LogField::Model => model,
            LogField::MountPoint => mount_point,
            LogField::Version => version,
            LogField::StorePath => store_path,
            LogField::Extension => extension,
            LogField::Filename => filename,
            LogField::MimeType => mime_type,
            LogField::Size => size,
            LogField::Metadata => metadata,
            LogField::DurationMs => duration_ms
          }.compact
          T.let(fields, T::Hash[LogStruct::LogField, T.untyped])
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/source_parent.rb.erb
- 2
require_relative "dotenv/load"
- 2
require_relative "dotenv/update"
- 2
require_relative "dotenv/save"
- 2
require_relative "dotenv/restore"
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
  module Log
    class Dotenv
      # Structured log entry emitted when a dotenv file is loaded.
      # Generated from the log-source schema.
      class Load < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Dotenv, default: Source::Dotenv
        const :event, Event, default: Event::Load
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info
        # Event-specific fields
        const :file, String

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Event-specific payload keyed by LogField (file is required).
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          T.let({ LogField::File => file }, T::Hash[LogStruct::LogField, T.untyped])
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
  module Log
    class Dotenv
      # Structured log entry emitted when dotenv variables are restored
      # from a snapshot. Generated from the log-source schema.
      class Restore < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Dotenv, default: Source::Dotenv
        const :event, Event, default: Event::Restore
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info
        # Event-specific fields
        const :vars, T::Array[String]

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Event-specific payload keyed by LogField (vars is required).
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          T.let({ LogField::Vars => vars }, T::Hash[LogStruct::LogField, T.untyped])
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
  module Log
    class Dotenv
      # Structured log entry emitted when a dotenv snapshot is saved.
      # Generated from the log-source schema.
      class Save < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Dotenv, default: Source::Dotenv
        const :event, Event, default: Event::Save
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info
        # Event-specific fields
        const :snapshot, T::Boolean

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Event-specific payload keyed by LogField (snapshot is required,
        # so it is always emitted — including when false).
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          T.let({ LogField::Snapshot => snapshot }, T::Hash[LogStruct::LogField, T.untyped])
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
  module Log
    class Dotenv
      # Structured log entry emitted when dotenv variables are updated.
      # Generated from the log-source schema.
      class Update < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Dotenv, default: Source::Dotenv
        const :event, Event, default: Event::Update
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info
        # Event-specific fields
        const :vars, T::Array[String]

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Event-specific payload keyed by LogField (vars is required).
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          T.let({ LogField::Vars => vars }, T::Hash[LogStruct::LogField, T.untyped])
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../enums/source"
- 2
require_relative "../enums/event"
- 2
require_relative "../enums/level"
- 2
require_relative "../enums/log_field"
- 2
module LogStruct
  module Log
    # Structured log entry for a handled exception from any source.
    # Generated from the log-source schema.
    #
    # NOTE(review): the default level is Info even though the event is
    # Error — confirm this matches the schema/generator intent.
    class Error < T::Struct
      extend T::Sig

      # Shared/common fields
      const :source, Source
      const :event, Event, default: Event::Error
      const :timestamp, Time, factory: -> { Time.now }
      const :level, Level, default: Level::Info
      # Event-specific fields
      const :error_class, T.class_of(StandardError)
      const :message, String
      const :backtrace, T.nilable(T::Array[String]), default: nil

      # Additional data
      include LogStruct::Log::Interfaces::AdditionalDataField
      const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
      include LogStruct::Log::Shared::MergeAdditionalDataFields

      # Serialize shared fields
      include LogStruct::Log::Interfaces::CommonFields
      include LogStruct::Log::Shared::SerializeCommon

      # Event-specific payload keyed by LogField; a nil backtrace is omitted.
      sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
      def to_h
        fields = {
          LogField::ErrorClass => error_class,
          LogField::Message => message,
          LogField::Backtrace => backtrace
        }.compact
        T.let(fields, T::Hash[LogStruct::LogField, T.untyped])
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/source_parent.rb.erb
- 2
require_relative "good_job/log"
- 2
require_relative "good_job/enqueue"
- 2
require_relative "good_job/start"
- 2
require_relative "good_job/finish"
- 2
require_relative "good_job/error"
- 2
require_relative "good_job/schedule"
- 2
module LogStruct
  module Log
    class GoodJob
      # Fields shared by every GoodJob log event (Log/Enqueue/Start/
      # Finish/Error/Schedule): job identity, queue, arguments, and the
      # execution counter. Auto-generated from the log-source schema.
      class BaseFields < T::Struct
        extend T::Sig

        const :job_id, T.nilable(String), default: nil
        const :job_class, T.nilable(String), default: nil
        const :queue_name, T.nilable(Symbol), default: nil
        const :arguments, T.nilable(T::Array[T.untyped]), default: nil
        const :executions, T.nilable(Integer), default: nil

        # Shape type describing the keyword-argument form of these fields,
        # used to type-check to_kwargs callers.
        Kwargs = T.type_alias do
          {
            job_id: T.nilable(String),
            job_class: T.nilable(String),
            queue_name: T.nilable(Symbol),
            arguments: T.nilable(T::Array[T.untyped]),
            executions: T.nilable(Integer)
          }
        end

        # Returns every field as a keyword-argument hash (nil values are
        # included, matching the Kwargs shape) — presumably splatted into
        # event-struct constructors by callers; confirm against call sites.
        sig { returns(Kwargs) }
        def to_kwargs
          {
            job_id: job_id,
            job_class: job_class,
            queue_name: queue_name,
            arguments: arguments,
            executions: executions
          }
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
  module Log
    class GoodJob
      # Structured log entry emitted when a GoodJob job is enqueued.
      # Generated from the log-source schema.
      class Enqueue < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Job, default: Source::Job
        const :event, Event, default: Event::Enqueue
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info
        const :job_id, T.nilable(String), default: nil
        const :job_class, T.nilable(String), default: nil
        const :queue_name, T.nilable(Symbol), default: nil
        const :arguments, T.nilable(T::Array[T.untyped]), default: nil
        const :executions, T.nilable(Integer), default: nil
        # Event-specific fields
        const :duration_ms, Float
        const :scheduled_at, T.nilable(Time), default: nil
        const :enqueue_caller, T.nilable(String), default: nil

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Event-specific payload keyed by LogField; nil optionals are
        # omitted while the required duration_ms is always present.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          fields = {
            LogField::JobId => job_id,
            LogField::JobClass => job_class,
            LogField::QueueName => queue_name,
            LogField::Arguments => arguments,
            LogField::Executions => executions,
            LogField::DurationMs => duration_ms,
            LogField::ScheduledAt => scheduled_at,
            LogField::EnqueueCaller => enqueue_caller
          }.compact
          T.let(fields, T::Hash[LogStruct::LogField, T.untyped])
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
  module Log
    class GoodJob
      # Structured log entry emitted when a GoodJob job raises an error.
      # Generated from the log-source schema.
      #
      # NOTE(review): the default level is Info for an error event —
      # confirm this matches the schema/generator intent.
      class Error < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Job, default: Source::Job
        const :event, Event, default: Event::Error
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info
        const :job_id, T.nilable(String), default: nil
        const :job_class, T.nilable(String), default: nil
        const :queue_name, T.nilable(Symbol), default: nil
        const :arguments, T.nilable(T::Array[T.untyped]), default: nil
        const :executions, T.nilable(Integer), default: nil
        # Event-specific fields
        const :error_class, String
        const :error_message, String
        const :duration_ms, T.nilable(Float), default: nil
        const :process_id, Integer
        const :thread_id, String
        const :exception_executions, T.nilable(Integer), default: nil
        const :backtrace, T.nilable(T::Array[String]), default: nil

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Event-specific payload keyed by LogField; nil optionals are
        # omitted, required fields are always present.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          fields = {
            LogField::JobId => job_id,
            LogField::JobClass => job_class,
            LogField::QueueName => queue_name,
            LogField::Arguments => arguments,
            LogField::Executions => executions,
            LogField::ErrorClass => error_class,
            LogField::ErrorMessage => error_message,
            LogField::DurationMs => duration_ms,
            LogField::ProcessId => process_id,
            LogField::ThreadId => thread_id,
            LogField::ExceptionExecutions => exception_executions,
            LogField::Backtrace => backtrace
          }.compact
          T.let(fields, T::Hash[LogStruct::LogField, T.untyped])
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class GoodJob
- 2
class Finish < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Job, default: Source::Job
- 2
const :event, Event, default: Event::Finish
- 5
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
- 2
const :job_id, T.nilable(String), default: nil
- 2
const :job_class, T.nilable(String), default: nil
- 2
const :queue_name, T.nilable(Symbol), default: nil
- 2
const :arguments, T.nilable(T::Array[T.untyped]), default: nil
- 2
const :executions, T.nilable(Integer), default: nil
# Event-specific fields
- 2
const :duration_ms, Float
- 2
const :finished_at, Time
- 2
const :process_id, Integer
- 2
const :thread_id, String
- 2
const :result, T.nilable(String), default: nil
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
- 3
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
- 3
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
- 3
h[LogField::JobId] = job_id unless job_id.nil?
- 3
h[LogField::JobClass] = job_class unless job_class.nil?
- 3
h[LogField::QueueName] = queue_name unless queue_name.nil?
- 3
h[LogField::Arguments] = arguments unless arguments.nil?
- 3
h[LogField::Executions] = executions unless executions.nil?
- 3
h[LogField::DurationMs] = duration_ms
- 3
h[LogField::FinishedAt] = finished_at
- 3
h[LogField::ProcessId] = process_id
- 3
h[LogField::ThreadId] = thread_id
- 3
h[LogField::Result] = result unless result.nil?
- 3
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class GoodJob
- 2
class Log < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Job, default: Source::Job
- 2
const :event, Event, default: Event::Log
- 17
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
- 2
const :job_id, T.nilable(String), default: nil
- 2
const :job_class, T.nilable(String), default: nil
- 2
const :queue_name, T.nilable(Symbol), default: nil
- 2
const :arguments, T.nilable(T::Array[T.untyped]), default: nil
- 2
const :executions, T.nilable(Integer), default: nil
# Event-specific fields
- 2
const :message, String
- 2
const :process_id, Integer
- 2
const :thread_id, String
- 2
const :scheduled_at, T.nilable(Time), default: nil
- 2
const :priority, T.nilable(Integer), default: nil
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
- 3
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
- 21
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
- 21
h[LogField::JobId] = job_id unless job_id.nil?
- 21
h[LogField::JobClass] = job_class unless job_class.nil?
- 21
h[LogField::QueueName] = queue_name unless queue_name.nil?
- 21
h[LogField::Arguments] = arguments unless arguments.nil?
- 21
h[LogField::Executions] = executions unless executions.nil?
- 21
h[LogField::Message] = message
- 21
h[LogField::ProcessId] = process_id
- 21
h[LogField::ThreadId] = thread_id
- 21
h[LogField::ScheduledAt] = scheduled_at unless scheduled_at.nil?
- 21
h[LogField::Priority] = priority unless priority.nil?
- 21
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class GoodJob
- 2
class Schedule < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Job, default: Source::Job
- 2
const :event, Event, default: Event::Schedule
- 4
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
- 2
const :job_id, T.nilable(String), default: nil
- 2
const :job_class, T.nilable(String), default: nil
- 2
const :queue_name, T.nilable(Symbol), default: nil
- 2
const :arguments, T.nilable(T::Array[T.untyped]), default: nil
- 2
const :executions, T.nilable(Integer), default: nil
# Event-specific fields
- 2
const :duration_ms, Float
- 2
const :scheduled_at, Time
- 2
const :priority, T.nilable(Integer), default: nil
- 2
const :cron_key, T.nilable(String), default: nil
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
- 3
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
- 2
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
- 2
h[LogField::JobId] = job_id unless job_id.nil?
- 2
h[LogField::JobClass] = job_class unless job_class.nil?
- 2
h[LogField::QueueName] = queue_name unless queue_name.nil?
- 2
h[LogField::Arguments] = arguments unless arguments.nil?
- 2
h[LogField::Executions] = executions unless executions.nil?
- 2
h[LogField::DurationMs] = duration_ms
- 2
h[LogField::ScheduledAt] = scheduled_at
- 2
h[LogField::Priority] = priority unless priority.nil?
- 2
h[LogField::CronKey] = cron_key unless cron_key.nil?
- 2
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class GoodJob
- 2
class Start < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Job, default: Source::Job
- 2
const :event, Event, default: Event::Start
- 6
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
- 2
const :job_id, T.nilable(String), default: nil
- 2
const :job_class, T.nilable(String), default: nil
- 2
const :queue_name, T.nilable(Symbol), default: nil
- 2
const :arguments, T.nilable(T::Array[T.untyped]), default: nil
- 2
const :executions, T.nilable(Integer), default: nil
# Event-specific fields
- 2
const :process_id, Integer
- 2
const :thread_id, String
- 2
const :wait_ms, T.nilable(Float), default: nil
- 2
const :scheduled_at, T.nilable(Time), default: nil
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
- 3
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
- 2
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
- 2
h[LogField::JobId] = job_id unless job_id.nil?
- 2
h[LogField::JobClass] = job_class unless job_class.nil?
- 2
h[LogField::QueueName] = queue_name unless queue_name.nil?
- 2
h[LogField::Arguments] = arguments unless arguments.nil?
- 2
h[LogField::Executions] = executions unless executions.nil?
- 2
h[LogField::ProcessId] = process_id
- 2
h[LogField::ThreadId] = thread_id
- 2
h[LogField::WaitMs] = wait_ms unless wait_ms.nil?
- 2
h[LogField::ScheduledAt] = scheduled_at unless scheduled_at.nil?
- 2
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
- 2
require "log_struct/shared/interfaces/public_common_fields"
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../enums/source"
- 2
require_relative "../enums/event"
- 2
require_relative "../enums/level"
- 2
require_relative "../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class Plain < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::App, default: Source::App
- 2
const :event, Event, default: Event::Log
- 12
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
# Event-specific fields
- 2
const :message, T.untyped
# Additional data
- 2
include LogStruct::Log::Interfaces::AdditionalDataField
- 2
const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
- 2
include LogStruct::Log::Shared::MergeAdditionalDataFields
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
- 4
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
- 815
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
- 815
h[LogField::Message] = message
- 815
h
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/source_parent.rb.erb
- 2
require_relative "puma/start"
- 2
require_relative "puma/shutdown"
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class Puma
- 2
class Shutdown < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Puma, default: Source::Puma
- 2
const :event, Event, default: Event::Shutdown
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
# Event-specific fields
- 2
const :process_id, T.nilable(Integer), default: nil
# Additional data
- 2
include LogStruct::Log::Interfaces::AdditionalDataField
- 2
const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
- 2
include LogStruct::Log::Shared::MergeAdditionalDataFields
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
- 2
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
h[LogField::ProcessId] = process_id unless process_id.nil?
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class Puma
- 2
class Start < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Puma, default: Source::Puma
- 2
const :event, Event, default: Event::Start
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
# Event-specific fields
- 2
const :mode, T.nilable(String), default: nil
- 2
const :puma_version, T.nilable(String), default: nil
- 2
const :puma_codename, T.nilable(String), default: nil
- 2
const :ruby_version, T.nilable(String), default: nil
- 2
const :min_threads, T.nilable(Integer), default: nil
- 2
const :max_threads, T.nilable(Integer), default: nil
- 2
const :environment, T.nilable(String), default: nil
- 2
const :process_id, T.nilable(Integer), default: nil
- 2
const :listening_addresses, T.nilable(T::Array[String]), default: nil
# Additional data
- 2
include LogStruct::Log::Interfaces::AdditionalDataField
- 2
const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
- 2
include LogStruct::Log::Shared::MergeAdditionalDataFields
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
- 2
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
h[LogField::Mode] = mode unless mode.nil?
h[LogField::PumaVersion] = puma_version unless puma_version.nil?
h[LogField::PumaCodename] = puma_codename unless puma_codename.nil?
h[LogField::RubyVersion] = ruby_version unless ruby_version.nil?
h[LogField::MinThreads] = min_threads unless min_threads.nil?
h[LogField::MaxThreads] = max_threads unless max_threads.nil?
h[LogField::Environment] = environment unless environment.nil?
h[LogField::ProcessId] = process_id unless process_id.nil?
h[LogField::ListeningAddresses] = listening_addresses unless listening_addresses.nil?
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../enums/source"
- 2
require_relative "../enums/event"
- 2
require_relative "../enums/level"
- 2
require_relative "../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class Request < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Rails, default: Source::Rails
- 2
const :event, Event, default: Event::Request
- 3
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
- 2
const :path, T.nilable(String), default: nil
- 2
const :http_method, T.nilable(String), default: nil
- 2
const :source_ip, T.nilable(String), default: nil
- 2
const :user_agent, T.nilable(String), default: nil
- 2
const :referer, T.nilable(String), default: nil
- 2
const :request_id, T.nilable(String), default: nil
# Event-specific fields
- 2
const :format, T.nilable(Symbol), default: nil
- 2
const :controller, T.nilable(String), default: nil
- 2
const :action, T.nilable(String), default: nil
- 2
const :status, T.nilable(Integer), default: nil
- 2
const :duration_ms, T.nilable(Float), default: nil
- 2
const :view, T.nilable(Float), default: nil
- 2
const :database, T.nilable(Float), default: nil
- 2
const :params, T.nilable(T::Hash[Symbol, T.untyped]), default: nil
# Request fields (optional)
- 2
include LogStruct::Log::Interfaces::RequestFields
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
- 2
include LogStruct::Log::Shared::AddRequestFields
- 3
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
- 4
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
- 4
h[LogField::Path] = path unless path.nil?
- 4
h[LogField::HttpMethod] = http_method unless http_method.nil?
- 4
h[LogField::SourceIp] = source_ip unless source_ip.nil?
- 4
h[LogField::UserAgent] = user_agent unless user_agent.nil?
- 4
h[LogField::Referer] = referer unless referer.nil?
- 4
h[LogField::RequestId] = request_id unless request_id.nil?
- 4
h[LogField::Format] = format unless format.nil?
- 4
h[LogField::Controller] = controller unless controller.nil?
- 4
h[LogField::Action] = action unless action.nil?
- 4
h[LogField::Status] = status unless status.nil?
- 4
h[LogField::DurationMs] = duration_ms unless duration_ms.nil?
- 4
h[LogField::View] = view unless view.nil?
- 4
h[LogField::Database] = database unless database.nil?
- 4
h[LogField::Params] = params unless params.nil?
- 4
h
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/source_parent.rb.erb
- 2
require_relative "security/ip_spoof"
- 2
require_relative "security/csrf_violation"
- 2
require_relative "security/blocked_host"
- 2
module LogStruct
- 2
module Log
- 2
class Security
- 2
class BaseFields < T::Struct
- 2
extend T::Sig
- 2
const :path, T.nilable(String), default: nil
- 2
const :http_method, T.nilable(String), default: nil
- 2
const :source_ip, T.nilable(String), default: nil
- 2
const :user_agent, T.nilable(String), default: nil
- 2
const :referer, T.nilable(String), default: nil
- 2
const :request_id, T.nilable(String), default: nil
- 2
Kwargs = T.type_alias do
{
path: T.nilable(String),
http_method: T.nilable(String),
source_ip: T.nilable(String),
user_agent: T.nilable(String),
referer: T.nilable(String),
request_id: T.nilable(String)
}
end
- 2
sig { returns(Kwargs) }
- 2
def to_kwargs
{
path: path,
http_method: http_method,
source_ip: source_ip,
user_agent: user_agent,
referer: referer,
request_id: request_id
}
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class Security
- 2
class BlockedHost < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Security, default: Source::Security
- 2
const :event, Event, default: Event::BlockedHost
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
- 2
const :path, T.nilable(String), default: nil
- 2
const :http_method, T.nilable(String), default: nil
- 2
const :source_ip, T.nilable(String), default: nil
- 2
const :user_agent, T.nilable(String), default: nil
- 2
const :referer, T.nilable(String), default: nil
- 2
const :request_id, T.nilable(String), default: nil
# Event-specific fields
- 2
const :message, T.nilable(String), default: nil
- 2
const :blocked_host, T.nilable(String), default: nil
- 2
const :blocked_hosts, T.nilable(T::Array[String]), default: nil
- 2
const :x_forwarded_for, T.nilable(String), default: nil
- 2
const :allowed_hosts, T.nilable(T::Array[String]), default: nil
- 2
const :allow_ip_hosts, T.nilable(T::Boolean), default: nil
# Additional data
- 2
include LogStruct::Log::Interfaces::AdditionalDataField
- 2
const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
- 2
include LogStruct::Log::Shared::MergeAdditionalDataFields
# Request fields (optional)
- 2
include LogStruct::Log::Interfaces::RequestFields
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
- 2
include LogStruct::Log::Shared::AddRequestFields
- 2
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
h[LogField::Path] = path unless path.nil?
h[LogField::HttpMethod] = http_method unless http_method.nil?
h[LogField::SourceIp] = source_ip unless source_ip.nil?
h[LogField::UserAgent] = user_agent unless user_agent.nil?
h[LogField::Referer] = referer unless referer.nil?
h[LogField::RequestId] = request_id unless request_id.nil?
h[LogField::Message] = message unless message.nil?
h[LogField::BlockedHost] = blocked_host unless blocked_host.nil?
h[LogField::BlockedHosts] = blocked_hosts unless blocked_hosts.nil?
h[LogField::XForwardedFor] = x_forwarded_for unless x_forwarded_for.nil?
h[LogField::AllowedHosts] = allowed_hosts unless allowed_hosts.nil?
h[LogField::AllowIpHosts] = allow_ip_hosts unless allow_ip_hosts.nil?
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class Security
- 2
class CSRFViolation < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Security, default: Source::Security
- 2
const :event, Event, default: Event::CSRFViolation
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
- 2
const :path, T.nilable(String), default: nil
- 2
const :http_method, T.nilable(String), default: nil
- 2
const :source_ip, T.nilable(String), default: nil
- 2
const :user_agent, T.nilable(String), default: nil
- 2
const :referer, T.nilable(String), default: nil
- 2
const :request_id, T.nilable(String), default: nil
# Event-specific fields
- 2
const :message, T.nilable(String), default: nil
# Additional data
- 2
include LogStruct::Log::Interfaces::AdditionalDataField
- 2
const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
- 2
include LogStruct::Log::Shared::MergeAdditionalDataFields
# Request fields (optional)
- 2
include LogStruct::Log::Interfaces::RequestFields
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
- 2
include LogStruct::Log::Shared::AddRequestFields
- 2
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
h[LogField::Path] = path unless path.nil?
h[LogField::HttpMethod] = http_method unless http_method.nil?
h[LogField::SourceIp] = source_ip unless source_ip.nil?
h[LogField::UserAgent] = user_agent unless user_agent.nil?
h[LogField::Referer] = referer unless referer.nil?
h[LogField::RequestId] = request_id unless request_id.nil?
h[LogField::Message] = message unless message.nil?
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class Security
- 2
class IPSpoof < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Security, default: Source::Security
- 2
const :event, Event, default: Event::IPSpoof
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
- 2
const :path, T.nilable(String), default: nil
- 2
const :http_method, T.nilable(String), default: nil
- 2
const :source_ip, T.nilable(String), default: nil
- 2
const :user_agent, T.nilable(String), default: nil
- 2
const :referer, T.nilable(String), default: nil
- 2
const :request_id, T.nilable(String), default: nil
# Event-specific fields
- 2
const :message, T.nilable(String), default: nil
- 2
const :client_ip, T.nilable(String), default: nil
- 2
const :x_forwarded_for, T.nilable(String), default: nil
# Additional data
- 2
include LogStruct::Log::Interfaces::AdditionalDataField
- 2
const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
- 2
include LogStruct::Log::Shared::MergeAdditionalDataFields
# Request fields (optional)
- 2
include LogStruct::Log::Interfaces::RequestFields
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
- 2
include LogStruct::Log::Shared::AddRequestFields
- 2
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
h[LogField::Path] = path unless path.nil?
h[LogField::HttpMethod] = http_method unless http_method.nil?
h[LogField::SourceIp] = source_ip unless source_ip.nil?
h[LogField::UserAgent] = user_agent unless user_agent.nil?
h[LogField::Referer] = referer unless referer.nil?
h[LogField::RequestId] = request_id unless request_id.nil?
h[LogField::Message] = message unless message.nil?
h[LogField::ClientIp] = client_ip unless client_ip.nil?
h[LogField::XForwardedFor] = x_forwarded_for unless x_forwarded_for.nil?
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/source_parent.rb.erb
- 2
require_relative "shrine/upload"
- 2
require_relative "shrine/download"
- 2
require_relative "shrine/delete"
- 2
require_relative "shrine/metadata"
- 2
require_relative "shrine/exist"
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class Shrine
- 2
class Delete < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Shrine, default: Source::Shrine
- 2
const :event, Event, default: Event::Delete
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
# Event-specific fields
- 2
const :storage, Symbol
- 2
const :location, String
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
- 2
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
h[LogField::Storage] = storage
h[LogField::Location] = location
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class Shrine
- 2
class Download < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Shrine, default: Source::Shrine
- 2
const :event, Event, default: Event::Download
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
# Event-specific fields
- 2
const :storage, Symbol
- 2
const :location, String
- 2
const :download_options, T.nilable(T::Hash[Symbol, T.untyped]), default: nil
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
- 2
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
h[LogField::Storage] = storage
h[LogField::Location] = location
h[LogField::DownloadOptions] = download_options unless download_options.nil?
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class Shrine
- 2
class Exist < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Shrine, default: Source::Shrine
- 2
const :event, Event, default: Event::Exist
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
# Event-specific fields
- 2
const :storage, Symbol
- 2
const :location, String
- 2
const :exist, T.nilable(T::Boolean), default: nil
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
- 2
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
h[LogField::Storage] = storage
h[LogField::Location] = location
h[LogField::Exist] = exist unless exist.nil?
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class Shrine
- 2
class Metadata < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Shrine, default: Source::Shrine
- 2
const :event, Event, default: Event::Metadata
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
# Event-specific fields
- 2
const :storage, Symbol
- 2
const :location, T.nilable(String), default: nil
- 2
const :metadata, T.nilable(T::Hash[String, T.untyped]), default: nil
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
- 2
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
h[LogField::Storage] = storage
h[LogField::Location] = location unless location.nil?
h[LogField::Metadata] = metadata unless metadata.nil?
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
require_relative "../../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class Shrine
- 2
class Upload < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Shrine, default: Source::Shrine
- 2
const :event, Event, default: Event::Upload
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
# Event-specific fields
- 2
const :storage, Symbol
- 2
const :location, String
- 2
const :upload_options, T.nilable(T::Hash[Symbol, T.untyped]), default: nil
- 2
const :options, T.nilable(T::Hash[Symbol, T.untyped]), default: nil
- 2
const :uploader, T.nilable(String), default: nil
- 2
const :duration_ms, T.nilable(Float), default: nil
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
- 2
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
h[LogField::Storage] = storage
h[LogField::Location] = location
h[LogField::UploadOptions] = upload_options unless upload_options.nil?
h[LogField::Options] = options unless options.nil?
h[LogField::Uploader] = uploader unless uploader.nil?
h[LogField::DurationMs] = duration_ms unless duration_ms.nil?
h
end
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../enums/source"
- 2
require_relative "../enums/event"
- 2
require_relative "../enums/level"
- 2
require_relative "../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class Sidekiq < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::Sidekiq, default: Source::Sidekiq
- 2
const :event, Event, default: Event::Log
- 2
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
# Event-specific fields
- 2
const :message, T.nilable(String), default: nil
- 2
const :context, T.nilable(T::Hash[Symbol, T.untyped]), default: nil
- 2
const :process_id, T.nilable(Integer), default: nil
- 2
const :thread_id, T.nilable(T.any(Integer, String)), default: nil
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
- 2
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
h[LogField::Message] = message unless message.nil?
h[LogField::Context] = context unless context.nil?
h[LogField::ProcessId] = process_id unless process_id.nil?
h[LogField::ThreadId] = thread_id unless thread_id.nil?
h
end
end
end
end
# typed: strict
# frozen_string_literal: true
# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb
- 2
require "log_struct/shared/interfaces/common_fields"
- 2
require "log_struct/shared/interfaces/additional_data_field"
- 2
require "log_struct/shared/interfaces/request_fields"
- 2
require "log_struct/shared/serialize_common"
- 2
require "log_struct/shared/merge_additional_data_fields"
- 2
require "log_struct/shared/add_request_fields"
- 2
require_relative "../enums/source"
- 2
require_relative "../enums/event"
- 2
require_relative "../enums/level"
- 2
require_relative "../enums/log_field"
- 2
module LogStruct
- 2
module Log
- 2
class SQL < T::Struct
- 2
extend T::Sig
# Shared/common fields
- 2
const :source, Source::App, default: Source::App
- 2
const :event, Event, default: Event::Database
- 35
const :timestamp, Time, factory: -> { Time.now }
- 2
const :level, Level, default: Level::Info
# Event-specific fields
- 2
const :message, String
- 2
const :sql, String
- 2
const :name, String
- 2
const :duration_ms, Float
- 2
const :row_count, T.nilable(Integer), default: nil
- 2
const :adapter, T.nilable(String), default: nil
- 2
const :bind_params, T.nilable(T::Array[T.untyped]), default: nil
- 2
const :database_name, T.nilable(String), default: nil
- 2
const :connection_pool_size, T.nilable(Integer), default: nil
- 2
const :active_connections, T.nilable(Integer), default: nil
- 2
const :operation_type, T.nilable(String), default: nil
- 2
const :table_names, T.nilable(T::Array[String]), default: nil
# Additional data
- 2
include LogStruct::Log::Interfaces::AdditionalDataField
- 2
const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
- 2
include LogStruct::Log::Shared::MergeAdditionalDataFields
# Serialize shared fields
- 2
include LogStruct::Log::Interfaces::CommonFields
- 2
include LogStruct::Log::Shared::SerializeCommon
- 3
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
- 2
def to_h
- 10
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
- 10
h[LogField::Message] = message
- 10
h[LogField::Sql] = sql
- 10
h[LogField::Name] = name
- 10
h[LogField::DurationMs] = duration_ms
- 10
h[LogField::RowCount] = row_count unless row_count.nil?
- 10
h[LogField::Adapter] = adapter unless adapter.nil?
- 10
h[LogField::BindParams] = bind_params unless bind_params.nil?
- 10
h[LogField::DatabaseName] = database_name unless database_name.nil?
- 10
h[LogField::ConnectionPoolSize] = connection_pool_size unless connection_pool_size.nil?
- 10
h[LogField::ActiveConnections] = active_connections unless active_connections.nil?
- 10
h[LogField::OperationType] = operation_type unless operation_type.nil?
- 10
h[LogField::TableNames] = table_names unless table_names.nil?
- 10
h
end
end
end
end
# typed: strict
# frozen_string_literal: true
- 2
require "active_support/tagged_logging"
# Monkey-patch ActiveSupport::TaggedLogging::Formatter to support hash inputs
# This allows us to pass structured data to the logger and have tags incorporated
# directly into the hash instead of being prepended as strings
- 2
module ActiveSupport
- 2
module TaggedLogging
- 2
extend T::Sig
# Add class-level current_tags method for compatibility with Rails code
# that expects to call ActiveSupport::TaggedLogging.current_tags
# Use thread-local storage directly like Rails does internally
- 3
sig { returns(T::Array[T.any(String, Symbol)]) }
- 2
def self.current_tags
- 10
Thread.current[:activesupport_tagged_logging_tags] || []
end
- 2
module FormatterExtension
- 2
extend T::Sig
- 2
extend T::Helpers
- 2
requires_ancestor { ::ActiveSupport::TaggedLogging::Formatter }
# Override the call method to support hash input/output, and wrap
# plain strings in a Hash under a `msg` key.
# The data is then passed to our custom log formatter that transforms it
# into a JSON string before logging.
- 2
sig { params(severity: T.any(String, Symbol), time: Time, progname: T.untyped, data: T.untyped).returns(String) }
- 2
def call(severity, time, progname, data)
# Convert data to a hash if it's not already one
data = {message: data.to_s} unless data.is_a?(Hash)
# Add current tags to the hash if present
# Use thread-local storage directly as fallback if current_tags method doesn't exist
tags = T.unsafe(self).respond_to?(:current_tags) ? current_tags : (Thread.current[:activesupport_tagged_logging_tags] || [])
data[:tags] = tags if tags.present?
# Call the original formatter with our enhanced data
super
end
end
end
end
- 2
ActiveSupport::TaggedLogging::Formatter.prepend(ActiveSupport::TaggedLogging::FormatterExtension)
# typed: strict
# frozen_string_literal: true
- 2
require_relative "enums/error_reporter"
- 2
require_relative "handlers"
# Try to require all supported error reporting libraries
# Users may have multiple installed, so we should load all of them
- 2
%w[sentry-ruby bugsnag rollbar honeybadger].each do |gem_name|
- 8
require gem_name
rescue LoadError
# If a particular gem is not available, we'll still load the others
end
- 2
module LogStruct
# MultiErrorReporter provides a unified interface for reporting errors to various services.
# You can also override this with your own error reporter by setting
# LogStruct#.config.error_reporting_handler
# NOTE: This is used for cases where an error should be reported
# but the operation should be allowed to continue (e.g. scrubbing log data.)
- 2
class MultiErrorReporter
# Class variable to store the selected reporter
- 2
class CallableReporterWrapper
- 2
extend T::Sig
- 3
sig { params(callable: T.untyped).void }
- 2
def initialize(callable)
- 2
@callable = callable
end
- 3
sig { returns(T.untyped) }
- 2
attr_reader :callable
- 2
alias_method :original, :callable
- 3
sig { params(error: StandardError, context: T.nilable(T::Hash[Symbol, T.untyped]), source: Source).void }
- 2
def call(error, context, source)
- 2
case callable_arity
when 3
- 1
callable.call(error, context, source)
when 2
- 1
callable.call(error, context)
when 1
callable.call(error)
else
callable.call(error, context, source)
end
end
- 2
private
- 3
sig { returns(Integer) }
- 2
def callable_arity
- 2
callable.respond_to?(:arity) ? callable.arity : -1
end
end
- 4
ReporterImpl = T.type_alias { T.any(ErrorReporter, CallableReporterWrapper) }
- 2
@reporter_impl = T.let(nil, T.nilable(ReporterImpl))
- 2
class << self
- 2
extend T::Sig
- 3
sig { returns(ReporterImpl) }
- 2
def reporter
- 7
reporter_impl
end
# Set the reporter to use (user-friendly API that accepts symbols)
- 3
sig { params(reporter_type: T.any(ErrorReporter, Symbol, Handlers::ErrorReporter)).returns(ReporterImpl) }
- 2
def reporter=(reporter_type)
- 7
@reporter_impl = case reporter_type
when ErrorReporter
reporter_type
when Symbol
- 5
resolve_symbol_reporter(reporter_type)
else
- 2
wrap_callable_reporter(reporter_type)
end
end
# Auto-detect which error reporting service to use
- 3
sig { returns(ErrorReporter) }
- 2
def detect_reporter
- 1
if defined?(::Sentry)
- 1
ErrorReporter::Sentry
elsif defined?(::Bugsnag)
ErrorReporter::Bugsnag
elsif defined?(::Rollbar)
ErrorReporter::Rollbar
elsif defined?(::Honeybadger)
ErrorReporter::Honeybadger
else
ErrorReporter::RailsLogger
end
end
# Report an error to the configured error reporting service
- 3
sig { params(error: StandardError, context: T::Hash[T.untyped, T.untyped]).void }
- 2
def report_error(error, context = {})
# Call the appropriate reporter method based on what's available
- 8
impl = reporter_impl
- 8
case impl
when ErrorReporter::Sentry
- 2
report_to_sentry(error, context)
when ErrorReporter::Bugsnag
- 1
report_to_bugsnag(error, context)
when ErrorReporter::Rollbar
- 1
report_to_rollbar(error, context)
when ErrorReporter::Honeybadger
- 1
report_to_honeybadger(error, context)
when ErrorReporter::RailsLogger
- 1
fallback_logging(error, context)
when CallableReporterWrapper
- 2
impl.call(error, context, Source::Internal)
end
end
- 2
private
- 3
sig { returns(ReporterImpl) }
- 2
def reporter_impl
- 15
@reporter_impl ||= detect_reporter
end
- 3
sig { params(symbol: Symbol).returns(ErrorReporter) }
- 2
def resolve_symbol_reporter(symbol)
- 5
case symbol
- 1
when :sentry then ErrorReporter::Sentry
- 1
when :bugsnag then ErrorReporter::Bugsnag
- 1
when :rollbar then ErrorReporter::Rollbar
- 1
when :honeybadger then ErrorReporter::Honeybadger
- 1
when :rails_logger then ErrorReporter::RailsLogger
else
valid_types = ErrorReporter.values.map { |v| ":#{v.serialize}" }.join(", ")
raise ArgumentError, "Unknown reporter type: #{symbol}. Valid types are: #{valid_types}"
end
end
- 3
sig { params(callable: T.untyped).returns(CallableReporterWrapper) }
- 2
def wrap_callable_reporter(callable)
- 2
unless callable.respond_to?(:call)
raise ArgumentError, "Reporter must respond to #call"
end
- 2
CallableReporterWrapper.new(callable)
end
# Report to Sentry
- 3
sig { params(error: StandardError, context: T::Hash[T.untyped, T.untyped]).void }
- 2
def report_to_sentry(error, context = {})
- 2
return unless defined?(::Sentry)
# Use the proper Sentry interface defined in the RBI
- 2
::Sentry.capture_exception(error, extra: context)
rescue => e
- 1
fallback_logging(e, {original_error: error.class.to_s})
end
# Report to Bugsnag
- 3
sig { params(error: StandardError, context: T::Hash[T.untyped, T.untyped]).void }
- 2
def report_to_bugsnag(error, context = {})
- 1
return unless defined?(::Bugsnag)
- 1
::Bugsnag.notify(error) do |report|
- 1
report.add_metadata(:context, context)
end
rescue => e
fallback_logging(e, {original_error: error.class.to_s})
end
# Report to Rollbar
- 3
sig { params(error: StandardError, context: T::Hash[T.untyped, T.untyped]).void }
- 2
def report_to_rollbar(error, context = {})
- 1
return unless defined?(::Rollbar)
- 1
::Rollbar.error(error, context)
rescue => e
fallback_logging(e, {original_error: error.class.to_s})
end
# Report to Honeybadger
- 3
sig { params(error: StandardError, context: T::Hash[T.untyped, T.untyped]).void }
- 2
def report_to_honeybadger(error, context = {})
- 1
return unless defined?(::Honeybadger)
- 1
::Honeybadger.notify(error, context: context)
rescue => e
fallback_logging(e, {original_error: error.class.to_s})
end
# Fallback logging when no error reporting services are available
# Uses the LogStruct.error method to properly log the error
- 3
sig { params(error: StandardError, context: T::Hash[T.untyped, T.untyped]).void }
- 2
def fallback_logging(error, context = {})
- 2
return if error.nil?
# Create a proper error log entry
- 2
error_log = Log.from_exception(Source::Internal, error, context)
# Use LogStruct.error to properly log the error
- 2
LogStruct.error(error_log)
end
end
end
end
# typed: strict
# frozen_string_literal: true
- 2
require "digest"
- 2
require_relative "hash_utils"
- 2
require_relative "config_struct/filters"
- 2
require_relative "enums/source"
- 2
module LogStruct
# This class contains methods for filtering sensitive data in logs
# It is used by Formatter to determine which keys should be filtered
- 2
class ParamFilters
- 2
class << self
- 2
extend T::Sig
# Check if a key should be filtered based on our defined sensitive keys
- 4
sig { params(key: T.untyped, value: T.untyped).returns(T::Boolean) }
- 2
def should_filter_key?(key, value = nil)
- 4585
filters = LogStruct.config.filters
- 4585
normalized_key = key.to_s
- 4585
normalized_symbol = normalized_key.downcase.to_sym
- 4585
return true if filters.filter_keys.include?(normalized_symbol)
- 4580
filters.filter_matchers.any? do |matcher|
- 3036
matcher.matches?(normalized_key, value)
rescue => e
handle_filter_matcher_error(e, matcher, normalized_key)
false
end
end
# Check if a key should be hashed rather than completely filtered
- 3
sig { params(key: T.untyped).returns(T::Boolean) }
- 2
def should_include_string_hash?(key)
- 6
LogStruct.config.filters.filter_keys_with_hashes.include?(key.to_s.downcase.to_sym)
end
# Convert a value to a filtered summary hash (e.g. { _filtered: { class: "String", ... }})
- 3
sig { params(key: T.untyped, data: T.untyped).returns(T::Hash[Symbol, T.untyped]) }
- 2
def summarize_json_attribute(key, data)
- 7
case data
when Hash
- 1
summarize_hash(data)
when Array
- 1
summarize_array(data)
when String
- 4
summarize_string(data, should_include_string_hash?(key))
else
- 1
{_class: data.class}
end
end
# Summarize a String for logging, including details and an SHA256 hash (if configured)
- 3
sig { params(string: String, include_hash: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
- 2
def summarize_string(string, include_hash)
filtered_string = {
- 6
_class: String
}
- 6
if include_hash
- 2
filtered_string[:_hash] = HashUtils.hash_value(string)
else
- 4
filtered_string[:_bytes] = string.bytesize
end
- 6
filtered_string
end
# Summarize a Hash for logging, including details about the size and keys
- 3
sig { params(hash: T::Hash[T.untyped, T.untyped]).returns(T::Hash[Symbol, T.untyped]) }
- 2
def summarize_hash(hash)
- 4
return {_class: "Hash", _empty: true} if hash.empty?
# Don't include byte size if hash contains any filtered keys
- 4
has_sensitive_keys = T.let(false, T::Boolean)
- 4
normalized_keys = []
- 4
hash.each do |key, value|
- 6
has_sensitive_keys ||= should_filter_key?(key, value)
- 6
normalized_keys << normalize_summary_key(key)
end
summary = {
- 4
_class: Hash,
_keys_count: hash.keys.size,
_keys: normalized_keys.take(10)
}
# Only add byte size if no sensitive keys are present
- 4
summary[:_bytes] = hash.to_json.bytesize unless has_sensitive_keys
- 4
summary
end
# Summarize an Array for logging, including details about the size and items
- 3
sig { params(array: T::Array[T.untyped]).returns(T::Hash[Symbol, T.untyped]) }
- 2
def summarize_array(array)
- 3
return {_class: "Array", _empty: true} if array.empty?
{
- 2
_class: Array,
_count: array.size,
_bytes: array.to_json.bytesize
}
end
- 2
private
- 3
sig { params(key: T.any(String, Symbol, Integer, T.untyped)).returns(T.any(Symbol, String)) }
- 2
def normalize_summary_key(key)
- 6
if key.is_a?(Symbol)
- 5
key
- 1
elsif key.respond_to?(:to_sym)
key.to_sym
else
- 1
key.to_s
end
rescue
key.to_s
end
- 2
sig { params(error: StandardError, matcher: ConfigStruct::FilterMatcher, key: String).void }
- 2
def handle_filter_matcher_error(error, matcher, key)
context = {
matcher: matcher.label,
key: key
}
LogStruct.handle_exception(error, source: Source::Internal, context: context)
end
end
end
end
# typed: strict
# frozen_string_literal: true
- 2
require "sorbet-runtime"
- 2
module LogStruct
- 2
module RailsBootBannerSilencer
- 2
extend T::Sig
- 2
@installed = T.let(false, T::Boolean)
- 4
sig { void }
- 2
def self.install!
- 3
return if @installed
- 3
@installed = true
- 3
return unless ARGV.include?("server")
patch!
end
- 3
sig { returns(T::Boolean) }
- 2
def self.patch!
begin
- 2
require "rails/command"
- 1
require "rails/commands/server/server_command"
rescue LoadError
# Best-effort – if Rails isn't available yet we'll try again later
- 1
return false
end
- 1
server_command = T.let(nil, T.untyped)
# rubocop:disable Sorbet/ConstantsFromStrings
begin
- 1
server_command = ::Object.const_get("Rails::Command::ServerCommand")
rescue NameError
server_command = nil
end
# rubocop:enable Sorbet/ConstantsFromStrings
- 1
return false unless server_command
- 1
patch_server_command(server_command)
- 1
true
end
- 3
sig { params(server_command: T.untyped).void }
- 2
def self.patch_server_command(server_command)
- 6
return if server_command <= ServerCommandSilencer
- 5
server_command.prepend(ServerCommandSilencer)
end
- 2
module ServerCommandSilencer
- 2
extend T::Sig
- 3
sig { params(args: T.untyped, block: T.nilable(T.proc.returns(T.untyped))).returns(T.untyped) }
- 2
def perform(*args, &block)
- 1
::LogStruct.server_mode = true
- 1
super
end
- 3
sig { params(server: T.untyped, url: T.nilable(String)).void }
- 2
def print_boot_information(server, url)
- 2
::LogStruct.server_mode = true
- 2
consume_boot_banner(server, url)
end
- 2
private
- 3
sig { params(server: T.untyped, url: T.nilable(String)).void }
- 2
def consume_boot_banner(server, url)
- 2
return unless defined?(::LogStruct::Integrations::Puma)
begin
- 2
::LogStruct::Integrations::Puma.emit_boot_if_needed!
rescue => e
::LogStruct::Integrations::Puma.handle_integration_error(e)
end
begin
- 2
model = ::ActiveSupport::Inflector.demodulize(server)
rescue
- 1
model = "Puma"
end
lines = [
- 2
"=> Booting #{model}",
build_rails_banner_line(url),
"=> Run `#{lookup_executable} --help` for more startup options"
]
- 2
lines.each do |line|
- 6
::LogStruct::Integrations::Puma.process_line(line)
rescue => e
::LogStruct::Integrations::Puma.handle_integration_error(e)
end
end
- 3
sig { params(url: T.nilable(String)).returns(String) }
- 2
def build_rails_banner_line(url)
- 2
suffix = url ? " #{url}" : ""
- 2
"=> Rails #{::Rails.version} application starting in #{::Rails.env}#{suffix}"
rescue
- 1
"=> Rails application starting"
end
- 3
sig { returns(String) }
- 2
def lookup_executable
- 3
return "rails" unless T.unsafe(self).respond_to?(:executable, true)
- 2
T.cast(T.unsafe(self).send(:executable), String)
rescue
- 1
"rails"
end
end
end
end
# typed: strict
# frozen_string_literal: true
- 2
require "rails"
- 2
require "semantic_logger"
- 2
require_relative "formatter"
- 2
require_relative "semantic_logger/setup"
- 2
require_relative "integrations"
- 2
module LogStruct
# Railtie to integrate with Rails
- 2
class Railtie < ::Rails::Railtie
# Configure early, right after logger initialization
- 2
initializer "logstruct.configure_logger", after: :initialize_logger do |app|
- 2
next unless LogStruct.enabled?
# Use SemanticLogger for powerful logging features
- 2
LogStruct::SemanticLogger::Setup.configure_semantic_logger(app)
end
# Setup all integrations after logger setup is complete
- 2
initializer "logstruct.setup", before: :build_middleware_stack do |app|
- 2
next unless LogStruct.enabled?
# Merge Rails filter parameters into our filters
- 2
LogStruct.merge_rails_filter_parameters!
# Set up non-middleware integrations first
- 2
Integrations.setup_integrations(stage: :non_middleware)
# Note: Host allowances are managed by the test app itself.
end
# Setup middleware integrations during Rails configuration (before middleware stack is built)
# Must be done in the Railtie class body, not in an initializer
- 2
initializer "logstruct.configure_middleware", before: :build_middleware_stack do |app|
# This runs before middleware stack is frozen, so we can configure it
- 2
next unless LogStruct.enabled?
- 2
Integrations.setup_integrations(stage: :middleware)
end
# Emit Puma lifecycle logs when running `rails server`
- 2
initializer "logstruct.puma_lifecycle", after: "logstruct.configure_logger" do
- 2
is_server = ::LogStruct.server_mode?
- 2
next unless is_server
begin
require "log_struct/log/puma"
port = T.let(nil, T.nilable(String))
ARGV.each_with_index do |arg, idx|
if arg == "-p" || arg == "--port"
port = ARGV[idx + 1]
break
elsif arg.start_with?("--port=")
port = arg.split("=", 2)[1]
break
end
end
started = LogStruct::Log::Puma::Start.new(
mode: "single",
environment: (defined?(::Rails) && ::Rails.respond_to?(:env)) ? ::Rails.env : nil,
process_id: Process.pid,
listening_addresses: port ? ["tcp://127.0.0.1:#{port}"] : nil
)
begin
warn("[logstruct] puma lifecycle init")
rescue
end
LogStruct.info(started)
at_exit do
shutdown = LogStruct::Log::Puma::Shutdown.new(
process_id: Process.pid
)
LogStruct.info(shutdown)
end
rescue
# best-effort
end
end
# Delegate integration initializers to Integrations module
- 2
LogStruct::Integrations.setup_initializers(self)
end
end
# typed: strict
# frozen_string_literal: true
- 2
require "semantic_logger"
- 2
require_relative "formatter"
- 2
module LogStruct
- 2
module SemanticLogger
# Development-Optimized Colorized JSON Formatter
#
# This formatter extends SemanticLogger's Color formatter to provide beautiful,
# readable JSON output in development environments. It significantly improves
# the developer experience when working with structured logs.
#
# ## Benefits of Colorized Output:
#
# ### Readability
# - **Syntax highlighting**: JSON keys, values, and data types are color-coded
# - **Visual hierarchy**: Different colors help identify structure at a glance
# - **Error spotting**: Quickly identify malformed data or unexpected values
# - **Context separation**: Log entries are visually distinct from each other
#
# ### Performance in Development
# - **Faster debugging**: Quickly scan logs without reading every character
# - **Pattern recognition**: Colors help identify common log patterns
# - **Reduced cognitive load**: Less mental effort required to parse log output
# - **Improved workflow**: Spend less time reading logs, more time coding
#
# ### Customization
# - **Configurable colors**: Customize colors for keys, strings, numbers, etc.
# - **Environment-aware**: Automatically disabled in production/CI environments
# - **Fallback support**: Gracefully falls back to standard formatting if needed
#
# ## Color Mapping:
# - **Keys**: Yellow - Easy to spot field names
# - **Strings**: Green - Clear indication of text values
# - **Numbers**: Blue - Numeric values stand out
# - **Booleans**: Magenta - true/false values are distinctive
# - **Null**: Red - Missing values are immediately visible
# - **Logger names**: Cyan - Source identification
#
# ## Integration with SemanticLogger:
# This formatter preserves all SemanticLogger benefits (performance, threading,
# reliability) while adding visual enhancements. It processes LogStruct types,
# hashes, and plain messages with appropriate colorization.
#
# The formatter is automatically enabled in development when `enable_color_output`
# is true (default), providing zero-configuration enhanced logging experience.
- 2
class ColorFormatter < ::SemanticLogger::Formatters::Color
- 2
extend T::Sig
- 3
sig { params(color_map: T.nilable(T::Hash[Symbol, Symbol]), args: T.untyped).void }
- 2
def initialize(color_map: nil, **args)
- 8
super(**args)
- 8
@logstruct_formatter = T.let(LogStruct::Formatter.new, LogStruct::Formatter)
# Set up custom color mapping
- 8
@custom_colors = T.let(color_map || default_color_map, T::Hash[Symbol, Symbol])
end
- 3
sig { override.params(log: ::SemanticLogger::Log, logger: T.untyped).returns(String) }
- 2
def call(log, logger)
# Handle LogStruct types specially with colorization
- 186
if log.payload.is_a?(LogStruct::Log::Interfaces::CommonFields)
# Get the LogStruct formatted JSON
logstruct_json = @logstruct_formatter.call(log.level, log.time, log.name, log.payload)
# Parse and colorize it
begin
parsed_data = T.let(JSON.parse(logstruct_json), T::Hash[String, T.untyped])
colorized_json = colorize_json(parsed_data)
# Use SemanticLogger's prefix formatting but with our colorized content
prefix = format("%<time>s %<level>s [%<process>s] %<name>s -- ",
time: format_time(log.time),
level: format_level(log.level),
process: log.process_info,
name: format_name(log.name))
"#{prefix}#{colorized_json}\n"
rescue JSON::ParserError
# Fallback to standard formatting
super
end
- 186
elsif log.payload.is_a?(Hash) || log.payload.is_a?(T::Struct)
# Process hashes through our formatter then colorize
begin
- 16
logstruct_json = @logstruct_formatter.call(log.level, log.time, log.name, log.payload)
- 16
parsed_data = T.let(JSON.parse(logstruct_json), T::Hash[String, T.untyped])
- 16
colorized_json = colorize_json(parsed_data)
- 16
prefix = format("%<time>s %<level>s [%<process>s] %<name>s -- ",
time: format_time(log.time),
level: format_level(log.level),
process: log.process_info,
name: format_name(log.name))
- 16
"#{prefix}#{colorized_json}\n"
rescue JSON::ParserError
# Fallback to standard formatting
super
end
else
# For plain messages, use SemanticLogger's default colorization
- 170
super
end
end
- 2
private
- 2
sig { returns(LogStruct::Formatter) }
- 2
attr_reader :logstruct_formatter
# Default color mapping for LogStruct JSON
- 3
sig { returns(T::Hash[Symbol, Symbol]) }
- 2
def default_color_map
- 7
{
key: :yellow,
string: :green,
number: :blue,
bool: :magenta,
nil: :red,
name: :cyan
}
end
# Lightweight JSON colorizer: pretty-prints the hash, then layers ANSI
# codes onto keys/values via successive regex passes (no recursive walk).
sig { params(data: T::Hash[String, T.untyped]).returns(String) }
def colorize_json(data)
  pretty = JSON.pretty_generate(data)
  keys_done = pretty.gsub(/"([^"]+)":/, colorize_text('\1', :key) + ":")
  strings_done = keys_done.gsub(/: "([^"]*)"/, ": " + colorize_text('\1', :string))
  numbers_done = strings_done.gsub(/: (\d+\.?\d*)/, ": " + colorize_text('\1', :number))
  bools_done = numbers_done.gsub(/: (true|false)/, ": " + colorize_text('\1', :bool))
  bools_done.gsub(": null", ": " + colorize_text("null", :nil))
end
# Wrap text in ANSI escape codes for the configured color category
# (falls back to white when the category has no custom color).
sig { params(text: String, color_type: Symbol).returns(String) }
def colorize_text(text, color_type)
  chosen = @custom_colors[color_type] || :white
  "\e[#{color_code_for(chosen)}m#{text}\e[0m"
end
# Render the timestamp with microsecond precision (local time).
sig { params(time: Time).returns(String) }
def format_time(time)
  time.strftime("%Y-%m-%d %H:%M:%S.%6N")
end
# Single-letter, colorized log level (e.g. "I" for :info).
sig { params(level: T.any(String, Symbol)).returns(String) }
def format_level(level)
  initial = level.to_s.upcase[0]
  code = color_code_for(level_color_for(level.to_sym))
  "\e[#{code}m#{initial}\e[0m"
end
# Colorized logger name; empty string when no name is set.
sig { params(name: T.nilable(String)).returns(String) }
def format_name(name)
  return "" if name.nil?
  chosen = @custom_colors[:name] || :cyan
  "\e[#{color_code_for(chosen)}m#{name}\e[0m"
end
# Map a log level to its display color (unknown levels fall back to cyan).
sig { params(level: Symbol).returns(Symbol) }
def level_color_for(level)
  {
    debug: :magenta,
    info: :cyan,
    warn: :yellow,
    error: :red,
    fatal: :red
  }.fetch(level, :cyan)
end
# Translate a color symbol into its ANSI SGR code string.
# Unknown colors default to white ("37").
sig { params(color: Symbol).returns(String) }
def color_code_for(color)
  {
    black: "30", red: "31", green: "32", yellow: "33",
    blue: "34", magenta: "35", cyan: "36", white: "37",
    bright_black: "90", bright_red: "91", bright_green: "92",
    bright_yellow: "93", bright_blue: "94", bright_magenta: "95",
    bright_cyan: "96", bright_white: "97"
  }.fetch(color, "37")
end
end
end
end
# typed: strict
# frozen_string_literal: true
- 2
module LogStruct
  module SemanticLogger
    module Concerns
      # Level-specific log methods (debug/info/warn/error/fatal) mixed into
      # LogStruct::SemanticLogger::Logger. Each method:
      #   1. publishes an ActiveSupport notification for structured messages,
      #   2. routes LogStruct / T::Struct / Hash messages through
      #      SemanticLogger's payload path (plain strings pass through),
      #   3. re-emits the message to any broadcast loggers.
      #
      # The five methods were previously copy/paste duplicates; the shared
      # logic now lives in private helpers. The `super` calls must remain
      # inline in each entry point (super cannot be hoisted into a helper).
      module LogMethods
        extend T::Sig
        extend T::Helpers
        requires_ancestor { LogStruct::SemanticLogger::Logger }

        sig { params(message: T.untyped, payload: T.untyped, block: T.nilable(T.proc.returns(String))).returns(T::Boolean) }
        def debug(message = nil, payload = nil, &block)
          instrument_log(message, :debug)
          outcome =
            if structured_message?(message)
              super(nil, payload: message, &block)
            else
              super
            end
          rebroadcast(:debug, message, &block)
          outcome
        end

        sig { params(message: T.untyped, payload: T.untyped, block: T.nilable(T.proc.returns(String))).returns(T::Boolean) }
        def info(message = nil, payload = nil, &block)
          instrument_log(message, :info)
          outcome =
            if structured_message?(message)
              super(nil, payload: message, &block)
            else
              super
            end
          rebroadcast(:info, message, &block)
          outcome
        end

        sig { params(message: T.untyped, payload: T.untyped, block: T.nilable(T.proc.returns(String))).returns(T::Boolean) }
        def warn(message = nil, payload = nil, &block)
          instrument_log(message, :warn)
          outcome =
            if structured_message?(message)
              super(nil, payload: message, &block)
            else
              super
            end
          rebroadcast(:warn, message, &block)
          outcome
        end

        sig { params(message: T.untyped, payload: T.untyped, block: T.nilable(T.proc.returns(String))).returns(T::Boolean) }
        def error(message = nil, payload = nil, &block)
          instrument_log(message, :error)
          outcome =
            if structured_message?(message)
              super(nil, payload: message, &block)
            else
              super
            end
          rebroadcast(:error, message, &block)
          outcome
        end

        sig { params(message: T.untyped, payload: T.untyped, block: T.nilable(T.proc.returns(String))).returns(T::Boolean) }
        def fatal(message = nil, payload = nil, &block)
          instrument_log(message, :fatal)
          outcome =
            if structured_message?(message)
              super(nil, payload: message, &block)
            else
              super
            end
          rebroadcast(:fatal, message, &block)
          outcome
        end

        private

        # True when the message should be passed as a structured payload.
        sig { params(message: T.untyped).returns(T::Boolean) }
        def structured_message?(message)
          message.is_a?(LogStruct::Log::Interfaces::CommonFields) ||
            message.is_a?(T::Struct) ||
            message.is_a?(Hash)
        end

        # Re-emit the message to broadcast loggers that support this level.
        # Only plain strings (or blocks) are forwarded; structured payloads
        # are not re-sent to broadcast targets (matches prior behavior).
        sig { params(level: Symbol, message: T.untyped, block: T.nilable(T.proc.returns(String))).void }
        def rebroadcast(level, message, &block)
          broadcasts.each do |target|
            next unless target.respond_to?(level)
            if message.is_a?(String)
              target.public_send(level, message)
            elsif block
              target.public_send(level, &block)
            end
          end
        end

        # Publish a "log.logstruct" notification for structured messages so
        # subscribers can observe typed log events.
        sig { params(message: T.untyped, level: Symbol).void }
        def instrument_log(message, level)
          return unless message.is_a?(LogStruct::Log::Interfaces::CommonFields) || message.is_a?(T::Struct)
          ::ActiveSupport::Notifications.instrument("log.logstruct", log: message, level: level)
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
require "semantic_logger"
- 2
require_relative "../formatter"
- 2
module LogStruct
- 2
module SemanticLogger
# High-Performance JSON Formatter with LogStruct Integration
#
# This formatter extends SemanticLogger's JSON formatter to provide optimal
# JSON serialization performance while preserving all LogStruct features
# including data filtering, sensitive data scrubbing, and type-safe structures.
#
# ## Performance Advantages Over Rails Logger:
#
# ### Serialization Performance
# - **Direct JSON generation**: Bypasses intermediate object creation
# - **Streaming serialization**: Memory-efficient processing of large objects
# - **Type-optimized paths**: Fast serialization for common data types
# - **Zero-copy operations**: Minimal memory allocation during serialization
#
# ### Memory Efficiency
# - **Object reuse**: Formatter instances are reused across log calls
# - **Lazy evaluation**: Only processes data that will be included in output
# - **Efficient buffering**: Optimal buffer sizes for JSON generation
# - **Garbage collection friendly**: Minimal object allocation reduces GC pressure
#
# ### Integration Benefits
# - **LogStruct compatibility**: Native support for typed log structures
# - **Filter preservation**: Maintains all LogStruct filtering capabilities
# - **Scrubbing integration**: Seamless sensitive data scrubbing
# - **Error handling**: Robust handling of serialization errors
#
# ## Feature Preservation:
# This formatter maintains full compatibility with LogStruct's features:
# - Sensitive data filtering (passwords, tokens, etc.)
# - Recursive object scrubbing and processing
# - Type-safe log structure handling
# - Custom field transformations
# - Metadata preservation and enrichment
#
# ## JSON Output Structure:
# The formatter produces consistent, parseable JSON that includes:
# - Standard log fields (timestamp, level, message, logger name)
# - LogStruct-specific fields (source, event, context)
# - SemanticLogger metadata (process ID, thread ID, tags)
# - Application-specific payload data
#
# This combination provides the performance benefits of SemanticLogger with
# the structured data benefits of LogStruct, resulting in faster, more
# reliable logging for high-traffic applications.
- 2
class Formatter < ::SemanticLogger::Formatters::Json
  extend T::Sig

  sig { void }
  def initialize
    super
    @logstruct_formatter = T.let(LogStruct::Formatter.new, LogStruct::Formatter)
  end

  # Render a SemanticLogger::Log as a single JSON line.
  #
  # SemanticLogger wraps structured messages inside a payload hash under
  # the :payload key, so that case is unwrapped first; otherwise the
  # payload (or the plain message) is handed to the LogStruct formatter.
  sig { params(log: ::SemanticLogger::Log, logger: T.untyped).returns(String) }
  def call(log, logger)
    payload = log.payload
    json =
      if payload.is_a?(Hash) && payload[:payload].is_a?(LogStruct::Log::Interfaces::CommonFields)
        # LogStruct type wrapped by SemanticLogger
        @logstruct_formatter.call(log.level, log.time, log.name, payload[:payload])
      elsif payload.is_a?(LogStruct::Log::Interfaces::CommonFields)
        # Direct LogStruct (fallback case)
        @logstruct_formatter.call(log.level, log.time, log.name, payload)
      elsif payload.is_a?(Hash) && payload[:payload].is_a?(T::Struct)
        # T::Struct wrapped in the payload hash
        @logstruct_formatter.call(log.level, log.time, log.name, payload[:payload])
      elsif payload.is_a?(Hash) || payload.is_a?(T::Struct)
        # Plain hashes / T::Structs go straight through our formatter
        @logstruct_formatter.call(log.level, log.time, log.name, payload)
      else
        # Plain message: wrap it in a Plain log entry
        plain_log = ::LogStruct::Log::Plain.new(
          message: payload || log.message,
          timestamp: log.time
        )
        @logstruct_formatter.call(log.level, log.time, log.name, plain_log)
      end
    # SemanticLogger appenders typically add their own newline; strip ours
    # to avoid double newlines.
    json.end_with?("\n") ? json.chomp : json
  end

  private

  sig { returns(LogStruct::Formatter) }
  attr_reader :logstruct_formatter
end
end
end
# typed: strict
# frozen_string_literal: true
- 2
require "semantic_logger"
- 2
require_relative "concerns/log_methods"
- 2
module LogStruct
- 2
module SemanticLogger
# High-Performance Logger with LogStruct Integration
#
# This logger extends SemanticLogger::Logger to provide optimal logging performance
# while seamlessly integrating with LogStruct's typed logging system.
#
# ## Key Benefits Over Rails.logger:
#
# ### Performance
# - **10-100x faster** than Rails' default logger for high-volume applications
# - **Non-blocking I/O**: Uses background threads for actual log writes
# - **Minimal memory allocation**: Efficient object reuse and zero-copy operations
# - **Batched writes**: Reduces system calls by batching multiple log entries
#
# ### Reliability
# - **Thread-safe operations**: Safe for use in multi-threaded environments
# - **Error resilience**: Logger failures don't crash your application
# - **Graceful fallbacks**: Continues operating even if appenders fail
#
# ### Features
# - **Structured logging**: Native support for LogStruct types and hashes
# - **Rich metadata**: Automatic inclusion of process ID, thread ID, timestamps
# - **Tagged context**: Hierarchical tagging for request/job tracking
# - **Multiple destinations**: Simultaneously log to files, STDOUT, cloud services
#
# ### Development Experience
# - **Colorized output**: Beautiful ANSI-colored logs in development
# - **Detailed timing**: Built-in measurement of log processing time
# - **Context preservation**: Maintains Rails.logger compatibility
#
# ## Usage Examples
#
# The logger automatically handles LogStruct types, hashes, and plain messages:
#
# ```ruby
# logger = LogStruct::SemanticLogger::Logger.new("MyApp")
#
# # LogStruct typed logging (optimal performance)
# log_entry = LogStruct::Log::Plain.new(
# message: "User authenticated",
# source: LogStruct::Source::App,
# event: LogStruct::Event::Security
# )
# logger.info(log_entry)
#
# # Hash logging (automatically structured)
# logger.info({
# action: "user_login",
# user_id: 123,
# ip_address: "192.168.1.1"
# })
#
# # Plain string logging (backward compatibility)
# logger.info("User logged in successfully")
# ```
#
# The logger is a drop-in replacement for Rails.logger and maintains full
# API compatibility while providing significantly enhanced performance.
- 2
class Logger < ::SemanticLogger::Logger
  extend T::Sig

  sig { params(name: T.any(String, Symbol, Module, T::Class[T.anything]), level: T.nilable(Symbol), filter: T.untyped).void }
  def initialize(name = "Application", level: nil, filter: nil)
    # SemanticLogger::Logger takes positional arguments, not keywords.
    super(name, level, filter)
    # Broadcast targets are T.untyped: callers may hand us ::Logger,
    # ActiveSupport::Logger, test doubles, or third-party loggers.
    @broadcasts = T.let([], T::Array[T.untyped])
    # ActiveJob expects logger.formatter to respond to #current_tags.
    @formatter = T.let(FormatterProxy.new, FormatterProxy)
  end

  # ActiveSupport::BroadcastLogger compatibility: lets Rails.logger fan out
  # to multiple loggers.
  sig { returns(T::Array[T.untyped]) }
  attr_reader :broadcasts

  # ActiveJob compatibility - expects logger.formatter.current_tags
  sig { returns(FormatterProxy) }
  attr_reader :formatter

  # Register an additional logger to receive broadcast messages.
  sig { params(logger: T.untyped).returns(T.untyped) }
  def broadcast_to(logger)
    @broadcasts.push(logger)
    logger
  end

  sig { params(logger: T.untyped).void }
  def stop_broadcasting_to(logger)
    @broadcasts.delete(logger)
  end

  include Concerns::LogMethods

  # Tagged-logging support; flattens nested tag arrays before delegating.
  sig { params(tags: T.untyped, block: T.proc.returns(T.untyped)).returns(T.untyped) }
  def tagged(*tags, &block)
    flattened = tags.flatten
    return super(&block) if flattened.empty?
    super(*T.unsafe(flattened), &block)
  end

  # Rails.logger interface compatibility.
  sig { returns(T::Array[T.any(String, Symbol)]) }
  def current_tags
    ::SemanticLogger.tags
  end

  sig { void }
  def clear_tags!
    # SemanticLogger has no clear_tags!; pop the whole tag stack instead.
    depth = ::SemanticLogger.tags.length
    ::SemanticLogger.pop_tags(depth) if depth > 0
  end

  sig { params(tags: T.untyped).returns(T::Array[T.untyped]) }
  def push_tags(*tags)
    cleaned = tags.flatten.compact
    cleaned.each { |tag| ::SemanticLogger.push_tags(tag) }
    cleaned
  end

  sig { params(count: Integer).void }
  def pop_tags(count = 1)
    ::SemanticLogger.pop_tags(count)
  end

  # << operator support (used by RailsLogSplitter); logs at info level and
  # forwards to broadcast targets that implement <<.
  sig { params(msg: String).returns(T.self_type) }
  def <<(msg)
    info(msg)
    @broadcasts.each { |target| target << msg if target.respond_to?(:<<) }
    self
  end
end
# Proxy object to provide ActiveJob-compatible formatter interface
- 2
class FormatterProxy
  extend T::Sig

  # Minimal stand-in giving ActiveJob the formatter#current_tags it expects.
  sig { returns(T::Array[T.any(String, Symbol)]) }
  def current_tags
    Thread.current[:activesupport_tagged_logging_tags] || []
  end
end
end
end
# typed: strict
# frozen_string_literal: true
- 2
require "semantic_logger"
- 2
require_relative "formatter"
- 2
require_relative "color_formatter"
- 2
require_relative "logger"
- 2
module LogStruct
# SemanticLogger Integration
#
# LogStruct uses SemanticLogger as its core logging engine, providing significant
# performance and functionality benefits over Rails' default logger:
#
# ## Performance Benefits
# - **Asynchronous logging**: Logs are written in a background thread, eliminating
# I/O blocking in your main application threads
# - **High throughput**: Can handle 100,000+ log entries per second
# - **Memory efficient**: Structured data processing with minimal allocations
# - **Zero-copy serialization**: Direct JSON generation without intermediate objects
#
# ## Reliability Benefits
# - **Thread-safe**: All operations are thread-safe by design
# - **Graceful degradation**: Continues logging even if appenders fail
# - **Error isolation**: Logging errors don't crash your application
# - **Buffered writes**: Reduces disk I/O with intelligent batching
#
# ## Feature Benefits
# - **Multiple appenders**: Log to files, STDOUT, databases, cloud services simultaneously
# - **Structured metadata**: Rich context including process ID, thread ID, tags, and more
# - **Log filtering**: Runtime filtering by logger name, level, or custom rules
# - **Formatters**: Pluggable output formatting (JSON, colorized, custom)
# - **Metrics integration**: Built-in performance metrics and timing data
#
# ## Development Experience
# - **Colorized output**: Beautiful, readable logs in development with ANSI colors
# - **Tagged logging**: Hierarchical context tracking (requests, jobs, etc.)
# - **Debugging tools**: Detailed timing and memory usage information
# - **Hot reloading**: Configuration changes without application restart
#
# ## Production Benefits
# - **Log rotation**: Automatic file rotation with size/time-based policies
# - **Compression**: Automatic log compression to save disk space
# - **Cloud integration**: Direct integration with CloudWatch, Splunk, etc.
# - **Alerting**: Built-in support for error alerting and monitoring
#
# ## LogStruct Specific Enhancements
# - **Type safety**: Full Sorbet type annotations for compile-time error detection
# - **Structured data**: Native support for LogStruct's typed log structures
# - **Filtering integration**: Seamless integration with LogStruct's data filters
# - **Error handling**: Enhanced error reporting with full stack traces and context
#
# SemanticLogger is a production-grade logging framework used by companies processing
# millions of requests per day. It provides the performance and reliability needed
# for high-traffic Rails applications while maintaining an elegant developer experience.
- 2
module SemanticLogger
# Handles setup and configuration of SemanticLogger for Rails applications
#
# This module provides the core integration between LogStruct and SemanticLogger,
# configuring appenders, formatters, and logger replacement to provide optimal
# logging performance while maintaining full compatibility with Rails conventions.
- 2
module Setup
  extend T::Sig

  # Configure SemanticLogger as the Rails application's logging engine:
  # sets application/environment metadata and the default level, installs
  # appenders, and replaces Rails.logger plus all component loggers.
  #
  # @param app [Rails::Application] The Rails application instance
  sig { params(app: T.untyped).void }
  def self.configure_semantic_logger(app)
    ::SemanticLogger.application = Rails.application.class.module_parent_name
    ::SemanticLogger.environment = Rails.env
    ::SemanticLogger.default_level = determine_log_level(app)
    # Start from a clean slate before adding our appenders.
    ::SemanticLogger.clear_appenders!
    add_appenders(app)
    replace_rails_logger(app)
  end

  # Resolve the log level: explicit Rails config wins, production defaults
  # to :info, every other environment to :debug.
  # (Previously the test and "else" branches both returned :debug.)
  sig { params(app: T.untyped).returns(Symbol) }
  def self.determine_log_level(app)
    return app.config.log_level if app.config.log_level
    Rails.env.production? ? :info : :debug
  end

  # Install the console appender and, when Rails has a log path, a file
  # appender. The console formatter is JSON everywhere except development
  # with color output enabled (and JSON-in-development not forced).
  # Collapses three previously duplicated add_appender calls into one.
  sig { params(app: T.untyped).void }
  def self.add_appenders(app)
    config = LogStruct.config
    io = determine_output(app)
    formatter =
      if Rails.env.development? && !config.prefer_json_in_development && config.enable_color_output
        # Opt-in colorful human formatter in development
        LogStruct::SemanticLogger::ColorFormatter.new(color_map: config.color_map)
      else
        # Production-style JSON (also the development default)
        LogStruct::SemanticLogger::Formatter.new
      end
    ::SemanticLogger.add_appender(
      io: io,
      formatter: formatter,
      filter: determine_filter
    )
    # Add file appender if Rails has a log path configured (normal Rails behavior)
    log_path = app.config.paths["log"].first
    if log_path
      ::SemanticLogger.add_appender(
        file_name: log_path,
        formatter: LogStruct::SemanticLogger::Formatter.new,
        filter: determine_filter
      )
    end
  end

  # Pick the console IO target: STDOUT when RAILS_LOG_TO_STDOUT is set,
  # a StringIO in test (keeps test output clean), STDOUT otherwise.
  sig { params(app: T.untyped).returns(T.untyped) }
  def self.determine_output(app)
    return $stdout if ENV["RAILS_LOG_TO_STDOUT"].present?
    Rails.env.test? ? StringIO.new : $stdout
  end

  # Optional appender filter suppressing well-known noisy loggers, when
  # enabled via configuration.
  sig { returns(T.nilable(Regexp)) }
  def self.determine_filter
    return nil unless LogStruct.config.filter_noisy_loggers
    /\A(ActionView|ActionController::RoutingError|ActiveRecord::SchemaMigration)/
  end

  # Replace Rails.logger and every component logger (ActiveRecord,
  # ActionController, ActionMailer, ActiveJob, ActionView, ActionCable)
  # with a single LogStruct SemanticLogger instance, and record it on
  # app.config.logger. Maintains full Rails.logger API compatibility.
  #
  # @param app [Rails::Application] The Rails application instance
  sig { params(app: T.untyped).void }
  def self.replace_rails_logger(app)
    logger = LogStruct::SemanticLogger::Logger.new("Rails")
    Rails.logger = logger
    ActiveRecord::Base.logger = logger if defined?(ActiveRecord::Base)
    ActionController::Base.logger = logger if defined?(ActionController::Base)
    if defined?(ActionMailer::Base)
      ActionMailer::Base.logger = logger
      # Ensure ActionMailer.logger is also set (it might be accessed directly)
      T.unsafe(::ActionMailer).logger = logger if T.unsafe(::ActionMailer).respond_to?(:logger=)
    end
    ActiveJob::Base.logger = logger if defined?(ActiveJob::Base)
    ActionView::Base.logger = logger if defined?(ActionView::Base)
    ActionCable.server.config.logger = logger if defined?(ActionCable)
    # Store reference in app config
    app.config.logger = logger
  end
end
end
end
# typed: strict
# frozen_string_literal: true
- 2
require_relative "../enums/log_field"
- 2
require_relative "interfaces/request_fields"
- 2
module LogStruct
  module Log
    module Shared
      # Copies HTTP request attributes (path, method, client IP, user agent,
      # referer, request id) into a serialization hash, skipping absent ones.
      module AddRequestFields
        extend T::Sig
        extend T::Helpers
        requires_ancestor { Interfaces::RequestFields }

        sig { params(hash: T::Hash[Symbol, T.untyped]).void }
        def add_request_fields(hash)
          {
            LogField::Path => path,
            LogField::HttpMethod => http_method,
            LogField::SourceIp => source_ip,
            LogField::UserAgent => user_agent,
            LogField::Referer => referer,
            LogField::RequestId => request_id
          }.each do |field, value|
            hash[field.serialize] = value if value
          end
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# Moved from lib/log_struct/log/interfaces/additional_data_field.rb
- 2
module LogStruct
  module Log
    module Interfaces
      # Sorbet interface for log structs carrying an optional bag of extra
      # key/value data to merge into serialized output.
      module AdditionalDataField
        extend T::Sig
        extend T::Helpers
        interface!
        requires_ancestor { T::Struct }

        sig { abstract.returns(T.nilable(T::Hash[T.any(String, Symbol), T.untyped])) }
        def additional_data
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
require_relative "../../enums/source"
- 2
require_relative "../../enums/event"
- 2
require_relative "../../enums/level"
- 2
module LogStruct
  module Log
    module Interfaces
      # Sorbet interface implemented by all internal LogStruct log structs:
      # source, event, level, timestamp, and hash serialization.
      module CommonFields
        extend T::Sig
        extend T::Helpers
        interface!

        sig { abstract.returns(Source) }
        def source
        end

        sig { abstract.returns(Event) }
        def event
        end

        sig { abstract.returns(Level) }
        def level
        end

        sig { abstract.returns(Time) }
        def timestamp
        end

        sig { abstract.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
        def serialize(strict = true)
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
require_relative "../../enums/level"
- 2
module LogStruct
  module Log
    module Interfaces
      # Sorbet interface for user-defined (public) log structs: only level,
      # timestamp, and serialization are required here.
      module PublicCommonFields
        extend T::Sig
        extend T::Helpers
        interface!

        sig { abstract.returns(Level) }
        def level
        end

        sig { abstract.returns(Time) }
        def timestamp
        end

        sig { abstract.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
        def serialize(strict = true)
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
module LogStruct
  module Log
    module Interfaces
      # Sorbet interface for log structs exposing HTTP request attributes.
      # All readers are nilable: a field is simply absent when unknown.
      module RequestFields
        extend T::Sig
        extend T::Helpers
        interface!

        sig { abstract.returns(T.nilable(String)) }
        def path
        end

        sig { abstract.returns(T.nilable(String)) }
        def http_method
        end

        sig { abstract.returns(T.nilable(String)) }
        def source_ip
        end

        sig { abstract.returns(T.nilable(String)) }
        def user_agent
        end

        sig { abstract.returns(T.nilable(String)) }
        def referer
        end

        sig { abstract.returns(T.nilable(String)) }
        def request_id
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
require_relative "interfaces/additional_data_field"
- 2
module LogStruct
  module Log
    module Shared
      # Merges a struct's additional_data (when present) into a
      # serialization hash at the top level, symbolizing keys.
      module MergeAdditionalDataFields
        extend T::Sig
        extend T::Helpers
        requires_ancestor { T::Struct }
        requires_ancestor { Interfaces::AdditionalDataField }

        sig { params(hash: T::Hash[Symbol, T.untyped]).void }
        def merge_additional_data_fields(hash)
          extra = additional_data
          extra&.each { |key, value| hash[key.to_sym] = value }
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
require_relative "../enums/log_field"
- 2
require_relative "interfaces/common_fields"
- 2
require_relative "merge_additional_data_fields"
- 2
module LogStruct
  module Log
    module Shared
      # Serialization shared by all internal log structs: common fields
      # first (source/event/level/timestamp), then the struct's own fields,
      # then any additional_data hoisted to the top level.
      module SerializeCommon
        extend T::Sig
        extend T::Helpers
        requires_ancestor { Interfaces::CommonFields }

        sig { params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
        def serialize(strict = true)
          result = serialize_common(strict)
          kernel_self = T.cast(self, Kernel)
          struct_fields = T.cast(kernel_self.public_send(:to_h), T::Hash[LogStruct::LogField, T.untyped])
          struct_fields.each do |log_field, value|
            next if value.nil?
            key = log_field.serialize
            # Keep log lines compact: only the first 5 backtrace frames.
            value = value.first(5) if key == :backtrace && value.is_a?(Array)
            result[key] = value.is_a?(::Time) ? value.iso8601 : value
          end
          # Hoist any additional_data entries to the top level (the merge
          # helper expects symbol keys).
          if kernel_self.respond_to?(:merge_additional_data_fields)
            merge_target = T.cast(self, LogStruct::Log::Shared::MergeAdditionalDataFields)
            merge_target.merge_additional_data_fields(result)
          end
          result
        end

        sig { params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
        def serialize_common(strict = true)
          {
            LogField::Source.serialize => source.serialize.to_s,
            LogField::Event.serialize => event.serialize.to_s,
            LogField::Level.serialize => level.serialize.to_s,
            LogField::Timestamp.serialize => timestamp.iso8601(3)
          }
        end

        sig { params(options: T.untyped).returns(T::Hash[String, T.untyped]) }
        def as_json(options = nil)
          serialize.transform_keys(&:to_s)
        end
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
require_relative "../enums/log_field"
- 2
require_relative "interfaces/public_common_fields"
- 2
module LogStruct
  module Log
    # Common serialization for public custom log structs whose source/event
    # may be plain strings or symbols rather than LogStruct enums.
    module SerializeCommonPublic
      extend T::Sig
      extend T::Helpers
      requires_ancestor { Interfaces::PublicCommonFields }
      requires_ancestor { Kernel }

      sig { params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
      def serialize_common_public(strict = true)
        unless respond_to?(:source) && respond_to?(:event)
          raise ArgumentError, "Public log struct must define #source and #event"
        end
        {
          LogField::Source.serialize => stringify_field(public_send(:source)),
          LogField::Event.serialize => stringify_field(public_send(:event)),
          LogField::Level.serialize => level.serialize.to_s,
          LogField::Timestamp.serialize => timestamp.iso8601(3)
        }
      end

      sig { params(options: T.untyped).returns(T::Hash[String, T.untyped]) }
      def as_json(options = nil)
        serialize.transform_keys(&:to_s)
      end

      private

      # Prefer an enum's #serialize representation; otherwise fall back to #to_s.
      sig { params(value: T.untyped).returns(String) }
      def stringify_field(value)
        value.respond_to?(:serialize) ? value.public_send(:serialize).to_s : value.to_s
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
# Note: We use T::Struct for our Log classes so Sorbet is a hard requirement,
# not an optional dependency.
- 2
require "sorbet-runtime"
- 2
require "log_struct/sorbet/serialize_symbol_keys"
# Don't extend T::Sig to all modules! We're just a library, not a private Rails application
# See: https://sorbet.org/docs/sigs
# class Module
# include T::Sig
# end
# typed: strict
# frozen_string_literal: true
- 2
module LogStruct
  module Sorbet
    # Makes T::Struct#serialize return deeply symbolized keys, and aliases
    # #to_h to #serialize for hash-like consumers.
    module SerializeSymbolKeys
      extend T::Sig
      extend T::Helpers
      requires_ancestor { T::Struct }

      sig { params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
      def serialize(strict = true)
        # deep_symbolize_keys comes from ActiveSupport.
        super.deep_symbolize_keys
      end

      sig { returns(T::Hash[Symbol, T.untyped]) }
      def to_h
        serialize
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 2
require "digest"
- 2
module LogStruct
  # StringScrubber is inspired by logstop by @ankane: https://github.com/ankane/logstop
  # Enhancements:
  # - Shows which type of data was filtered
  # - Includes an SHA256 hash with filtered emails for request tracing
  # - Uses configuration options from LogStruct.config
  module StringScrubber
    class << self
      extend T::Sig

      # Also supports URL-encoded URLs like https%3A%2F%2Fuser%3Asecret%40example.com
      # cspell:ignore Fuser Asecret
      URL_PASSWORD_REGEX = /((?:\/\/|%2F%2F)[^\s\/]+(?::|%3A))[^\s\/]+(@|%40)/
      URL_PASSWORD_REPLACEMENT = '\1[PASSWORD]\2'
      EMAIL_REGEX = /\b[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,}\b/i
      CREDIT_CARD_REGEX_SHORT = /\b[3456]\d{15}\b/
      CREDIT_CARD_REGEX_DELIMITERS = /\b[3456]\d{3}[\s-]\d{4}[\s-]\d{4}[\s-]\d{4}\b/
      CREDIT_CARD_REPLACEMENT = "[CREDIT_CARD]"
      PHONE_REGEX = /\b\d{3}[\s-]\d{3}[\s-]\d{4}\b/
      PHONE_REPLACEMENT = "[PHONE]"
      SSN_REGEX = /\b\d{3}[\s-]\d{2}[\s-]\d{4}\b/
      SSN_REPLACEMENT = "[SSN]"
      IP_REGEX = /\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b/
      IP_REPLACEMENT = "[IP]"
      MAC_REGEX = /\b[0-9a-f]{2}(:[0-9a-f]{2}){5}\b/i
      MAC_REPLACEMENT = "[MAC]"

      # Scrub sensitive information from a string.
      #
      # Each filter is gated by the corresponding flag on
      # LogStruct.config.filters; emails are replaced with a traceable hash
      # ("[EMAIL:<hash>]") computed by HashUtils. A custom scrubbing handler
      # (LogStruct.config.string_scrubbing_handler) runs last and may return
      # a replacement string.
      #
      # Returns the scrubbed copy; the caller's string is never mutated.
      sig { params(string: String).returns(String) }
      def scrub(string)
        return string if string.empty?
        # Work on a copy so (possibly frozen) caller strings are not mutated.
        # The parameter is already a String per the sig, so the previous
        # redundant `.to_s` call was dropped.
        string = string.dup
        config = LogStruct.config.filters
        # Passwords in URLs
        string.gsub!(URL_PASSWORD_REGEX, URL_PASSWORD_REPLACEMENT) if config.url_passwords
        # Emails — replaced with a stable hash so requests can still be traced.
        if config.email_addresses
          string.gsub!(EMAIL_REGEX) do |email|
            email_hash = HashUtils.hash_value(email)
            "[EMAIL:#{email_hash}]"
          end
        end
        # Credit card numbers (contiguous and space/dash delimited forms)
        if config.credit_card_numbers
          string.gsub!(CREDIT_CARD_REGEX_SHORT, CREDIT_CARD_REPLACEMENT)
          string.gsub!(CREDIT_CARD_REGEX_DELIMITERS, CREDIT_CARD_REPLACEMENT)
        end
        # Phone numbers
        string.gsub!(PHONE_REGEX, PHONE_REPLACEMENT) if config.phone_numbers
        # SSNs
        string.gsub!(SSN_REGEX, SSN_REPLACEMENT) if config.ssns
        # IPs
        string.gsub!(IP_REGEX, IP_REPLACEMENT) if config.ip_addresses
        # MAC addresses
        string.gsub!(MAC_REGEX, MAC_REPLACEMENT) if config.mac_addresses
        # Custom scrubber runs last so it sees the already-filtered string.
        custom_scrubber = LogStruct.config.string_scrubbing_handler
        string = custom_scrubber.call(string) unless custom_scrubber.nil?
        string
      end
    end
  end
end
# typed: strict
# frozen_string_literal: true
- 1
require "log_struct"
# Add your own tasks in files placed in lib/tasks ending in .rake,
# for example lib/tasks/capistrano.rake, and they will automatically be available to Rake.
- 1
require_relative "config/application"
- 1
Rails.application.load_tasks
# typed: strict
# frozen_string_literal: true
- 1
# Base controller for the test application; all controllers inherit from here.
class ApplicationController < ActionController::Base
end
# typed: true
# frozen_string_literal: true
- 1
class LoggingController < ApplicationController
# Basic logging
- 1
def test_basic
# Test standard Rails logging - this is the primary usage pattern
- 2
Rails.logger.info("Info level message")
- 2
Rails.logger.warn("Warning level message")
- 2
Rails.logger.debug("Debug level message with context")
# For structured data, use LogStruct's Log::Plain
- 2
plain_log = LogStruct::Log::Plain.new(
message: "Structured log message",
source: LogStruct::Source::App
)
- 2
Rails.logger.info(plain_log)
# Test email scrubbing in plain string
- 2
Rails.logger.info("User email is test@example.com and password is secret123")
- 2
render json: {status: "ok", message: "Basic logging completed"}
end
# Error logging
- 1
def test_error
# Since the tests run in the test environment and Rails' test behavior may catch exceptions
# differently, let's log the error but also raise it to ensure it's properly captured
- 1
Rails.logger.info("About to raise test error")
begin
- 1
raise "Test error for integration testing"
rescue => e
# Log the error first
- 1
error_log = LogStruct::Log::Error.new(
source: LogStruct::Source::App,
error_class: e.class,
message: e.message
)
- 1
Rails.logger.error(error_log)
# Then re-raise it for the test to catch
- 1
raise
end
end
# Custom log structures
- 1
def test_custom
# Create and log a custom log structure
- 1
5.times do |i|
- 5
custom_log = LogStruct::Log::Plain.new(
message: "Custom log message #{i}",
source: LogStruct::Source::App,
additional_data: {
iteration: i,
timestamp: Time.now.to_f,
random: rand(100)
}
)
- 5
Rails.logger.info(custom_log)
end
- 1
render json: {status: "ok", message: "Custom logging completed"}
end
# Request logging test - DO NOT MODIFY THIS METHOD
# This method INTENTIONALLY reproduces the SystemStackError issue
# which must be fixed in the LogStruct codebase itself.
- 1
def test_request
# This is exactly the code that was causing the infinite recursion issue
# We need to fix the library - not modify this test!
- 1
request_log = LogStruct::Log::Request.new(
http_method: "GET",
path: "/api/users",
status: 200,
duration_ms: 15.5,
source_ip: "127.0.0.1"
)
- 1
Rails.logger.info(request_log)
- 1
render json: {status: "ok", message: "Request logging completed"}
end
# Model-related logging
- 1
def test_model
# Create a test user to trigger ActiveRecord logging
user = User.create!(name: "Test User", email: "user@example.com")
# Simple string logging
Rails.logger.info("Created user #{user.id}")
# Get the existing user
found_user = User.find(user.id)
Rails.logger.info("Found user: #{found_user.name}")
render json: {status: "ok", message: "Model logging completed", user_id: user.id}
end
# Job-related logging
- 1
def test_job
# Enqueue a job to test ActiveJob integration
job = TestJob.perform_later("test_argument")
Rails.logger.info("Job enqueued with ID: #{job.job_id}")
# LogStruct will automatically enhance job enqueued/performed logs
render json: {status: "ok", message: "Job enqueued for testing", job_id: job.job_id}
end
# Context and tagging
- 1
def test_context
# TODO: Fix types for the tagged method
# Test Rails' built-in tagged logging
T.unsafe(Rails.logger).tagged("REQUEST_ID_123", "USER_456") do
Rails.logger.info("Message with tags")
# Nested tags
T.unsafe(Rails.logger).tagged("NESTED") do
Rails.logger.warn("Message with nested tags")
end
end
# Message without tags
Rails.logger.info("Message without tags")
render json: {status: "ok", message: "Context logging completed"}
end
- 1
def test_error_logging
# Also test error handling in formatter by logging to trigger fallback handlers
begin
# Raise an error
- 1
raise "Test error for recursion safety"
rescue => e
# Log the error, which would trigger the formatter code
- 1
Rails.logger.error("Error occurred: #{e.message}")
# Also try structured error logging
- 1
error_log = LogStruct::Log::Error.new(
source: LogStruct::Source::App,
message: e.message,
error_class: e.class
)
- 1
Rails.logger.error(error_log)
end
# If we got here without a SystemStackError, the infinite recursion was prevented
- 1
render json: {status: "ok", message: "Stack-safe error handling test completed"}
end
end
# typed: true
# frozen_string_literal: true
- 1
# Base job class for the test application; all jobs inherit from here.
class ApplicationJob < ActiveJob::Base
end
# typed: true
# frozen_string_literal: true
- 1
class TestJob < ApplicationJob
- 1
queue_as :default
- 1
def perform(arg)
# Log job processing - standard Rails approach
logger.info("Processing job #{job_id} with argument: #{arg}")
# Simulate some work
sleep 0.1
# Test error handling in a job
begin
raise StandardError, "Test job error"
rescue => e
# Standard Rails logging
logger.error("Job error: #{e.message}")
# Example of enhanced structured logging
exception_log = LogStruct::Log::Error.new(
source: LogStruct::Source::Job,
error_class: e.class,
message: e.message,
additional_data: {job_class: self.class.name, job_id: job_id}
)
logger.error(exception_log)
end
# Log job completion
logger.info("Job #{job_id} completed successfully")
end
end
- 1
class ApplicationMailer < ActionMailer::Base
- 1
default from: "from@example.com"
- 1
layout "mailer"
end
# typed: true
# frozen_string_literal: true
- 1
class TestMailer < ApplicationMailer
- 1
def test_email_with_ids(account, user)
- 1
@account = account
- 1
@user = user
- 1
mail(to: "test@example.com", subject: "Test Email")
end
- 1
def test_email_with_organization(organization)
- 1
@organization = organization
- 1
mail(to: "test@example.com", subject: "Test Email")
end
end
# typed: true
# frozen_string_literal: true
- 1
# Base model for the test application.
class ApplicationRecord < ActiveRecord::Base
  # Marks this class as the application's primary abstract class (Rails 7+).
  # This already sets `self.abstract_class = true`, so the previous
  # duplicate explicit assignment was removed as redundant.
  primary_abstract_class
end
# typed: strict
# frozen_string_literal: true
- 1
class Document < ApplicationRecord
- 1
extend T::Sig
- 1
has_one_attached :file
- 2
sig { params(filename: String, content: String).returns(Document) }
- 1
def self.create_with_file(filename:, content:)
- 4
document = T.let(create!, Document)
- 4
document.file.attach(
io: StringIO.new(content),
filename: filename,
content_type: "text/plain"
)
- 4
document
end
end
# typed: true
# frozen_string_literal: true
- 1
class User < ApplicationRecord
- 1
validates :name, presence: true
- 1
validates :email, presence: true, format: {with: URI::MailTo::EMAIL_REGEXP}
# Add callbacks to test logging
- 1
after_create :log_creation
- 1
after_update :log_update
- 1
private
- 1
def log_creation
Rails.logger.info("User created with ID: #{id} and email: #{attributes["email"]}")
end
- 1
def log_update
# Standard Rails logging with context
changed_attrs = previous_changes.keys.join(", ")
Rails.logger.info("User #{id} updated. Changed attributes: #{changed_attrs}")
end
end
# typed: true
- 1
require_relative "boot"
- 1
require "rails/all"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
- 1
Bundler.require(*Rails.groups)
- 1
module LogstructTestApp
- 1
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
- 1
config.load_defaults 8.0
# Configuration for the application, engines, and railties goes here.
#
# These settings can be overridden in specific environments using the files
# in config/environments, which are processed later.
#
# config.time_zone = "Central Time (US & Canada)"
# config.eager_load_paths << Rails.root.join("extras")
# Only use API mode
- 1
config.api_only = true
# Use test adapter for ActiveJob in all environments for testing
- 1
config.active_job.queue_adapter = :test
# Force all environments to log to STDOUT so development behaves like test/production
# This mirrors how many platforms and 12-factor apps expect logs to be emitted.
- 1
config.log_level = :debug
- 1
stdout_logger = ActiveSupport::Logger.new($stdout)
- 1
stdout_logger.formatter = config.log_formatter
- 1
config.logger = ActiveSupport::TaggedLogging.new(stdout_logger)
end
end
# Load the Rails application.
- 1
require_relative "application"
# Initialize the Rails application.
- 1
Rails.application.initialize!
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
- 1
Rails.application.configure do
# Host authorization for tests - allow .localhost subdomains, IPs, and www.example.com
- 1
config.hosts = [
".localhost",
"www.example.com",
IPAddr.new("0.0.0.0/0"), # IPv4
IPAddr.new("::/0"), # IPv6
]
# Settings specified here will take precedence over those in config/application.rb.
# While tests run files are not watched, reloading is not necessary.
- 1
config.enable_reloading = false
# Eager loading loads your entire application. When running a single test locally,
# this is usually not necessary, and can slow down your test suite. However, it's
# recommended that you enable it in continuous integration systems to ensure eager
# loading is working properly before deploying your code.
- 1
config.eager_load = ENV["CI"].present?
# Configure public file server for tests with cache-control for performance.
- 1
config.public_file_server.headers = { "cache-control" => "public, max-age=3600" }
# Show full error reports.
- 1
config.consider_all_requests_local = true
- 1
config.cache_store = :null_store
# Render exception templates for rescuable exceptions and raise for other exceptions.
- 1
config.action_dispatch.show_exceptions = :rescuable
# Disable request forgery protection in test environment.
- 1
config.action_controller.allow_forgery_protection = false
# Store uploaded files on the local file system in a temporary directory.
- 1
config.active_storage.service = :test
# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
- 1
config.action_mailer.delivery_method = :test
# Set host to be used by links generated in mailer templates.
- 1
config.action_mailer.default_url_options = { host: "example.com" }
# Print deprecation notices to the stderr.
- 1
config.active_support.deprecation = :stderr
# Raises error for missing translations.
# config.i18n.raise_on_missing_translations = true
# Annotate rendered view with file names.
# config.action_view.annotate_rendered_view_with_filenames = true
# Raise error when a before_action's only/except options reference missing actions.
- 1
config.action_controller.raise_on_missing_callback_actions = true
end
# Be sure to restart your server when you modify this file.
# Avoid CORS issues when API is called from the frontend app.
# Handle Cross-Origin Resource Sharing (CORS) in order to accept cross-origin Ajax requests.
# Read more: https://github.com/cyu/rack-cors
# Rails.application.config.middleware.insert_before 0, Rack::Cors do
# allow do
# origins "example.com"
#
# resource "*",
# headers: :any,
# methods: [:get, :post, :put, :patch, :delete, :options, :head]
# end
# end
# Be sure to restart your server when you modify this file.
# Configure parameters to be partially matched (e.g. passw matches password) and filtered from the log file.
# Use this to limit dissemination of sensitive information.
# See the ActiveSupport::ParameterFilter documentation for supported notations and behaviors.
- 1
# Partial-match parameter names whose values Rails redacts from logs.
Rails.application.config.filter_parameters += [
  :passw, :email, :secret, :token, :_key, :crypt, :salt, :certificate, :otp, :ssn, :cvv, :cvc
]
# Be sure to restart your server when you modify this file.
# Add new inflection rules using the following format. Inflections
# are locale specific, and you may define rules for as many different
# locales as you wish. All of these examples are active by default:
# ActiveSupport::Inflector.inflections(:en) do |inflect|
# inflect.plural /^(ox)$/i, "\\1en"
# inflect.singular /^(ox)en/i, "\\1"
# inflect.irregular "person", "people"
# inflect.uncountable %w( fish sheep )
# end
# These inflection rules are supported but not enabled by default:
# ActiveSupport::Inflector.inflections(:en) do |inflect|
# inflect.acronym "RESTful"
# end
# typed: strict
- 1
require "log_struct"
# Configure LogStruct
- 1
LogStruct.configure do |config|
# Specify which environments to enable in
- 1
config.enabled_environments = [:development, :test, :production]
# Specify which environments are considered local/development
- 1
config.local_environments = [:development, :test]
# Configure integrations
- 1
config.integrations.enable_lograge = true
- 1
config.integrations.enable_actionmailer = true
- 1
config.integrations.enable_activejob = true
- 1
config.integrations.enable_rack_error_handler = true
- 1
config.integrations.enable_sidekiq = !!defined?(Sidekiq)
- 1
config.integrations.enable_shrine = !!defined?(Shrine)
- 1
config.integrations.enable_carrierwave = !!defined?(CarrierWave)
- 1
config.integrations.enable_activestorage = true
# Configure string scrubbing filters
- 1
config.filters.email_addresses = true
- 1
config.filters.url_passwords = true
- 1
config.filters.credit_card_numbers = true
- 1
config.filters.phone_numbers = true
- 1
config.filters.ssns = true
- 1
config.filters.ip_addresses = true
- 1
config.filters.mac_addresses = true
# Configure error handling modes
- 1
config.error_handling_modes.logstruct_errors = LogStruct::ErrorHandlingMode::Log
- 1
config.error_handling_modes.security_errors = LogStruct::ErrorHandlingMode::Report
- 1
config.error_handling_modes.standard_errors = LogStruct::ErrorHandlingMode::LogProduction
end
# typed: strict
# frozen_string_literal: true
- 1
Rails.application.routes.draw do
# Testing routes
- 1
get "/logging/basic", to: "logging#test_basic"
- 1
get "/logging/error", to: "logging#test_error"
- 1
get "/logging/model", to: "logging#test_model"
- 1
get "/logging/job", to: "logging#test_job"
- 1
get "/logging/context", to: "logging#test_context"
- 1
get "/logging/custom", to: "logging#test_custom"
- 1
get "/logging/request", to: "logging#test_request"
- 1
get "/logging/error_logging", to: "logging#test_error_logging"
# Healthcheck route
- 3
get "/health", to: proc { [200, {}, ["OK"]] }
end
# typed: true
# frozen_string_literal: true
- 1
require "test_helper"
- 1
class ActionMailerIdMappingTest < ActiveSupport::TestCase
- 1
setup do
- 2
@original_mapping = LogStruct.config.integrations.actionmailer_id_mapping
# Use StringIO to capture log output
- 2
@log_output = StringIO.new
- 2
@original_logger = Rails.logger
# Create a new logger with our StringIO and LogStruct's formatter
- 2
logger = Logger.new(@log_output)
- 2
logger.formatter = LogStruct::Formatter.new
- 2
Rails.logger = logger
end
- 1
teardown do
- 2
LogStruct.config.integrations.actionmailer_id_mapping = @original_mapping
- 2
Rails.logger = @original_logger
end
# Helper method to parse log entries
- 1
def find_log_entries(event_type)
- 2
@log_output.rewind
- 2
logs = []
- 2
@log_output.each_line do |line|
- 4
if line =~ /(\{.+\})/
- 4
json = JSON.parse($1)
- 4
logs << json if json["src"] == "mailer" && json["evt"] == event_type
end
rescue JSON::ParserError
# Skip lines that don't contain valid JSON
end
- 2
logs
end
- 1
test "actionmailer_id_mapping extracts configured instance variables as IDs in additional_data" do
# Clear the log buffer before the test
- 1
@log_output.truncate(0)
- 1
@log_output.rewind
# Configure default ID mapping
- 1
LogStruct.config.integrations.actionmailer_id_mapping = {
account: :account_id,
user: :user_id
}
# Create test objects
- 1
account = Struct.new(:id).new(123)
- 1
user = Struct.new(:id).new(456)
# Deliver email
- 1
TestMailer.test_email_with_ids(account, user).deliver_now
# Find delivery logs in the captured output
- 1
delivery_logs = find_log_entries("delivered")
- 1
assert_not_empty delivery_logs, "Expected delivery logs to be generated"
- 1
delivered_log = delivery_logs.first
# Check that account_id and user_id are in the log
- 1
assert_equal 123, delivered_log["account_id"]
- 1
assert_equal 456, delivered_log["user_id"]
end
- 1
test "actionmailer_id_mapping uses custom field names" do
# Clear the log buffer before the test
- 1
@log_output.truncate(0)
- 1
@log_output.rewind
# Configure custom ID mapping
- 1
LogStruct.config.integrations.actionmailer_id_mapping = {
organization: :org_id
}
# Create test object
- 1
organization = Struct.new(:id).new(789)
# Deliver email
- 1
TestMailer.test_email_with_organization(organization).deliver_now
# Find delivery logs in the captured output
- 1
delivery_logs = find_log_entries("delivered")
- 1
assert_not_empty delivery_logs, "Expected delivery logs to be generated"
- 1
delivered_log = delivery_logs.first
# Check that org_id is in the log
- 1
assert_equal 789, delivered_log["org_id"]
# Should not have account_id or user_id
- 1
assert_nil delivered_log["account_id"]
- 1
assert_nil delivered_log["user_id"]
end
end
# typed: true
# frozen_string_literal: true
- 1
require "test_helper"
- 1
class ActiveStorageTest < ActiveSupport::TestCase
- 1
setup do
# Use StringIO to capture log output
- 5
@log_output = StringIO.new
- 5
@original_logger = Rails.logger
# Create a new logger with our StringIO and LogStruct's formatter
- 5
logger = Logger.new(@log_output)
- 5
logger.formatter = LogStruct::Formatter.new
- 5
Rails.logger = logger
end
- 1
teardown do
# Restore the original logger
- 5
Rails.logger = @original_logger
end
# Helper method to parse log entries
- 1
def find_log_entries(event_type)
# Reset the StringIO position to the beginning
- 5
@log_output.rewind
# Parse the log contents looking for JSON data
- 5
logs = []
- 5
@log_output.each_line do |line|
# Log lines might have timestamps or other text before the JSON
- 5
if line =~ /(\{.+\})/
- 5
json = JSON.parse($1)
# Only include active storage logs with the specified event
- 5
logs << json if json["src"] == "storage" && json["evt"] == event_type
end
rescue JSON::ParserError
# Skip lines that don't contain valid JSON
end
- 5
logs
end
- 1
test "logs are created when uploading a file" do
# Clear the log buffer before the test
- 1
@log_output.truncate(0)
- 1
@log_output.rewind
# Create a document with an attached file, which should trigger upload
- 1
Document.create_with_file(
filename: "test_file.txt",
content: "This is test content for Active Storage"
)
# Give some time for the async events to process
- 1
sleep(0.2)
# Find upload logs in the captured output
- 1
upload_logs = find_log_entries("upload")
- 1
assert_not_empty upload_logs, "Expected upload logs to be generated"
- 1
upload_log = upload_logs.first
- 1
assert_equal "storage", upload_log["src"]
- 1
assert_equal "upload", upload_log["evt"]
- 1
assert_equal "Disk", upload_log["storage"]
- 1
assert_not_nil upload_log["file_id"]
- 1
assert_not_nil upload_log["checksum"]
- 1
assert_not_nil upload_log["duration_ms"]
end
- 1
test "logs are created when downloading a file" do
# Create a document with a file for testing
- 1
document = Document.create_with_file(
filename: "download_test.txt",
content: "This is content to download"
)
# Clear the log buffer before the test
- 1
@log_output.truncate(0)
- 1
@log_output.rewind
# Download the file
- 1
document.file.download
# Give some time for the async events to process
- 1
sleep(0.2)
# Find download logs in the captured output
- 1
download_logs = find_log_entries("download")
- 1
assert_not_empty download_logs, "Expected download logs to be generated"
- 1
download_log = download_logs.first
- 1
assert_equal "storage", download_log["src"]
- 1
assert_equal "download", download_log["evt"]
- 1
assert_equal "Disk", download_log["storage"]
- 1
assert_not_nil download_log["file_id"]
- 1
assert_not_nil download_log["duration_ms"]
end
- 1
test "logs are created when checking if a file exists" do
# Create a document with a file for testing
- 1
document = Document.create_with_file(
filename: "exist_test.txt",
content: "This is content to check existence"
)
# Clear the log buffer before the test
- 1
@log_output.truncate(0)
- 1
@log_output.rewind
# Check if file exists - we need to hit the storage service directly to trigger the exist event
# In ActiveStorage, we need to directly check through the storage service
- 1
storage = ActiveStorage::Blob.service
- 1
storage.exist?(document.file.key)
# Give some time for the async events to process
- 1
sleep(0.2)
# Find existence check logs in the captured output
- 1
exist_logs = find_log_entries("exist")
- 1
assert_not_empty exist_logs, "Expected existence check logs to be generated"
- 1
exist_log = exist_logs.first
- 1
assert_equal "storage", exist_log["src"]
- 1
assert_equal "exist", exist_log["evt"]
- 1
assert_equal "Disk", exist_log["storage"]
- 1
assert_not_nil exist_log["file_id"]
end
- 1
test "logs are created when deleting a file" do
# Create a document with a file for testing
- 1
document = Document.create_with_file(
filename: "delete_test.txt",
content: "This is content to delete"
)
# Clear the log buffer before the test
- 1
@log_output.truncate(0)
- 1
@log_output.rewind
# Delete the file
- 1
document.file.purge
# Give some time for the async events to process
- 1
sleep(0.2)
# Find delete logs in the captured output
- 1
delete_logs = find_log_entries("delete")
- 1
assert_not_empty delete_logs, "Expected delete logs to be generated"
- 1
delete_log = delete_logs.first
- 1
assert_equal "storage", delete_log["src"]
- 1
assert_equal "delete", delete_log["evt"]
- 1
assert_equal "Disk", delete_log["storage"]
- 1
assert_not_nil delete_log["file_id"]
end
- 1
test "logs contain expected metadata fields" do
# Clear the log buffer before the test
- 1
@log_output.truncate(0)
- 1
@log_output.rewind
# Create a document with specific metadata
- 1
document = Document.create!
# Clear the buffer again to make sure we only capture the attach operation
- 1
@log_output.truncate(0)
- 1
@log_output.rewind
# Now attach the file with our known metadata
- 1
document.file.attach(
io: StringIO.new("Test content with specific metadata"),
filename: "metadata_test.txt",
content_type: "text/plain"
)
# Give some time for the async events to process
- 1
sleep(0.2)
# Find upload logs in the captured output
- 1
upload_logs = find_log_entries("upload")
- 1
assert_not_empty upload_logs, "Expected upload logs to be generated"
- 1
upload_log = upload_logs.first
# Verify upload log contains the expected fields
# The checksum should be present
- 1
assert_not_nil upload_log["checksum"]
# Check file size if available - from the blob service
- 1
if upload_log["size"]
assert_kind_of Integer, upload_log["size"]
end
# Check for duration which should always be present
- 1
assert_not_nil upload_log["duration_ms"]
end
end
# typed: true
- 1
require "test_helper"
- 1
require "open3"
- 1
# Boots the app in a subprocess via `rails runner` and asserts on the
# emitted boot logs: structured JSON dotenv logs when LogStruct is enabled,
# original plain-text dotenv lines when disabled.
class BootLogsIntegrationTest < ActiveSupport::TestCase
  def test_rails_runner_emits_dotenv_structured_logs_and_ends_with_true
    env = {
      "LOGSTRUCT_ENABLED" => "true",
      "RAILS_ENV" => "test",
      "RAILS_LOG_TO_STDOUT" => "1"
    }
    cmd = ["bundle", "exec", "rails", "runner", "puts LogStruct.enabled?"]
    stdout_str, stderr_str, status = Open3.capture3(env, *cmd)
    assert_predicate status, :success?, "rails runner failed: #{stderr_str}"
    output = stdout_str.to_s
    refute_empty output, "Expected some output from rails runner"
    lines = output.split("\n").map(&:strip).reject(&:empty?)
    # Drop SimpleCov summary lines that the subprocess may print at exit.
    lines.reject! do |line|
      line.start_with?("Coverage report generated", "Line Coverage:", "Branch Coverage:")
    end
    # Ensure the last non-empty line is 'true'
    last_line = lines.last
    assert_equal "true", last_line, "Expected final line to be 'true'"
    before = lines[0...-1] || []
    refute_empty before, "Expected logs before the final result"
    json_logs = before.filter_map { |l|
      begin
        JSON.parse(l)
      rescue
        nil
      end
    }
    dotenv_logs = json_logs.select { |h| h["src"] == "dotenv" }
    assert_equal 2, dotenv_logs.size, "Expected two dotenv logs"
    assert dotenv_logs.any? { |h| h["evt"] == "load" }, "Expected a load event"
    assert dotenv_logs.any? { |h| h["evt"] == "update" }, "Expected an update event"
  end

  def test_rails_runner_emits_original_dotenv_logs_when_disabled
    env = {
      "LOGSTRUCT_ENABLED" => "false",
      "RAILS_ENV" => "development",
      "RAILS_LOG_TO_STDOUT" => "1"
    }
    cmd = ["bundle", "exec", "rails", "runner", "puts LogStruct.enabled?"]
    stdout_str, stderr_str, status = Open3.capture3(env, *cmd)
    assert_predicate status, :success?, "rails runner failed: #{stderr_str}"
    output = stdout_str.to_s
    refute_empty output, "Expected some output from rails runner"
    lines = output.split("\n").map(&:strip).reject(&:empty?)
    # Drop SimpleCov summary lines that the subprocess may print at exit.
    lines.reject! do |line|
      line.start_with?("Coverage report generated", "Line Coverage:", "Branch Coverage:")
    end
    last_line = lines.last
    assert_equal "false", last_line, "Expected final line to be 'false'"
    before = lines[0...-1] || []
    refute_empty before, "Expected logs before the final result"
    # Expect original dotenv log lines (not JSON)
    dotenv_lines = before.select { |l| l.start_with?("[dotenv]") }
    assert_equal 2, dotenv_lines.size, "Expected two original dotenv lines"
    assert dotenv_lines.any? { |l| l.include?("Set ") }, "Expected a 'Set ...' line"
    assert dotenv_lines.any? { |l| l.include?("Loaded ") }, "Expected a 'Loaded ...' line"
  end
end
# typed: true
- 1
require "test_helper"
- 1
class DotenvIntegrationTest < ActiveSupport::TestCase
- 1
def setup
- 1
@io = StringIO.new
- 1
::SemanticLogger.clear_appenders!
- 1
::SemanticLogger.add_appender(io: @io, formatter: LogStruct::SemanticLogger::Formatter.new, async: false)
end
- 1
def test_emits_structured_dotenv_logs_and_suppresses_unstructured_messages
# Simulate a dotenv update event after boot
- 1
diff = Struct.new(:env).new({"BOOT_FLAG" => "1", "REGION" => "ap-southeast-2"})
- 1
ActiveSupport::Notifications.instrument("update.dotenv", diff: diff) {}
- 1
::SemanticLogger.flush
- 1
@io.rewind
- 1
lines = @io.read.to_s.split("\n").map(&:strip).reject(&:empty?)
- 1
refute_empty lines, "Expected logs to be captured during test"
- 1
json_logs = lines.filter_map { |l|
begin
- 1
JSON.parse(l)
rescue
nil
end
}
- 2
dotenv_updates = json_logs.select { |h| h["src"] == "dotenv" && h["evt"] == "update" }
- 1
refute_empty dotenv_updates, "Expected a structured dotenv update log"
# Vars should include at least BOOT_FLAG
- 2
assert dotenv_updates.any? { |h| Array(h["vars"]).include?("BOOT_FLAG") }, "Expected BOOT_FLAG in vars"
# Ensure no plain unstructured "Set ..." messages slipped through
- 2
no_unstructured = json_logs.none? { |h| h["msg"].is_a?(String) && h["msg"].start_with?("Set ") }
- 1
assert no_unstructured, "Found unstructured 'Set ...' message in logs"
end
end
# typed: true
# frozen_string_literal: true
- 1
require "test_helper"
- 1
class HostAuthorizationTest < ActionDispatch::IntegrationTest
- 1
def setup
# Capture JSON output via a dedicated SemanticLogger appender
- 3
@io = StringIO.new
- 3
::SemanticLogger.clear_appenders!
# Use synchronous appender to avoid timing issues in tests
- 3
::SemanticLogger.add_appender(io: @io, formatter: LogStruct::SemanticLogger::Formatter.new, async: false)
end
- 1
def test_blocked_host_is_logged_with_logstruct
# Make a request with a blocked host
- 1
host! "blocked-host.example.com"
- 1
get "/health"
# Should return 403 Forbidden
- 1
assert_response :forbidden
# Ensure all logs are flushed from buffers
- 1
::SemanticLogger.flush
# Read all logged lines
- 1
@io.rewind
- 1
lines = @io.read.to_s.split("\n").map(&:strip).reject(&:empty?)
# Parse JSON logs
- 1
parsed_logs = lines.filter_map { |l|
begin
- 1
JSON.parse(l)
rescue
nil
end
}
# Find blocked host logs
- 2
blocked_host_logs = parsed_logs.select { |log| log["evt"] == "blocked_host" }
- 1
assert_equal 1, blocked_host_logs.size, "Expected exactly one blocked host log entry"
- 1
log_entry = blocked_host_logs.first
# Verify the log entry has the correct structure
- 1
assert_equal "security", log_entry["src"]
- 1
assert_equal "blocked_host", log_entry["evt"]
- 1
assert_equal "blocked-host.example.com", log_entry["blocked_host"]
- 1
assert_equal "/health", log_entry["path"]
- 1
assert_equal "GET", log_entry["method"]
end
- 1
def test_allowed_host_is_not_blocked
# Make a request with an allowed host (.localhost is allowed by default)
- 1
host! "www.localhost"
- 1
get "/health"
# Should return 200 OK
- 1
assert_response :success
# Ensure all logs are flushed from buffers
- 1
::SemanticLogger.flush
# Read all logged lines
- 1
@io.rewind
- 1
lines = @io.read.to_s.split("\n").map(&:strip).reject(&:empty?)
# Parse JSON logs
- 1
parsed_logs = lines.filter_map { |l|
begin
JSON.parse(l)
rescue
nil
end
}
# Find blocked host logs
- 1
blocked_host_logs = parsed_logs.select { |log| log["evt"] == "blocked_host" }
- 1
assert_equal 0, blocked_host_logs.size, "Should not log blocked host for allowed hosts"
end
- 1
def test_blocked_host_log_can_be_serialized
- 1
host! "malicious.example.com"
- 1
get "/health"
- 1
assert_response :forbidden
# Ensure all logs are flushed from buffers
- 1
::SemanticLogger.flush
# Read all logged lines
- 1
@io.rewind
- 1
lines = @io.read.to_s.split("\n").map(&:strip).reject(&:empty?)
# Parse JSON logs
- 1
parsed_logs = lines.filter_map { |l|
begin
- 1
JSON.parse(l)
rescue
nil
end
}
# Find blocked host logs
- 2
blocked_host_logs = parsed_logs.select { |log| log["evt"] == "blocked_host" }
- 1
assert_equal 1, blocked_host_logs.size
- 1
log_entry = blocked_host_logs.first
# Verify it's a properly serialized hash
- 1
assert_kind_of Hash, log_entry
# Verify key fields are in serialized output
- 1
assert_equal "security", log_entry["src"]
- 1
assert_equal "blocked_host", log_entry["evt"]
- 1
assert_equal "malicious.example.com", log_entry["blocked_host"]
- 1
assert_equal "/health", log_entry["path"]
- 1
assert_equal "GET", log_entry["method"]
end
end
# typed: true
# frozen_string_literal: true
- 1
require "test_helper"
# Integration smoke tests for the demo app's logging endpoints: each
# endpoint exercises a LogStruct code path and reports status via JSON.
class LoggingIntegrationTest < ActionDispatch::IntegrationTest
  # Basic test to ensure the Rails app is working
  def test_healthcheck_works
    get "/health"
    assert_response :success
    assert_equal "OK", response.body
  end

  # More detailed test to verify basic logging
  def test_basic_logging_endpoint_works
    get "/logging/basic"
    assert_response :success
    response_json = JSON.parse(response.body)
    assert_equal "ok", response_json["status"]
    assert_equal "Basic logging completed", response_json["message"]
  end

  # Test error logging: the endpoint raises and the error must propagate
  # out of the request.
  def test_error_logging_endpoint_works
    # assert_raises replaces the original hand-rolled `error_raised` flag:
    # it fails automatically when no RuntimeError is raised, whereas the
    # flag pattern needed a separate assertion to catch that case.
    error = assert_raises(RuntimeError) do
      get "/logging/error"
    end
    assert_equal "Test error for integration testing", error.message
  end

  # Test custom log structures
  def test_custom_log_class_work
    get "/logging/custom"
    assert_response :success
    response_json = JSON.parse(response.body)
    assert_equal "ok", response_json["status"]
    assert_equal "Custom logging completed", response_json["message"]
  end

  # Test request logging
  def test_request_logging_works
    get "/logging/request"
    assert_response :success
    response_json = JSON.parse(response.body)
    assert_equal "ok", response_json["status"]
    assert_equal "Request logging completed", response_json["message"]
  end

  # Test that error handling is stack-safe
  def test_error_logging
    # This test intentionally creates a situation that would cause
    # an infinite loop if error handling is not implemented correctly
    get "/logging/error_logging"
    assert_response :success
    response_json = JSON.parse(response.body)
    assert_equal "ok", response_json["status"]
    assert_equal "Stack-safe error handling test completed", response_json["message"]
  end
end
# typed: true
- 1
require "test_helper"
- 1
class LogrageFormatterIntegrationTest < ActionDispatch::IntegrationTest
- 1
def setup
# Capture JSON output via a dedicated SemanticLogger appender
- 1
@io = StringIO.new
- 1
::SemanticLogger.clear_appenders!
# Use synchronous appender to avoid timing issues in tests
- 1
::SemanticLogger.add_appender(io: @io, formatter: LogStruct::SemanticLogger::Formatter.new, async: false)
end
- 1
def test_request_through_stack_emits_json_request_log
- 1
get "/logging/basic", params: {format: :json}
- 1
assert_response :success
# Ensure all logs are flushed from buffers
- 1
::SemanticLogger.flush
# Read all logged lines
- 1
@io.rewind
- 1
lines = @io.read.to_s.split("\n").map(&:strip).reject(&:empty?)
- 1
refute_empty lines, "Expected some JSON logs to be emitted"
# Find the request log entry
- 1
request_log = lines.filter_map { |l|
begin
- 6
JSON.parse(l)
rescue
nil
end
- 6
}.find { |h| h["evt"] == "request" }
- 1
refute_nil request_log, "Expected a request log entry"
# Validate normalized types
- 1
assert_equal "GET", request_log["method"]
- 1
assert_equal "json", request_log["format"]
- 1
assert_kind_of Hash, request_log["params"]
end
end
# typed: true
- 1
require "test_helper"
- 1
require "open3"
- 1
require "timeout"
# Boots a real `rails server` subprocess and asserts that Puma's lifecycle
# messages are converted into exactly two structured JSON logs
# (start then shutdown) by the LogStruct Puma integration.
class PumaIntegrationTest < ActiveSupport::TestCase
  def test_rails_server_emits_structured_puma_logs_and_on_exit
    port = 32123
    env = {
      "LOGSTRUCT_ENABLED" => "true",
      "RAILS_ENV" => "test",
      "RAILS_LOG_TO_STDOUT" => "1"
    }
    cmd = ["bundle", "exec", "rails", "server", "-p", port.to_s]
    Open3.popen3(env, *cmd) do |_stdin, stdout, stderr, wait_thr| # cspell:disable-line
      lines = []
      begin
        # Read boot output until Puma reports it is ready to accept requests.
        Timeout.timeout(10) do
          while (line = stdout.gets)
            lines << line.strip
            break if line.include?("Use Ctrl-C to stop")
          end
        end
        # Send TERM to trigger graceful shutdown
        begin
          Process.kill("TERM", wait_thr.pid)
        rescue Errno::ESRCH
          # Process already exited
        end
        # Collect shutdown output
        Timeout.timeout(10) do
          while (line = stdout.gets)
            lines << line.strip
          end
        end
      rescue Timeout::Error
        # Fall through and ensure process is terminated
      ensure
        begin
          Process.kill("TERM", wait_thr.pid)
        rescue Errno::ESRCH
          # already dead
        end
      end

      output = lines.join("\n")
      # BUG FIX: read stderr exactly once. The original interpolated
      # `stderr.read` into several (eagerly-evaluated) failure messages;
      # after the first read the stream is exhausted, so every later
      # message reported an empty STDERR.
      stderr_output = stderr.read

      # Consider only logs after the first JSON line — Rails/Puma may
      # print plain-text banners before LogStruct takes over.
      first_json_index = lines.find_index { |l|
        l.strip.start_with?("{") && begin
          JSON.parse(l)
        rescue
          nil
        end
      }
      assert first_json_index, "Did not find any JSON log lines. Output: #{output}\nSTDERR: #{stderr_output}"

      after_lines = lines[first_json_index..]
      after_json = after_lines.filter_map do |l|
        JSON.parse(l)
      rescue JSON::ParserError
        nil
      end
      puma_logs = after_json.select { |h| h["src"] == "puma" }
      # Expect exactly 2 structured logs: start, shutdown
      assert_equal 2, puma_logs.length, "Expected exactly 2 Puma logs. Output: #{output}\nSTDERR: #{stderr_output}"
      events = puma_logs.map { |h| h["evt"] }
      assert_equal ["start", "shutdown"], events, "Expected Puma events in order: start, shutdown"

      start = puma_logs[0]
      assert_equal "puma", start["src"]
      assert_equal "info", start["lvl"]
      assert_equal "single", start["mode"]
      assert_equal "test", start["environment"]
      assert_kind_of Integer, start["pid"]
      assert_kind_of Array, start["listening_addresses"]
      assert start["listening_addresses"].any? { |a| a.include?(":#{port}") }, "Expected listening address to include :#{port}"

      shutdown = puma_logs[1]
      assert_equal "puma", shutdown["src"]
      assert_equal "info", shutdown["lvl"]
      assert_kind_of Integer, shutdown["pid"]
    end
  end
end
# typed: true
- 1
require "test_helper"
- 1
require "open3"
- 1
require "timeout"
- 1
require "fileutils"
# Runs a nested `rails test` subprocess and verifies that, in the test
# environment, structured JSON logs are written to log/test.log and NOT
# to stdout.
class TestLoggingIntegrationTest < ActiveSupport::TestCase
  def test_test_logs_go_to_file_not_stdout
    # Clean up log file before test
    log_file = Rails.root.join("log/test.log")
    FileUtils.rm_f(log_file)
    FileUtils.touch(log_file)
    env = {
      "LOGSTRUCT_ENABLED" => "true",
      "RAILS_ENV" => "test"
    }
    # Run a simple test that will generate logs
    cmd = ["bundle", "exec", "rails", "test", "test/models/user_test.rb"]
    Open3.popen3(env, *cmd, chdir: Rails.root.to_s) do |_stdin, stdout, stderr, wait_thr|
      begin
        Timeout.timeout(30) do
          wait_thr.value # Wait for process to complete
        end
      rescue Timeout::Error
        begin
          Process.kill("TERM", wait_thr.pid)
        rescue
          nil
        end
        flunk "Test process timed out"
      end
      stdout_output = stdout.read
      # BUG FIX: the original read stderr and discarded the result. Keep
      # it so a failing run can show what the subprocess printed.
      stderr_output = stderr.read
      # Check that stdout doesn't contain JSON logs
      json_lines_in_stdout = stdout_output.lines.select { |line|
        line.strip.start_with?("{") && begin
          JSON.parse(line)
        rescue
          nil
        end
      }
      assert_equal 0,
        json_lines_in_stdout.length,
        "Expected no JSON logs in stdout, but found #{json_lines_in_stdout.length} lines. First few:\n#{json_lines_in_stdout.first(3).join}\nSTDERR: #{stderr_output}"
      # Check that log/test.log contains JSON logs
      assert_path_exists log_file, "Expected log/test.log to exist"
      log_contents = File.read(log_file)
      json_lines_in_file = log_contents.lines.select { |line|
        line.strip.start_with?("{") && begin
          JSON.parse(line)
        rescue
          nil
        end
      }
      assert_operator json_lines_in_file.length, :>, 0, "Expected JSON logs in log/test.log, but found none. File size: #{log_contents.bytesize} bytes"
      # Verify at least one structured log exists
      parsed_logs = json_lines_in_file.map { |line| JSON.parse(line) }
      assert parsed_logs.any? { |log| log["src"] && log["evt"] && log["lvl"] },
        "Expected at least one properly structured log in log/test.log"
    end
  ensure
    # Clean up
    FileUtils.rm_f(log_file) if log_file
  end
end
# typed: true
- 1
require "test_helper"
# Minimal fixture test: its only job is to emit a log line so the
# nested-test-run integration test has something to find in log/test.log.
class UserTest < ActiveSupport::TestCase
  test "simple test that generates logs" do # rubocop:disable Minitest/NoAssertions
    # No assertions by design — we only need the side effect of logging.
    message = "Test log message"
    Rails.logger.info(message)
  end
end
# typed: true
- 1
require "simplecov" unless defined?(SimpleCov)
- 1
require "simplecov-json"
- 1
require "sorbet-runtime"
- 1
require "debug"
# Start coverage collection before any application code loads so that every
# file required afterwards is tracked. Guarded so a parent process that
# already started SimpleCov is not reconfigured.
- 1
unless SimpleCov.running
SimpleCov.formatters = [
SimpleCov::Formatter::HTMLFormatter,
SimpleCov::Formatter::JSONFormatter
]
SimpleCov.start do
# Sorbet: give the SimpleCov DSL block its proper receiver type.
T.bind(self, T.all(SimpleCov::Configuration, Kernel))
# Measure coverage against the gem root, not the embedded Rails test app.
gem_path = File.expand_path("../../../../", __FILE__)
SimpleCov.root(gem_path)
add_filter "rails_test_app"
coverage_dir "coverage_rails"
enable_coverage :branch
primary_coverage :branch
end
# Emit the coverage result when the test process exits.
SimpleCov.at_exit do
SimpleCov.result
end
end
# Require logstruct after starting SimpleCov
- 1
require "logstruct"
- 1
ENV["RAILS_ENV"] ||= "test"
# Boot the Rails app before rails/test_help, which expects a loaded app.
- 1
require_relative "../config/environment"
- 1
require "rails/test_help"
- 1
require "minitest/reporters"
# Configure colorful test output
- 1
Minitest::Reporters.use! Minitest::Reporters::SpecReporter.new
# Configure the test database
- 1
class ActiveSupport::TestCase
# Setup all fixtures in test/fixtures/*.yml for all tests in alphabetical order.
# fixtures :all
# Add more helper methods to be used by all tests here...
# Helper method to run jobs synchronously
- 1
def perform_enqueued_jobs
jobs = ActiveJob::Base.queue_adapter.enqueued_jobs
jobs.each do |job|
ActiveJob::Base.execute job
end
end
end
# Ensure LogStruct is enabled and emits JSON in tests across Rails versions
begin
- 1
LogStruct.configure do |config|
- 1
config.enabled = true
# Prefer production-style JSON in development/test
- 1
config.prefer_json_in_development = true
end
rescue NameError
# LogStruct not loaded; ignore
end