All Files ( 83.53% covered at 35.8 hits/line )
151 files in total.
5269 relevant lines,
4401 lines covered and
868 lines missed.
(83.53%)
-
# typed: strict
-
# frozen_string_literal: true
-
-
# Core library files
-
1
require "log_struct/sorbet"
-
1
require "log_struct/version"
-
1
require "log_struct/enums"
-
1
require "log_struct/configuration"
-
1
require "log_struct/formatter"
-
1
require "log_struct/railtie"
-
1
require "log_struct/concerns/error_handling"
-
1
require "log_struct/concerns/configuration"
-
1
require "log_struct/concerns/logging"
-
-
# Require integrations
-
1
require "log_struct/integrations"
-
-
# SemanticLogger integration - core feature for high-performance logging
-
1
require "log_struct/semantic_logger/formatter"
-
1
require "log_struct/semantic_logger/color_formatter"
-
1
require "log_struct/semantic_logger/logger"
-
1
require "log_struct/semantic_logger/setup"
-
1
require "log_struct/rails_boot_banner_silencer"
-
-
# Monkey patches for Rails compatibility
-
1
require "log_struct/monkey_patches/active_support/tagged_logging/formatter"
-
-
1
module LogStruct
  extend T::Sig

  # Process-wide flag: set to true when this process is known to be a
  # server run. Read by Concerns::Configuration#logstruct_server_mode?.
  @server_mode = T.let(false, T::Boolean)

  # Base error class for the gem.
  class Error < StandardError; end

  # Expose the concerns' class methods directly on LogStruct
  # (LogStruct.config, LogStruct.info, LogStruct.handle_exception, ...).
  extend Concerns::ErrorHandling::ClassMethods
  extend Concerns::Configuration::ClassMethods
  extend Concerns::Logging::ClassMethods

  sig { returns(T::Boolean) }
  def self.server_mode?
    @server_mode
  end

  sig { params(value: T::Boolean).void }
  def self.server_mode=(value)
    @server_mode = value
  end

  # Set enabled at require time based on current Rails environment.
  # (Users can override this in their initializer which runs before the Railtie checks enabled)
  set_enabled_from_rails_env!

  # Silence Rails boot banners for cleaner server output
  LogStruct::RailsBootBannerSilencer.install!

  # Patch Puma immediately for server runs so we can convert its lifecycle
  # messages into structured logs reliably.
  if ARGV.include?("server")
    begin
      require "log_struct/integrations/puma"
      LogStruct::Integrations::Puma.install_patches!

      # Patches installed now; Rack handler patch covers server boot path
    rescue => e
      # Surface patch failures loudly in the test environment; otherwise route
      # through the configured error-handling mode for the Puma source.
      if defined?(::Rails) && ::Rails.respond_to?(:env) && ::Rails.env.test?
        raise e
      else
        LogStruct.handle_exception(e, source: LogStruct::Source::Puma)
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
module LogStruct
  # Collects structured logs during very early boot before the logger is ready.
  module BootBuffer
    extend T::Sig

    # Class-instance variable instead of a class variable (@@logs): class
    # variables are shared across the inheritance tree and are an easy source
    # of surprising state. Behavior is identical here because BootBuffer is a
    # module accessed only via its own singleton methods.
    @logs = T.let([], T::Array[LogStruct::Log::Interfaces::CommonFields])

    # Buffer a structured log entry until the real logger is available.
    sig { params(log: LogStruct::Log::Interfaces::CommonFields).void }
    def self.add(log)
      @logs << log
    end

    # Emit every buffered entry at info level, then empty the buffer.
    # No-op when nothing was buffered.
    sig { void }
    def self.flush
      return if @logs.empty?
      @logs.each { |l| LogStruct.info(l) }
      @logs.clear
    end

    # Discard buffered entries without logging them.
    sig { void }
    def self.clear
      @logs.clear
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require_relative "../configuration"
-
-
1
module LogStruct
  module Concerns
    # Concern that provides LogStruct's configuration API: access to the
    # singleton Configuration, enable/disable heuristics run at require time,
    # and merging of Rails' filter_parameters into LogStruct's filters.
    module Configuration
      module ClassMethods
        extend T::Sig

        CONSOLE_COMMAND_ARGS = T.let(["console", "c"].freeze, T::Array[String])
        EMPTY_ARGV = T.let([].freeze, T::Array[String])
        CI_FALSE_VALUES = T.let(["false", "0", "no"].freeze, T::Array[String])

        # Yield the singleton configuration for mutation in an initializer:
        # LogStruct.configure { |config| config.enabled = false }
        sig { params(block: T.proc.params(config: LogStruct::Configuration).void).void }
        def configure(&block)
          yield(config)
        end

        sig { returns(LogStruct::Configuration) }
        def config
          LogStruct::Configuration.instance
        end

        # (Can't use alias_method since this module is extended into LogStruct)
        sig { returns(LogStruct::Configuration) }
        def configuration
          config
        end

        # Setter method to replace the configuration (for testing purposes)
        sig { params(config: LogStruct::Configuration).void }
        def configuration=(config)
          LogStruct::Configuration.set_instance(config)
        end

        sig { returns(T::Boolean) }
        def enabled?
          config.enabled
        end

        sig { void }
        def set_enabled_from_rails_env!
          # Set enabled based on current Rails environment and the LOGSTRUCT_ENABLED env var.
          # Precedence:
          # 1. Check if LOGSTRUCT_ENABLED env var is defined (not an empty string)
          #    - Sets enabled=true only when value is "true", "yes", "1", etc.
          #    - Sets enabled=false when value is any other value
          # 2. Otherwise, check if current Rails environment is in enabled_environments
          #    AND one of: a server process is detected, OR test environment
          #    BUT NOT Rails::Console (to exclude interactive console)
          # 3. Otherwise, leave as config.enabled (defaults to true)
          config.enabled = if ENV["LOGSTRUCT_ENABLED"]
            %w[true t yes y 1].include?(ENV["LOGSTRUCT_ENABLED"]&.strip&.downcase)
          else
            is_console = console_process?
            is_server = server_process?
            in_enabled_env = config.enabled_environments.include?(::Rails.env.to_sym)

            # NOTE(review): a `ci_build?` call used to sit here with its result
            # discarded, and an earlier comment implied CI=true should gate the
            # test-env branch. The dead call was removed without changing
            # behavior — confirm whether `::Rails.env.test?` should also
            # require ci_build? before tightening this condition.
            in_enabled_env && !is_console && (is_server || ::Rails.env.test?)
          end
        end

        sig { returns(T::Boolean) }
        def is_local?
          config.local_environments.include?(::Rails.env.to_sym)
        end

        sig { returns(T::Boolean) }
        def is_production?
          !is_local?
        end

        # Merge Rails' config.filter_parameters into LogStruct's filters:
        # - Regexps and callables become FilterMatcher entries (deduped by label)
        # - Symbols/strings become filter_keys entries
        # - Anything unrecognized is left behind in Rails' filter_parameters
        sig { void }
        def merge_rails_filter_parameters!
          return unless ::Rails.application.config.respond_to?(:filter_parameters)

          rails_filter_params = ::Rails.application.config.filter_parameters
          return unless rails_filter_params.is_a?(Array)
          return if rails_filter_params.empty?

          symbol_filters = T.let([], T::Array[Symbol])
          matchers = T.let([], T::Array[ConfigStruct::FilterMatcher])
          leftovers = T.let([], T::Array[T.untyped])

          rails_filter_params.each do |entry|
            matcher = build_filter_matcher(entry)

            if matcher
              matchers << matcher
              next
            end

            normalized_symbol = normalize_filter_symbol(entry)
            if normalized_symbol
              symbol_filters << normalized_symbol
            else
              leftovers << entry
            end
          end

          if symbol_filters.any?
            config.filters.filter_keys |= symbol_filters
          end

          if matchers.any?
            matchers.each do |matcher|
              # Dedupe by label so repeated merges don't register duplicates
              existing = config.filters.filter_matchers.any? do |registered|
                registered.label == matcher.label
              end
              config.filters.filter_matchers << matcher unless existing
            end
          end

          replace_filter_parameters(rails_filter_params, leftovers)
        end

        private

        sig { returns(T::Boolean) }
        def console_process?
          return true if defined?(::Rails::Console)

          current_argv.any? { |arg| CONSOLE_COMMAND_ARGS.include?(arg) }
        end

        # True when any known app-server (or Sidekiq) is detected in-process.
        sig { returns(T::Boolean) }
        def server_process?
          return true if logstruct_server_mode?
          return true if puma_server?
          return true if defined?(::Unicorn::HttpServer)
          return true if defined?(::Thin::Server)
          return true if defined?(::Falcon::Server)
          return true if defined?(::Rails::Server)
          return true if sidekiq_server?

          false
        end

        sig { returns(T::Boolean) }
        def puma_server?
          # Just checking defined?(::Puma::Server) is not reliable - Puma might be installed
          # but not running. Check $PROGRAM_NAME and ARGV to verify we're actually running puma.
          # ARGV check is needed when running through wrapper scripts like gosu.
          return true if $PROGRAM_NAME.include?("puma")
          return true if current_argv.any? { |arg| arg.include?("puma") }

          false
        end

        sig { returns(T::Boolean) }
        def sidekiq_server?
          !!(defined?(::Sidekiq) && ::Sidekiq.respond_to?(:server?) && ::Sidekiq.server?)
        end

        sig { returns(T::Boolean) }
        def logstruct_server_mode?
          ::LogStruct.server_mode?
        end

        # ARGV coerced to strings; EMPTY_ARGV if ::ARGV is not defined
        # (defensive — possible in embedded interpreters).
        sig { returns(T::Array[String]) }
        def current_argv
          raw = ::ARGV
          strings = raw.map { |arg| arg.to_s }
          T.let(strings, T::Array[String])
        rescue NameError
          EMPTY_ARGV
        end

        # True when the CI env var is set to anything other than blank or an
        # explicit falsy value ("false", "0", "no").
        sig { returns(T::Boolean) }
        def ci_build?
          value = ENV["CI"]
          return false if value.nil?

          normalized = value.strip.downcase
          return false if normalized.empty?

          !CI_FALSE_VALUES.include?(normalized)
        end

        # Coerce a Rails filter_parameters entry into a Symbol key, or nil if
        # it is not symbol-like (such entries become matchers or leftovers).
        sig { params(filter: T.untyped).returns(T.nilable(Symbol)) }
        def normalize_filter_symbol(filter)
          return filter if filter.is_a?(Symbol)
          return filter.downcase.to_sym if filter.is_a?(String)

          return nil unless filter.respond_to?(:to_sym)

          begin
            sym = filter.to_sym
            sym.is_a?(Symbol) ? sym : nil
          rescue
            nil
          end
        end

        # Wrap a Regexp or callable entry in a FilterMatcher; returns nil for
        # entries that should be treated as plain key symbols instead.
        sig { params(filter: T.untyped).returns(T.nilable(ConfigStruct::FilterMatcher)) }
        def build_filter_matcher(filter)
          case filter
          when ::Regexp
            callable = Kernel.lambda do |key, _value|
              filter.match?(key)
            end
            return ConfigStruct::FilterMatcher.new(callable: callable, label: filter.inspect)
          else
            return build_callable_filter_matcher(filter) if callable_filter?(filter)
          end

          nil
        end

        sig { params(filter: T.untyped).returns(T::Boolean) }
        def callable_filter?(filter)
          filter.respond_to?(:call)
        end

        # Adapt an arbitrary callable (arity 0, 1, or 2+/variadic) into a
        # FilterMatcher. Errors raised by the callable are reported through
        # handle_filter_error and treated as "no match".
        sig { params(filter: T.untyped).returns(T.nilable(ConfigStruct::FilterMatcher)) }
        def build_callable_filter_matcher(filter)
          callable = Kernel.lambda do |key, value|
            call_args = case arity_for_filter(filter)
            when 0
              []
            when 1
              [key]
            else
              # Also covers negative arity (optional/splat parameters)
              [key, value]
            end

            result = filter.call(*call_args)
            !!result
          rescue ArgumentError
            # Arity detection was wrong; retry with just the key
            begin
              !!filter.call(key)
            rescue => e
              handle_filter_error(e, filter, key)
              false
            end
          rescue => e
            handle_filter_error(e, filter, key)
            false
          end
          ConfigStruct::FilterMatcher.new(callable: callable, label: filter.inspect)
        end

        sig { params(filter: T.untyped).returns(Integer) }
        def arity_for_filter(filter)
          filter.respond_to?(:arity) ? filter.arity : 2
        end

        # Mutate Rails' filter_parameters array in place, keeping only the
        # entries LogStruct could not absorb.
        sig { params(filter_params: T::Array[T.untyped], leftovers: T::Array[T.untyped]).void }
        def replace_filter_parameters(filter_params, leftovers)
          filter_params.clear
          filter_params.concat(leftovers)
        end

        # Report a filter callable's failure with enough context to find the
        # offending entry. filter.inspect itself may raise, hence the rescue.
        sig { params(error: StandardError, filter: T.untyped, key: String).void }
        def handle_filter_error(error, filter, key)
          context = {
            filter: filter.class.name,
            key: key,
            filter_label: begin
              filter.inspect
            rescue
              "unknown"
            end
          }

          LogStruct.handle_exception(error, source: Source::Internal, context: context)
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
module LogStruct
  module Concerns
    # Concern for handling errors according to configured modes
    module ErrorHandling
      module ClassMethods
        extend T::Sig
        extend T::Helpers

        # Needed for raise
        requires_ancestor { Module }

        # Get the error handling mode for a given source.
        # Type-checking, internal (LogStruct) and security errors each have a
        # dedicated configurable mode; every other source shares standard_errors.
        sig { params(source: Source).returns(ErrorHandlingMode) }
        def error_handling_mode_for(source)
          config = LogStruct.config

          # Use a case statement for type-safety
          case source
          when Source::TypeChecking
            config.error_handling_modes.type_checking_errors
          when Source::Internal
            config.error_handling_modes.logstruct_errors
          when Source::Security
            config.error_handling_modes.security_errors
          when Source::Rails, Source::App, Source::Job, Source::Storage, Source::Mailer,
            Source::Shrine, Source::CarrierWave, Source::Sidekiq, Source::Dotenv, Source::Puma
            config.error_handling_modes.standard_errors
          else
            # Ensures the case statement is exhaustive
            T.absurd(source)
          end
        end

        # Log an error with structured data
        sig { params(error: StandardError, source: Source, context: T.nilable(T::Hash[Symbol, T.untyped])).void }
        def log_error(error, source:, context: nil)
          # Create structured log entry
          error_log = Log.from_exception(source, error, context || {})
          LogStruct.error(error_log)
        end

        # Report an error using the configured handler or MultiErrorReporter.
        # Always logs first, then reports.
        sig { params(error: StandardError, source: Source, context: T.nilable(T::Hash[Symbol, T.untyped])).void }
        def log_and_report_error(error, source:, context: nil)
          log_error(error, source: source, context: context)
          error_handler = LogStruct.config.error_reporting_handler
          if error_handler
            # Use the configured handler
            error_handler.call(error, context, source)
          else
            # Fall back to MultiErrorReporter (detects Sentry, Bugsnag, etc.)
            LogStruct::MultiErrorReporter.report_error(error, context || {})
          end
        end

        # Handle an error according to the configured error handling mode (log, report, raise, etc)
        sig { params(error: StandardError, source: Source, context: T.nilable(T::Hash[Symbol, T.untyped])).void }
        def handle_exception(error, source:, context: nil)
          mode = error_handling_mode_for(source)

          # Log / report in production, raise locally (dev/test).
          # NOTE: this raise happens BEFORE the case below, so in non-production
          # the *Production modes never reach the log/report branches.
          if mode == ErrorHandlingMode::LogProduction || mode == ErrorHandlingMode::ReportProduction
            raise(error) if !LogStruct.is_production?
          end

          case mode
          when ErrorHandlingMode::Ignore
            # Do nothing

          when ErrorHandlingMode::Raise
            raise(error)

          when ErrorHandlingMode::Log, ErrorHandlingMode::LogProduction
            log_error(error, source: source, context: context)

          when ErrorHandlingMode::Report, ErrorHandlingMode::ReportProduction
            log_and_report_error(error, source: source, context: context)

          else
            # Ensures the case statement is exhaustive
            T.absurd(mode)
          end
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require_relative "../log"
-
-
1
module LogStruct
  module Concerns
    # Concern providing severity-level helpers that forward structured log
    # objects to Rails.logger (debug/info/warn/error/fatal).
    module Logging
      module ClassMethods
        extend T::Sig

        # Log a log struct at debug level
        sig { params(log: T.any(Log::Interfaces::CommonFields, Log::Interfaces::PublicCommonFields)).void }
        def debug(log)
          Rails.logger.debug(log)
        end

        # Log a log struct at info level
        sig { params(log: T.any(Log::Interfaces::CommonFields, Log::Interfaces::PublicCommonFields)).void }
        def info(log)
          Rails.logger.info(log)
        end

        # Log a log struct at warn level
        sig { params(log: T.any(Log::Interfaces::CommonFields, Log::Interfaces::PublicCommonFields)).void }
        def warn(log)
          Rails.logger.warn(log)
        end

        # Log a log struct at error level
        sig { params(log: T.any(Log::Interfaces::CommonFields, Log::Interfaces::PublicCommonFields)).void }
        def error(log)
          Rails.logger.error(log)
        end

        # Log a log struct at fatal level
        sig { params(log: T.any(Log::Interfaces::CommonFields, Log::Interfaces::PublicCommonFields)).void }
        def fatal(log)
          Rails.logger.fatal(log)
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
module LogStruct
  module ConfigStruct
    # Per-category error-handling configuration, consumed by
    # Concerns::ErrorHandling#error_handling_mode_for.
    class ErrorHandlingModes < T::Struct
      include Sorbet::SerializeSymbolKeys

      # How to handle different types of errors
      # Modes:
      # - Ignore - Ignore the error
      # - Log - Log the error
      # - Report - Log and report to error tracking service (but don't crash)
      # - LogProduction - Log error in production, raise locally (dev/test)
      # - ReportProduction - Report error in production, raise locally (dev/test)
      # - Raise - Always raise the error

      # Configurable error handling categories
      prop :type_checking_errors, ErrorHandlingMode, default: ErrorHandlingMode::LogProduction
      prop :logstruct_errors, ErrorHandlingMode, default: ErrorHandlingMode::LogProduction
      prop :security_errors, ErrorHandlingMode, default: ErrorHandlingMode::Report
      prop :standard_errors, ErrorHandlingMode, default: ErrorHandlingMode::Raise
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
module LogStruct
  module ConfigStruct
    # A predicate over (key, value) pairs built from a Rails filter_parameters
    # entry (Regexp or callable). `label` is used for deduplication.
    class FilterMatcher < T::Struct
      extend T::Sig

      const :callable, T.proc.params(key: String, value: T.untyped).returns(T::Boolean)
      const :label, String

      sig { params(key: String, value: T.untyped).returns(T::Boolean) }
      def matches?(key, value)
        callable.call(key, value)
      end
    end

    class Filters < T::Struct
      include Sorbet::SerializeSymbolKeys

      # Keys that should be filtered in nested structures such as request params and job arguments.
      # Filtered data includes information about Hashes and Arrays.
      #
      # { _filtered: {
      #     _class: "Hash",               # Class of the filtered value
      #     _bytes: 1234,                 # Length of JSON string in bytes
      #     _keys_count: 3,               # Number of keys in the hash
      #     _keys: [:key1, :key2, :key3], # First 10 keys in the hash
      # } }
      #
      # Default: [:password, :password_confirmation, :pass, :pw, :token, :secret,
      #   :credentials, :auth, :authentication, :authorization,
      #   :credit_card, :ssn, :social_security]
      #
      prop :filter_keys,
        T::Array[Symbol],
        factory: -> {
          %i[
            password password_confirmation pass pw token secret
            credentials auth authentication authorization
            credit_card ssn social_security
          ]
        }

      # Keys where string values should include an SHA256 hash.
      # Useful for tracing emails across requests (e.g. sign in, sign up) while protecting privacy.
      # Default: [:email, :email_address]
      prop :filter_keys_with_hashes,
        T::Array[Symbol],
        factory: -> { %i[email email_address] }

      # Hash salt for SHA256 hashing (typically used for email addresses)
      # Used for both param filters and string scrubbing
      # Default: "l0g5t0p"
      prop :hash_salt, String, default: "l0g5t0p"

      # Hash length for SHA256 hashing (typically used for email addresses)
      # Used for both param filters and string scrubbing
      # Default: 12
      prop :hash_length, Integer, default: 12

      # Filter email addresses. Also controls email filtering for the ActionMailer integration
      # (to, from, recipient fields, etc.)
      # Default: true
      prop :email_addresses, T::Boolean, default: true

      # Filter URL passwords
      # Default: true
      prop :url_passwords, T::Boolean, default: true

      # Filter credit card numbers
      # Default: true
      prop :credit_card_numbers, T::Boolean, default: true

      # Filter phone numbers
      # Default: true
      prop :phone_numbers, T::Boolean, default: true

      # Filter social security numbers
      # Default: true
      prop :ssns, T::Boolean, default: true

      # Filter IP addresses
      # Default: false
      prop :ip_addresses, T::Boolean, default: false

      # Filter MAC addresses
      # Default: false
      prop :mac_addresses, T::Boolean, default: false

      # Additional filter matchers built from Rails filter_parameters entries that aren't simple symbols.
      # Each matcher receives the key (String) and optional value, returning true when the pair should be filtered.
      prop :filter_matchers,
        T::Array[FilterMatcher],
        factory: -> { [] }
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require "active_support/notifications"
-
-
1
module LogStruct
  module ConfigStruct
    # Feature flags (and a few tuning knobs) for each framework/gem
    # integration LogStruct can install.
    class Integrations < T::Struct
      include Sorbet::SerializeSymbolKeys

      # Enable or disable Sorbet error handler integration
      # Default: true
      prop :enable_sorbet_error_handlers, T::Boolean, default: true

      # Enable or disable Lograge integration
      # Default: true
      prop :enable_lograge, T::Boolean, default: true

      # Custom options for Lograge
      # Default: nil
      prop :lograge_custom_options, T.nilable(Handlers::LogrageCustomOptions), default: nil

      # Enable or disable ActionMailer integration
      # Default: true
      prop :enable_actionmailer, T::Boolean, default: true

      # Map instance variables on mailer to ID fields in additional_data
      # Default: { account: :account_id, user: :user_id }
      # Example: { organization: :org_id, company: :company_id }
      prop :actionmailer_id_mapping, T::Hash[Symbol, Symbol], factory: -> { {account: :account_id, user: :user_id} }

      # Enable or disable host authorization logging
      # Default: true
      prop :enable_host_authorization, T::Boolean, default: true

      # Enable or disable ActiveJob integration
      # Default: true
      prop :enable_activejob, T::Boolean, default: true

      # Enable or disable Rack middleware
      # Default: true
      prop :enable_rack_error_handler, T::Boolean, default: true

      # Enable or disable Sidekiq integration
      # Default: true
      prop :enable_sidekiq, T::Boolean, default: true

      # Enable or disable Shrine integration
      # Default: true
      prop :enable_shrine, T::Boolean, default: true

      # Enable or disable ActiveStorage integration
      # Default: true
      prop :enable_activestorage, T::Boolean, default: true

      # Enable or disable CarrierWave integration
      # Default: true
      prop :enable_carrierwave, T::Boolean, default: true

      # Enable or disable GoodJob integration
      # Default: true
      prop :enable_goodjob, T::Boolean, default: true

      # Enable SemanticLogger integration for high-performance logging
      # Default: true
      prop :enable_semantic_logger, T::Boolean, default: true

      # Enable SQL query logging through ActiveRecord instrumentation
      # Default: false (can be resource intensive)
      prop :enable_sql_logging, T::Boolean, default: false

      # Only log SQL queries slower than this threshold (in milliseconds)
      # Set to 0 or nil to log all queries
      # Default: 100.0 (log queries taking >100ms)
      prop :sql_slow_query_threshold, T.nilable(Float), default: 100.0

      # Include bind parameters in SQL logs (disable in production for security)
      # Default: true in development/test, false in production
      prop :sql_log_bind_params, T::Boolean, factory: -> { !defined?(::Rails) || !::Rails.respond_to?(:env) || !::Rails.env.production? }

      # Enable Ahoy (analytics events) integration
      # Default: true (safe no-op unless Ahoy is defined)
      prop :enable_ahoy, T::Boolean, default: true

      # Enable ActiveModelSerializers integration
      # Default: true (safe no-op unless ActiveModelSerializers is defined)
      prop :enable_active_model_serializers, T::Boolean, default: true

      # Enable dotenv-rails integration (convert to structured logs)
      # Default: true
      prop :enable_dotenv, T::Boolean, default: true

      # Enable Puma integration (convert server lifecycle logs)
      # Default: true
      prop :enable_puma, T::Boolean, default: true
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require_relative "handlers"
-
1
require_relative "config_struct/error_handling_modes"
-
1
require_relative "config_struct/integrations"
-
1
require_relative "config_struct/filters"
-
-
1
module LogStruct
  # Core configuration class that provides a type-safe API
  class Configuration < T::Struct
    extend T::Sig

    include Sorbet::SerializeSymbolKeys

    # -------------------------------------------------------------------------------------
    # Props
    # -------------------------------------------------------------------------------------

    # Master switch; adjusted at require time by set_enabled_from_rails_env!
    prop :enabled, T::Boolean, default: true
    prop :enabled_environments, T::Array[Symbol], factory: -> { [:test, :production] }
    prop :local_environments, T::Array[Symbol], factory: -> { [:development, :test] }

    # Prefer production-style JSON in development when LogStruct is enabled
    prop :prefer_json_in_development, T::Boolean, default: true

    # Enable colorful human formatter in development
    prop :enable_color_output, T::Boolean, default: true

    # Custom color map for the color formatter
    prop :color_map, T.nilable(T::Hash[Symbol, Symbol]), default: nil

    # Filter noisy loggers (ActionView, etc.)
    prop :filter_noisy_loggers, T::Boolean, default: false

    const :integrations, ConfigStruct::Integrations, factory: -> { ConfigStruct::Integrations.new }
    const :filters, ConfigStruct::Filters, factory: -> { ConfigStruct::Filters.new }

    # Custom log scrubbing handler for any additional string scrubbing
    # Default: nil
    prop :string_scrubbing_handler, T.nilable(Handlers::StringScrubber)

    # Custom handler for error reporting
    # Default: Errors are handled by MultiErrorReporter
    # (auto-detects Sentry, Bugsnag, Rollbar, Honeybadger, etc.)
    prop :error_reporting_handler, T.nilable(Handlers::ErrorReporter), default: nil

    # How to handle errors from various sources
    const :error_handling_modes,
      ConfigStruct::ErrorHandlingModes,
      factory: -> {
        ConfigStruct::ErrorHandlingModes.new
      }

    # -------------------------------------------------------------------------------------
    # Class Methods
    # -------------------------------------------------------------------------------------

    # Class-instance variable (not a class variable) holding the singleton
    @instance = T.let(nil, T.nilable(Configuration))

    # Lazily-built process-wide singleton.
    # NOTE(review): first call is not synchronized — assumed to first run
    # during single-threaded boot; confirm if used from threads earlier.
    sig { returns(Configuration) }
    def self.instance
      @instance ||= T.let(Configuration.new, T.nilable(Configuration))
    end

    # Replace the singleton (used by Concerns::Configuration#configuration=,
    # primarily for tests).
    sig { params(config: Configuration).void }
    def self.set_instance(config)
      @instance = config
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# Require all enums in this directory
-
1
require_relative "enums/error_handling_mode"
-
1
require_relative "enums/error_reporter"
-
1
require_relative "enums/event"
-
1
require_relative "enums/level"
-
1
require_relative "enums/source"
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
module LogStruct
  # Enum for error handling modes (see ConfigStruct::ErrorHandlingModes and
  # Concerns::ErrorHandling#handle_exception for how each mode is applied).
  class ErrorHandlingMode < T::Enum
    enums do
      # Always ignore the error
      Ignore = new(:ignore)
      # Always log the error
      Log = new(:log)
      # Always report to tracking service and continue
      Report = new(:report)
      # Log in production, raise locally (dev/test)
      LogProduction = new(:log_production)
      # Report in production, raise locally (dev/test)
      ReportProduction = new(:report_production)
      # Always raise regardless of environment
      Raise = new(:raise)
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
module LogStruct
  # Enum of known error-reporting backends.
  class ErrorReporter < T::Enum
    enums do
      # Fallback: write errors via Rails.logger
      RailsLogger = new(:rails_logger)
      Sentry = new(:sentry)
      Bugsnag = new(:bugsnag)
      Rollbar = new(:rollbar)
      Honeybadger = new(:honeybadger)
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
module LogStruct
  # Define log event types as an enum
  class Event < T::Enum
    enums do
      # Plain log messages
      Log = new(:log)

      # Request events
      Request = new(:request)

      # Job events
      Enqueue = new(:enqueue)
      Schedule = new(:schedule)
      Start = new(:start)
      Finish = new(:finish)

      # File storage events (ActiveStorage, Shrine, CarrierWave, etc.)
      Upload = new(:upload)
      Download = new(:download)
      Delete = new(:delete)
      Metadata = new(:metadata)
      Exist = new(:exist)
      Stream = new(:stream)
      Url = new(:url)

      # Data generation events
      Generate = new(:generate)

      # Email events
      Delivery = new(:delivery)
      Delivered = new(:delivered)

      # Configuration / boot events
      Load = new(:load)
      Update = new(:update)
      Save = new(:save)
      Restore = new(:restore)

      # Server lifecycle (e.g., Puma)
      # Start already defined above
      Shutdown = new(:shutdown)

      # Security events
      IPSpoof = new(:ip_spoof)
      CSRFViolation = new(:csrf_violation)
      BlockedHost = new(:blocked_host)

      # Database events
      Database = new(:database)

      # Error events
      Error = new(:error)

      # Fallback when an event cannot be classified
      Unknown = new(:unknown)
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require "logger"
-
-
1
module LogStruct
  # Define log levels as an enum
  class Level < T::Enum
    extend T::Sig

    enums do
      # Standard log levels
      Debug = new(:debug)
      Info = new(:info)
      Warn = new(:warn)
      Error = new(:error)
      Fatal = new(:fatal)
      Unknown = new(:unknown)
    end

    # Convert a Level to the corresponding Logger integer constant
    sig { returns(Integer) }
    def to_severity_int
      case serialize
      when :debug then ::Logger::DEBUG
      when :info then ::Logger::INFO
      when :warn then ::Logger::WARN
      when :error then ::Logger::ERROR
      when :fatal then ::Logger::FATAL
      else ::Logger::UNKNOWN
      end
    end

    # Convert a string, symbol or integer severity to a Level.
    # nil maps to Unknown; Integers are interpreted as Logger constants.
    sig { params(severity: T.any(String, Symbol, Integer, NilClass)).returns(Level) }
    def self.from_severity(severity)
      return Unknown if severity.nil?
      return from_severity_int(severity) if severity.is_a?(Integer)
      from_severity_sym(severity.downcase.to_sym)
    end

    sig { params(severity: Symbol).returns(Level) }
    def self.from_severity_sym(severity)
      # Re-normalizes defensively even though from_severity downcases already
      case severity.to_s.downcase.to_sym
      when :debug then Debug
      when :info then Info
      when :warn then Warn
      when :error then Error
      when :fatal then Fatal
      else Unknown
      end
    end

    sig { params(severity: Integer).returns(Level) }
    def self.from_severity_int(severity)
      case severity
      when ::Logger::DEBUG then Debug
      when ::Logger::INFO then Info
      when ::Logger::WARN then Warn
      when ::Logger::ERROR then Error
      when ::Logger::FATAL then Fatal
      else Unknown
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# NOTE:
-
# - This enum defines human‑readable field names (constants) that map to compact
-
# JSON key symbols via `serialize` (e.g., Database => :db).
-
# - The enum constant names are code‑generated into
-
# `schemas/meta/log-fields.json` by `scripts/generate_structs.rb` and
-
# referenced from `schemas/meta/log-source-schema.json` to strictly validate
-
# field keys in `schemas/log_sources/*`.
-
# - When adding or renaming fields here, run the generator so schema validation
-
# stays in sync.
-
#
-
# Use human-readable field names as the enum values and short field names for the JSON properties
-
-
1
module LogStruct
  # Human-readable field names mapping to compact JSON key symbols via
  # `serialize` (e.g. Database => :db). See the NOTE above: constant names are
  # code-generated into schemas/meta/log-fields.json — re-run the generator
  # after adding or renaming fields.
  class LogField < T::Enum
    enums do
      # Shared fields
      Source = new(:src)
      Event = new(:evt)
      Timestamp = new(:ts)
      Level = new(:lvl)

      # Common fields
      Message = new(:msg)
      Data = new(:data)

      # Request-related fields
      Path = new(:path)
      HttpMethod = new(:method) # property name was http_method
      SourceIp = new(:source_ip)
      UserAgent = new(:user_agent)
      Referer = new(:referer)
      RequestId = new(:request_id)

      # HTTP-specific fields
      Format = new(:format)
      Controller = new(:controller)
      Action = new(:action)
      Status = new(:status)
      # DurationMs already defined below for general metrics
      View = new(:view)
      Database = new(:db)
      Params = new(:params)

      # Security-specific fields
      BlockedHost = new(:blocked_host)
      BlockedHosts = new(:blocked_hosts)
      AllowedHosts = new(:allowed_hosts)
      AllowIpHosts = new(:allow_ip_hosts)
      ClientIp = new(:client_ip)
      XForwardedFor = new(:x_forwarded_for)

      # Email-specific fields
      To = new(:to)
      From = new(:from)
      Subject = new(:subject)
      MessageId = new(:msg_id)
      MailerClass = new(:mailer)
      MailerAction = new(:mailer_action)
      AttachmentCount = new(:attachments)

      # Error fields
      ErrorClass = new(:error_class)
      Backtrace = new(:backtrace)

      # Job-specific fields
      JobId = new(:job_id)
      JobClass = new(:job_class)
      QueueName = new(:queue_name)
      Arguments = new(:arguments)
      RetryCount = new(:retry_count)
      Retries = new(:retries)
      Attempt = new(:attempt)
      Executions = new(:executions)
      ExceptionExecutions = new(:exception_executions)
      ProviderJobId = new(:provider_job_id)
      ScheduledAt = new(:scheduled_at)
      StartedAt = new(:started_at)
      FinishedAt = new(:finished_at)
      DurationMs = new(:duration_ms)
      WaitMs = new(:wait_ms)
      # Deprecated: ExecutionTime/WaitTime/RunTime (superseded by DurationMs/WaitMs)
      ExecutionTime = new(:execution_time)
      WaitTime = new(:wait_time)
      RunTime = new(:run_time)
      Priority = new(:priority)
      CronKey = new(:cron_key)
      ErrorMessage = new(:error_message)
      Result = new(:result)
      EnqueueCaller = new(:enqueue_caller)

      # Dotenv fields
      File = new(:file)
      Vars = new(:vars)
      Snapshot = new(:snapshot)

      # Sidekiq-specific fields
      ProcessId = new(:pid)
      ThreadId = new(:tid)
      Context = new(:ctx)

      # Storage-specific fields (ActiveStorage)
      Checksum = new(:checksum)
      Exist = new(:exist)
      Url = new(:url)
      Prefix = new(:prefix)
      Range = new(:range)

      # Storage-specific fields (Shrine)
      Storage = new(:storage)
      Operation = new(:op)
      FileId = new(:file_id)
      Filename = new(:filename)
      MimeType = new(:mime_type)
      Size = new(:size)
      Metadata = new(:metadata)
      Location = new(:location)
      UploadOptions = new(:upload_opts)
      DownloadOptions = new(:download_opts)
      Options = new(:opts)
      Uploader = new(:uploader)

      # CarrierWave-specific fields
      Model = new(:model)
      MountPoint = new(:mount_point)
      Version = new(:version)
      StorePath = new(:store_path)
      Extension = new(:ext)

      # SQL-specific fields
      Sql = new(:sql)
      Name = new(:name)
      RowCount = new(:row_count)
      # Use Adapter (below) for both AMS and SQL adapter name
      BindParams = new(:bind_params)
      DatabaseName = new(:db_name)
      ConnectionPoolSize = new(:pool_size)
      ActiveConnections = new(:active_count)
      OperationType = new(:op_type)
      TableNames = new(:table_names)

      # ActiveModelSerializers fields
      Serializer = new(:serializer)
      Adapter = new(:adapter)
      ResourceClass = new(:resource_class)

      # Ahoy-specific fields
      AhoyEvent = new(:ahoy_event)
      Properties = new(:properties)

      # Puma / server lifecycle fields
      Mode = new(:mode)
      PumaVersion = new(:puma_version)
      PumaCodename = new(:puma_codename)
      RubyVersion = new(:ruby_version)
      MinThreads = new(:min_threads)
      MaxThreads = new(:max_threads)
      Environment = new(:environment)
      ListeningAddresses = new(:listening_addresses)
      Address = new(:addr)
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
module LogStruct
  # Combined Source class that unifies log and error sources.
  # Each value serializes to the symbol given to +new+ (e.g. Source::Rails
  # serializes as :rails), which becomes the :src field in structured logs.
  class Source < T::Enum
    enums do
      # Error sources
      TypeChecking = new(:type_checking) # For type checking errors (Sorbet)
      Security = new(:security) # Security-related events
      # Errors from LogStruct. (Cannot use LogStruct here because it confuses tapioca.)
      Internal = new(:logstruct)

      # Application sources
      Rails = new(:rails) # For request-related logs/errors
      Job = new(:job) # ActiveJob logs/errors
      Storage = new(:storage) # ActiveStorage logs/errors
      Mailer = new(:mailer) # ActionMailer logs/errors
      App = new(:app) # General application logs/errors

      # Third-party gem sources
      Shrine = new(:shrine)
      CarrierWave = new(:carrierwave)
      Sidekiq = new(:sidekiq)
      Dotenv = new(:dotenv)
      Puma = new(:puma)
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require "logger"
-
1
require "active_support/core_ext/object/blank"
-
1
require "json"
-
1
require "globalid"
-
1
require_relative "enums/source"
-
1
require_relative "enums/event"
-
1
require_relative "string_scrubber"
-
1
require_relative "log"
-
1
require_relative "param_filters"
-
1
require_relative "multi_error_reporter"
-
-
1
module LogStruct
-
1
class Formatter < ::Logger::Formatter
-
1
extend T::Sig
-
-
# Add current_tags method to support ActiveSupport::TaggedLogging
-
2
sig { returns(T::Array[String]) }
-
1
def current_tags
-
7
Thread.current[:activesupport_tagged_logging_tags] ||= []
-
end
-
-
# Add tagged method to support ActiveSupport::TaggedLogging
-
2
sig { params(tags: T::Array[String], blk: T.proc.params(formatter: Formatter).void).returns(T.untyped) }
-
1
def tagged(*tags, &blk)
-
1
new_tags = tags.flatten
-
1
current_tags.concat(new_tags) if new_tags.any?
-
1
yield self
-
ensure
-
1
current_tags.pop(new_tags.size) if new_tags&.any?
-
end
-
-
# Add clear_tags! method to support ActiveSupport::TaggedLogging
-
2
    # Resets this thread's tag stack to an empty array (part of the
    # ActiveSupport::TaggedLogging-compatible surface of this formatter).
    sig { void }
    def clear_tags!
      Thread.current[:activesupport_tagged_logging_tags] = []
    end
-
-
1
sig { params(tags: T::Array[String]).returns(T.untyped) }
-
1
def push_tags(*tags)
-
current_tags.concat(tags)
-
end
-
-
2
    # Scrub sensitive information out of a raw string before it is logged.
    # Thin delegation so all scrubbing rules live in one place (StringScrubber).
    sig { params(string: String).returns(String) }
    def scrub_string(string)
      # Use StringScrubber module to scrub sensitive information from strings
      StringScrubber.scrub(string)
    end
-
-
2
    # Recursively sanitize an arbitrary value for logging: filter sensitive
    # hash keys, scrub strings, truncate large arrays, convert GlobalID-capable
    # objects to their GlobalID, serialize enums, and format times as ISO8601.
    # Any processing error is reported via LogStruct.handle_exception and the
    # original value is returned unmodified (logging must never crash the app).
    sig { params(arg: T.untyped, recursion_depth: Integer).returns(T.untyped) }
    def process_values(arg, recursion_depth: 0)
      # Prevent infinite recursion in case any args have circular references
      # or are too deeply nested. Just return args.
      return arg if recursion_depth > 20

      case arg
      when Hash
        result = {}

        # Process each key-value pair
        arg.each do |key, value|
          # Check if this key should be filtered at any depth
          result[key] = if ParamFilters.should_filter_key?(key, value)
            # Filter the value, leaving a summary under :_filtered instead
            {_filtered: ParamFilters.summarize_json_attribute(key, value)}
          else
            # Process the value normally
            process_values(value, recursion_depth: recursion_depth + 1)
          end
        end

        result
      when Array
        # Arrays get backtrace detection + truncation in a dedicated helper
        process_array(arg, recursion_depth: recursion_depth)
      when GlobalID::Identification
        begin
          arg.to_global_id
        rescue
          # to_global_id can fail (e.g. unpersisted records); fall back below
          begin
            case arg
            when ActiveRecord::Base
              "#{arg.class}(##{arg.id})"
            else
              # For non-ActiveRecord objects that failed to_global_id, try to get a string representation
              # If this also fails, we want to catch it and return the error placeholder
              String(T.cast(arg, Object))
            end
          rescue => e
            LogStruct.handle_exception(e, source: Source::Internal)
            "[GLOBALID_ERROR]"
          end
        end
      when Source, Event
        arg.serialize
      when String
        scrub_string(arg)
      when Time
        arg.iso8601(3)
      else
        # Any other type (e.g. Symbol, Integer, Float, Boolean etc.)
        arg
      end
    rescue => e
      # Report error through LogStruct's framework, then return the value
      # as-is so a bad value never prevents the log line from being emitted.
      context = {
        processor_method: "process_values",
        value_type: arg.class.name,
        recursion_depth: recursion_depth
      }
      LogStruct.handle_exception(e, source: Source::Internal, context: context)
      arg
    end
-
-
2
    # Normalize any loggable value into a Hash with symbol keys. Structured
    # log objects serialize themselves; everything else is wrapped in a
    # Log::Plain whose message is a best-effort serialization of the value.
    sig { params(log_value: T.untyped, time: Time).returns(T::Hash[Symbol, T.untyped]) }
    def log_value_to_hash(log_value, time:)
      case log_value
      when Log::Interfaces::CommonFields
        # Our log classes all implement a custom #serialize method that use symbol keys
        log_value.serialize

      when T::Struct
        # Default T::Struct.serialize methods returns a hash with string keys, so convert them to symbols
        log_value.serialize.deep_symbolize_keys

      when Hash
        # Use hash as is and convert string keys to symbols
        log_value.dup.deep_symbolize_keys

      else
        # Create a Plain log with the message as a string and serialize it with symbol keys
        # log_value can be literally anything: Integer, Float, Boolean, NilClass, etc.
        log_message = case log_value
        # Handle all the basic types without any further processing
        when String, Symbol, TrueClass, FalseClass, NilClass, Array, Hash, Time, Numeric
          log_value
        else
          # Handle the serialization of complex objects in a useful way:
          #
          # 1. For ActiveRecord models: Use as_json which includes attributes
          # 2. For objects with custom as_json implementations: Use their implementation
          # 3. For basic objects that only have ActiveSupport's as_json: Use to_s
          begin
            method_owner = log_value.method(:as_json).owner

            # If it's ActiveRecord, ActiveModel, or a custom implementation, use as_json.
            # NOTE: && binds tighter than ||, so the last two exclude? checks form a
            # single combined clause ("not the ActiveSupport/Object default as_json").
            if method_owner.to_s.include?("ActiveRecord") ||
                method_owner.to_s.include?("ActiveModel") ||
                method_owner.to_s.exclude?("ActiveSupport::CoreExtensions") &&
                method_owner.to_s.exclude?("Object")
              log_value.as_json
            else
              # For plain objects with only the default ActiveSupport as_json
              log_value.to_s
            end
          rescue => e
            # Handle serialization errors
            context = {
              object_class: log_value.class.name,
              object_inspect: log_value.inspect.truncate(100)
            }
            LogStruct.handle_exception(e, source: Source::Internal, context: context)

            # Fall back to the string representation to ensure we continue processing
            log_value.to_s
          end
        end

        Log::Plain.new(
          message: log_message,
          timestamp: time
        ).serialize
      end
    end
-
-
# Serializes Log (or string) into JSON
-
2
sig { params(severity: T.any(String, Symbol, Integer), time: Time, progname: T.nilable(String), log_value: T.untyped).returns(String) }
-
1
def call(severity, time, progname, log_value)
-
786
level_enum = Level.from_severity(severity)
-
-
786
data = log_value_to_hash(log_value, time: time)
-
-
# Filter params, scrub sensitive values, format ActiveJob GlobalID arguments
-
786
data = process_values(data)
-
-
# Add standard fields if not already present
-
786
data[:src] ||= Source::App
-
786
data[:evt] ||= Event::Log
-
786
data[:ts] ||= time.iso8601(3)
-
786
data[:lvl] = level_enum # Set level from severity parameter
-
786
data[:prog] = progname if progname.present?
-
-
786
generate_json(data)
-
end
-
-
# Output as JSON with a newline. We mock this method in tests so we can
-
# inspect the data right before it gets turned into a JSON string.
-
2
    # Output as JSON with a newline. We mock this method in tests so we can
    # inspect the data right before it gets turned into a JSON string —
    # keep it as a seam; do not inline it into #call.
    sig { params(data: T::Hash[T.untyped, T.untyped]).returns(String) }
    def generate_json(data)
      "#{data.to_json}\n"
    end
-
-
2
sig { params(array: T::Array[T.untyped], recursion_depth: Integer).returns(T::Array[T.untyped]) }
-
1
def process_array(array, recursion_depth:)
-
22
return [] if array.empty?
-
-
22
if looks_like_backtrace_array?(array)
-
20
array.map { |value| process_values(value, recursion_depth: recursion_depth + 1) }
-
else
-
18
processed = []
-
18
array.each_with_index do |value, index|
-
52
break if index >= 10
-
-
50
processed << process_values(value, recursion_depth: recursion_depth + 1)
-
end
-
-
18
if array.size > 10
-
2
processed << "... and #{array.size - 10} more items"
-
end
-
-
18
processed
-
end
-
end
-
-
# Check if an array looks like a backtrace (array of strings with file:line pattern)
-
2
sig { params(array: T::Array[T.untyped]).returns(T::Boolean) }
-
1
def looks_like_backtrace_array?(array)
-
22
backtrace_like_count = array.first(5).count do |element|
-
56
element.is_a?(String) && element.match?(/\A[^:\s]+:\d+/)
-
end
-
-
22
backtrace_like_count >= 3
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
module LogStruct
  # Module for custom handlers used throughout the library.
  # Contains only Sorbet type aliases describing the proc shapes users may
  # plug into the configuration; no runtime behavior lives here.
  module Handlers
    # Type for Lograge custom options: receives the notification event and the
    # options hash being built, returns extra data to merge into the log.
    LogrageCustomOptions = T.type_alias {
      T.proc.params(
        event: ActiveSupport::Notifications::Event,
        options: T::Hash[Symbol, T.untyped]
      ).returns(T.untyped)
    }

    # Type for error reporting handlers: receives the error, an optional
    # context hash, and the Source the error originated from.
    ErrorReporter = T.type_alias {
      T.proc.params(
        error: StandardError,
        context: T.nilable(T::Hash[Symbol, T.untyped]),
        source: Source
      ).void
    }

    # Type for string scrubbing handlers: String in, scrubbed String out.
    StringScrubber = T.type_alias { T.proc.params(string: String).returns(String) }
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require "digest"
-
-
1
module LogStruct
  # Utility module for hashing sensitive data
  module HashUtils
    class << self
      extend T::Sig

      # Create a hash of a string value for tracing while preserving privacy.
      # Produces the first `filters.hash_length` hex chars of
      # SHA256(hash_salt + value), so equal inputs map to equal (short) tokens
      # without exposing the original value.
      sig { params(value: String).returns(String) }
      def hash_value(value)
        salt = LogStruct.config.filters.hash_salt
        length = LogStruct.config.filters.hash_length
        # NOTE(review): the || "error" fallback only fires when the slice
        # returns nil (e.g. a negative configured length) — confirm intended.
        Digest::SHA256.hexdigest("#{salt}#{value}")[0...length] || "error"
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require_relative "integrations/integration_interface"
-
1
require_relative "integrations/active_job"
-
1
require_relative "integrations/active_record"
-
1
require_relative "integrations/rack_error_handler"
-
1
require_relative "integrations/host_authorization"
-
1
require_relative "integrations/action_mailer"
-
1
require_relative "integrations/lograge"
-
1
require_relative "integrations/shrine"
-
1
require_relative "integrations/sidekiq"
-
1
require_relative "integrations/good_job"
-
1
require_relative "integrations/active_storage"
-
1
require_relative "integrations/carrierwave"
-
1
require_relative "integrations/sorbet"
-
1
require_relative "integrations/ahoy"
-
1
require_relative "integrations/active_model_serializers"
-
1
require_relative "integrations/dotenv"
-
1
require_relative "integrations/puma"
-
-
1
module LogStruct
  # Central wiring for all framework/gem integrations. Each integration is
  # toggled by a flag under config.integrations and set up in a fixed order.
  module Integrations
    extend T::Sig

    # Register generic initializers on the Railtie to keep integration
    # wiring centralized (boot replay interception and resolution).
    sig { params(railtie: T.untyped).void }
    def self.setup_initializers(railtie)
      # Intercept any boot-time replays (e.g., dotenv) before those railties run
      railtie.initializer "logstruct.intercept_boot_replays", before: "dotenv" do
        LogStruct::Integrations::Dotenv.intercept_logger_setter!
      end

      # Decide which set of boot logs to emit after user initializers
      railtie.initializer "logstruct.resolve_boot_logs", after: :load_config_initializers do
        LogStruct::Integrations::Dotenv.resolve_boot_logs!
      end
    end

    # Entry point called by the Railtie: runs the requested setup stage.
    # :non_middleware runs before middleware is built; :middleware installs
    # Rack-level handlers; :all runs both (non_middleware first).
    sig { params(stage: Symbol).void }
    def self.setup_integrations(stage: :all)
      config = LogStruct.config

      case stage
      when :non_middleware
        setup_non_middleware_integrations(config)
      when :middleware
        setup_middleware_integrations(config)
      when :all
        setup_non_middleware_integrations(config)
        setup_middleware_integrations(config)
      else
        raise ArgumentError, "Unknown integration stage: #{stage}"
      end
    end

    # Sets up every non-middleware integration whose flag is enabled.
    # Each Integration#setup is expected to no-op when its gem is absent.
    sig { params(config: LogStruct::Configuration).void }
    def self.setup_non_middleware_integrations(config)
      Integrations::Lograge.setup(config) if config.integrations.enable_lograge
      Integrations::ActionMailer.setup(config) if config.integrations.enable_actionmailer
      Integrations::ActiveJob.setup(config) if config.integrations.enable_activejob
      Integrations::ActiveRecord.setup(config) if config.integrations.enable_sql_logging
      Integrations::Sidekiq.setup(config) if config.integrations.enable_sidekiq
      Integrations::GoodJob.setup(config) if config.integrations.enable_goodjob
      Integrations::Ahoy.setup(config) if config.integrations.enable_ahoy
      Integrations::ActiveModelSerializers.setup(config) if config.integrations.enable_active_model_serializers
      Integrations::Shrine.setup(config) if config.integrations.enable_shrine
      Integrations::ActiveStorage.setup(config) if config.integrations.enable_activestorage
      Integrations::CarrierWave.setup(config) if config.integrations.enable_carrierwave
      Integrations::Sorbet.setup(config) if config.integrations.enable_sorbet_error_handlers
      # NOTE(review): Dotenv is the only integration additionally gated on
      # config.enabled here — confirm whether the others should be as well.
      if config.enabled && config.integrations.enable_dotenv
        Integrations::Dotenv.setup(config)
      end
      Integrations::Puma.setup(config) if config.integrations.enable_puma
    end

    # Sets up the Rack-middleware-based integrations.
    sig { params(config: LogStruct::Configuration).void }
    def self.setup_middleware_integrations(config)
      Integrations::HostAuthorization.setup(config) if config.integrations.enable_host_authorization
      Integrations::RackErrorHandler.setup(config) if config.integrations.enable_rack_error_handler
    end

    private_class_method :setup_non_middleware_integrations, :setup_middleware_integrations
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
begin
-
1
require "action_mailer"
-
rescue LoadError
-
# actionmailer gem is not available, integration will be skipped
-
end
-
-
1
if defined?(::ActionMailer)
-
1
require "logger"
-
1
require_relative "action_mailer/metadata_collection"
-
1
require_relative "action_mailer/event_logging"
-
1
require_relative "action_mailer/error_handling"
-
end
-
-
1
module LogStruct
  module Integrations
    # ActionMailer integration for structured logging.
    # Silences ActionMailer's default logger and prepends the LogStruct
    # event-logging / error-handling / metadata modules onto mailers.
    module ActionMailer
      extend T::Sig
      extend IntegrationInterface

      # Set up ActionMailer structured logging.
      # Returns true on success, nil when skipped (gem missing or disabled).
      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
      def self.setup(config)
        return nil unless defined?(::ActionMailer)
        return nil unless config.enabled
        return nil unless config.integrations.enable_actionmailer

        # Silence default ActionMailer logs (we use our own structured logging)
        # This is required because we replace the logging using our own callbacks
        if defined?(::ActionMailer::Base)
          ::ActionMailer::Base.logger = ::Logger.new(File::NULL)
        end

        # Register our custom observers and handlers
        # Registering these at the class level means all mailers will use them
        ActiveSupport.on_load(:action_mailer) do
          prepend LogStruct::Integrations::ActionMailer::EventLogging
          prepend LogStruct::Integrations::ActionMailer::ErrorHandling
          prepend LogStruct::Integrations::ActionMailer::MetadataCollection
        end

        # If ActionMailer::Base is already loaded, the on_load hooks won't run
        # So we need to apply the modules directly
        # (prepend is idempotent, so applying in both paths is safe)
        if defined?(::ActionMailer::Base)
          ::ActionMailer::Base.prepend(LogStruct::Integrations::ActionMailer::EventLogging)
          ::ActionMailer::Base.prepend(LogStruct::Integrations::ActionMailer::ErrorHandling)
          ::ActionMailer::Base.prepend(LogStruct::Integrations::ActionMailer::MetadataCollection)
        end

        true
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
module LogStruct
  module Integrations
    module ActionMailer
      # Handles error handling for ActionMailer
      #
      # IMPORTANT LIMITATIONS:
      # 1. This module must be included BEFORE users define rescue_from handlers
      #    to ensure proper handler precedence (user handlers are checked first)
      # 2. Rails rescue_from handlers don't bubble to parent class handlers after reraise
      # 3. Handler order matters: Rails checks rescue_from handlers in reverse declaration order
      module ErrorHandling
        extend T::Sig
        extend ActiveSupport::Concern

        # Set to true by log_and_ignore_error so EventLogging can skip the
        # "delivered" event after a swallowed delivery failure.
        sig { returns(T.nilable(T::Boolean)) }
        attr_accessor :logstruct_mail_failed

        # NOTE: rescue_from handlers are checked in reverse order of declaration.
        # We want LogStruct handlers to be checked AFTER user handlers (lower priority),
        # so we need to add them BEFORE user handlers are declared.

        # This will be called when the module is included/prepended
        sig { params(base: T.untyped).void }
        def self.install_handler(base)
          # Only add the handler once per class (flag is a per-class ivar)
          return if base.instance_variable_get(:@_logstruct_handler_installed)

          # Add our handler FIRST so it has lower priority than user handlers
          base.rescue_from StandardError, with: :log_and_reraise_error

          # Mark as installed to prevent duplicates
          base.instance_variable_set(:@_logstruct_handler_installed, true)
        end

        included do
          LogStruct::Integrations::ActionMailer::ErrorHandling.install_handler(self)
        end

        # Also support prepended (used by tests and manual setup)
        sig { params(base: T.untyped).void }
        def self.prepended(base)
          install_handler(base)
        end

        protected

        # Just log the error without reporting or retrying
        sig { params(ex: StandardError).void }
        def log_and_ignore_error(ex)
          self.logstruct_mail_failed = true
          log_email_delivery_error(ex, notify: false, report: false, reraise: false)
        end

        # Log and report to error service, but doesn't reraise.
        sig { params(ex: StandardError).void }
        def log_and_report_error(ex)
          log_email_delivery_error(ex, notify: false, report: true, reraise: false)
        end

        # Log, report to error service, and reraise for retry.
        # This is the default handler installed by install_handler.
        sig { params(ex: StandardError).void }
        def log_and_reraise_error(ex)
          log_email_delivery_error(ex, notify: false, report: true, reraise: true)
        end

        private

        # Handle an error from a mailer: build a Log::ActionMailer::Error
        # struct from the mailer's message/metadata and emit it at error level.
        # NOTE(review): this struct-building sequence is duplicated in
        # handle_error_notifications and log_notification_event — candidate
        # for extraction into a shared helper.
        sig { params(mailer: T.untyped, error: StandardError, message: String).void }
        def log_structured_error(mailer, error, message)
          # Get message if available
          mailer_message = mailer.respond_to?(:message) ? mailer.message : nil

          # Prepare universal mailer fields
          message_data = {}
          MetadataCollection.add_message_metadata(mailer, message_data)

          # Prepare app-specific context data for additional_data
          context_data = {}
          MetadataCollection.add_context_metadata(mailer, context_data)

          # Extract email fields
          to = mailer_message&.to
          from = mailer_message&.from&.first
          subject = mailer_message&.subject
          message_id = extract_message_id_from_mailer(mailer)

          # Create ActionMailer-specific error struct
          exception_data = Log::ActionMailer::Error.new(
            to: to,
            from: from,
            subject: subject,
            message_id: message_id,
            mailer_class: mailer.class.to_s,
            mailer_action: mailer.respond_to?(:action_name) ? mailer.action_name&.to_s : nil,
            attachment_count: message_data[:attachment_count],
            error_class: error.class,
            message: message,
            backtrace: error.backtrace,
            additional_data: context_data.presence,
            timestamp: Time.now
          )

          # Log the structured error
          LogStruct.error(exception_data)
        end

        # Extract message ID from the mailer (nil when no message is present)
        sig { params(mailer: T.untyped).returns(T.nilable(String)) }
        def extract_message_id_from_mailer(mailer)
          return nil unless mailer.respond_to?(:message)

          mail_message = mailer.message
          return nil unless mail_message.respond_to?(:message_id)

          mail_message.message_id
        end

        # Log when email delivery fails. The notify/report/reraise flags
        # select the strategy (see the three protected handlers above).
        sig { params(error: StandardError, notify: T::Boolean, report: T::Boolean, reraise: T::Boolean).void }
        def log_email_delivery_error(error, notify: false, report: true, reraise: true)
          # Generate appropriate error message
          message = error_message_for(error, reraise)

          # Use structured error logging
          log_structured_error(self, error, message)

          # Handle notifications and reporting
          handle_error_notifications(error, notify, report, reraise)
        end

        # Generate appropriate error message based on error handling strategy
        sig { params(error: StandardError, reraise: T::Boolean).returns(String) }
        def error_message_for(error, reraise)
          if reraise
            "#{error.class}: Email delivery error, will retry. Recipients: #{recipients(error)}. Error message: #{error.message}"
          else
            "#{error.class}: Cannot send email to #{recipients(error)}. Error message: #{error.message}"
          end
        end

        # Handle error notifications, reporting, and reraising
        sig { params(error: StandardError, notify: T::Boolean, report: T::Boolean, reraise: T::Boolean).void }
        def handle_error_notifications(error, notify, report, reraise)
          # Log a notification event if requested
          log_notification_event(error) if notify

          # Report to error reporting service if requested
          if report
            # Get message if available
            mailer_message = respond_to?(:message) ? message : nil

            # Prepare universal mailer fields
            message_data = {}
            MetadataCollection.add_message_metadata(self, message_data)

            # Prepare app-specific context data
            context_data = {recipients: recipients(error)}
            MetadataCollection.add_context_metadata(self, context_data)

            # Extract email fields
            to = mailer_message&.to
            from = mailer_message&.from&.first
            subject = mailer_message&.subject
            message_id = extract_message_id_from_mailer(self)

            # Create ActionMailer-specific error struct
            exception_data = Log::ActionMailer::Error.new(
              to: to,
              from: from,
              subject: subject,
              message_id: message_id,
              mailer_class: self.class.to_s,
              mailer_action: respond_to?(:action_name) ? action_name&.to_s : nil,
              attachment_count: message_data[:attachment_count],
              error_class: error.class,
              message: error.message,
              backtrace: error.backtrace,
              additional_data: context_data.presence,
              timestamp: Time.now
            )

            # Log the exception with structured data
            LogStruct.error(exception_data)

            # Call the error handler with flat context for compatibility
            context = {
              mailer_class: self.class.to_s,
              mailer_action: respond_to?(:action_name) ? action_name : nil,
              recipients: recipients(error)
            }
            LogStruct.handle_exception(error, source: Source::Mailer, context: context)
          end

          # Re-raise the error if requested
          Kernel.raise error if reraise
        end

        # Log a notification event that can be picked up by external systems
        sig { params(error: StandardError).void }
        def log_notification_event(error)
          # Get message if available
          mailer_message = respond_to?(:message) ? message : nil

          # Prepare universal mailer fields
          message_data = {}
          MetadataCollection.add_message_metadata(self, message_data)

          # Prepare app-specific context data
          context_data = {
            mailer: self.class.to_s,
            action: action_name&.to_s,
            recipients: recipients(error)
          }
          MetadataCollection.add_context_metadata(self, context_data)

          # Extract email fields
          to = mailer_message&.to
          from = mailer_message&.from&.first
          subject = mailer_message&.subject
          message_id = extract_message_id_from_mailer(self)

          # Create ActionMailer-specific error struct
          exception_data = Log::ActionMailer::Error.new(
            to: to,
            from: from,
            subject: subject,
            message_id: message_id,
            mailer_class: self.class.to_s,
            mailer_action: respond_to?(:action_name) ? action_name&.to_s : nil,
            attachment_count: message_data[:attachment_count],
            error_class: error.class,
            message: error.message,
            backtrace: error.backtrace,
            additional_data: context_data.presence,
            timestamp: Time.now,
            level: Level::Info
          )

          # Log the error at info level since it's not a critical error
          LogStruct.info(exception_data)
        end

        # Best-effort recipient list for error messages; "unknown" when the
        # error object doesn't expose recipients.
        sig { params(error: StandardError).returns(String) }
        def recipients(error)
          # Extract recipient info if available
          if error.respond_to?(:recipients) && T.unsafe(error).recipients.present?
            T.unsafe(error).recipients.join(", ")
          else
            "unknown"
          end
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
module LogStruct
  module Integrations
    module ActionMailer
      # Handles logging of email delivery events via before/after_deliver
      # callbacks registered on the mailer class.
      module EventLogging
        extend ActiveSupport::Concern
        extend T::Sig
        extend T::Helpers
        requires_ancestor { ::ActionMailer::Base }
        requires_ancestor { ErrorHandling }

        included do
          T.bind(self, T.class_of(::ActionMailer::Base))

          # Add callbacks for delivery events
          before_deliver :log_email_delivery
          after_deliver :log_email_delivered
        end

        # When this module is prepended (our integration uses prepend), ensure callbacks are registered
        if respond_to?(:prepended)
          prepended do
            T.bind(self, T.class_of(::ActionMailer::Base))

            # Add callbacks for delivery events
            before_deliver :log_email_delivery
            after_deliver :log_email_delivered
          end
        end

        protected

        # Log when an email is about to be delivered
        sig { void }
        def log_email_delivery
          log_mailer_event(Event::Delivery)
        end

        # Log when an email is delivered
        sig { void }
        def log_email_delivered
          # Don't log delivered event if the delivery failed (error was handled with log_and_ignore_error)
          return if logstruct_mail_failed

          log_mailer_event(Event::Delivered)
        end

        private

        # Log a mailer event with the given event type.
        # Builds the shared BaseFields from the mail message + metadata, wraps
        # them in the event-specific log struct, and emits it at info level.
        # Returns the log struct (or nil for unknown event types).
        # NOTE(review): the `level` parameter is never referenced in the body —
        # events are always emitted via LogStruct.info. Confirm whether it
        # should drive the log level or be removed.
        sig { params(event_type: LogStruct::Event, level: Symbol, additional_data: T::Hash[Symbol, T.untyped]).returns(T.untyped) }
        def log_mailer_event(event_type, level = :info, additional_data = {})
          # Get message (self refers to the mailer instance)
          mailer_message = message if respond_to?(:message)

          # Prepare universal mailer fields
          message_data = {}
          MetadataCollection.add_message_metadata(self, message_data)

          # Prepare app-specific context data for additional_data
          context_data = {}
          MetadataCollection.add_context_metadata(self, context_data)
          context_data.merge!(additional_data) if additional_data.present?

          # Extract email fields (these will be filtered if email_addresses=true)
          to = mailer_message&.to
          from = mailer_message&.from&.first
          subject = mailer_message&.subject

          base_fields = Log::ActionMailer::BaseFields.new(
            to: to,
            from: from,
            subject: subject,
            message_id: extract_message_id,
            mailer_class: self.class.to_s,
            mailer_action: action_name.to_s,
            attachment_count: message_data[:attachment_count]
          )

          log = case event_type
          when Event::Delivery
            Log::ActionMailer::Delivery.new(
              **base_fields.to_kwargs,
              additional_data: context_data.presence,
              timestamp: Time.now
            )
          when Event::Delivered
            Log::ActionMailer::Delivered.new(
              **base_fields.to_kwargs,
              additional_data: context_data.presence,
              timestamp: Time.now
            )
          else
            return
          end
          LogStruct.info(log)
          log
        end

        # Extract message ID from the mailer's mail message (nil when absent)
        sig { returns(T.nilable(String)) }
        def extract_message_id
          return nil unless respond_to?(:message)

          mail_message = message
          return nil unless mail_message.respond_to?(:message_id)

          mail_message.message_id
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
module LogStruct
  module Integrations
    module ActionMailer
      # Handles collection of metadata for email logging.
      # Stateless module functions that mutate the caller-supplied log_data
      # hash in place.
      module MetadataCollection
        extend T::Sig

        # Add message-specific metadata to log data
        sig { params(mailer: T.untyped, log_data: T::Hash[Symbol, T.untyped]).void }
        def self.add_message_metadata(mailer, log_data)
          message = mailer.respond_to?(:message) ? mailer.message : nil

          # Add attachment count if message is available (0 otherwise)
          log_data[:attachment_count] = if message
            message.attachments&.count || 0
          else
            0
          end
        end

        # Add context metadata to log data
        sig { params(mailer: T.untyped, log_data: T::Hash[Symbol, T.untyped]).void }
        def self.add_context_metadata(mailer, log_data)
          # Add account ID information if available (but not user email)
          extract_ids_to_log_data(mailer, log_data)

          # Add any current tags from ActiveJob or ActionMailer
          add_current_tags_to_log_data(log_data)
        end

        # Copy configured instance-variable IDs (e.g. @user -> :user_id) from
        # the mailer into log_data, using the configured actionmailer_id_mapping.
        sig { params(mailer: T.untyped, log_data: T::Hash[Symbol, T.untyped]).void }
        def self.extract_ids_to_log_data(mailer, log_data)
          # Use configured ID mapping from LogStruct configuration
          id_mapping = LogStruct.config.integrations.actionmailer_id_mapping

          id_mapping.each do |ivar_name, log_key|
            ivar = :"@#{ivar_name}"
            next unless mailer.instance_variable_defined?(ivar)

            obj = mailer.instance_variable_get(ivar)
            # Only objects that expose #id contribute a value
            log_data[log_key] = obj.id if obj.respond_to?(:id)
          end
        end

        # Attach ambient context: logging tags, current request_id, and
        # current job_id, when each source is available and present.
        sig { params(log_data: T::Hash[Symbol, T.untyped]).void }
        def self.add_current_tags_to_log_data(log_data)
          # Get current tags from thread-local storage or ActiveSupport::TaggedLogging
          tags = if ::ActiveSupport::TaggedLogging.respond_to?(:current_tags)
            T.unsafe(::ActiveSupport::TaggedLogging).current_tags
          else
            Thread.current[:activesupport_tagged_logging_tags] || []
          end
          log_data[:tags] = tags if tags.present?

          # Get request_id from ActionDispatch if available
          if ::ActionDispatch::Request.respond_to?(:current_request_id) &&
              T.unsafe(::ActionDispatch::Request).current_request_id.present?
            log_data[:request_id] = T.unsafe(::ActionDispatch::Request).current_request_id
          end

          # Get job_id from ActiveJob if available
          if defined?(::ActiveJob::Logging) && ::ActiveJob::Logging.respond_to?(:job_id) &&
              T.unsafe(::ActiveJob::Logging).job_id.present?
            log_data[:job_id] = T.unsafe(::ActiveJob::Logging).job_id
          end
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
begin
-
1
require "active_job"
-
1
require "active_job/log_subscriber"
-
rescue LoadError
-
# ActiveJob gem is not available, integration will be skipped
-
end
-
-
1
require_relative "active_job/log_subscriber" if defined?(::ActiveJob::LogSubscriber)
-
-
1
module LogStruct
  module Integrations
    # ActiveJob integration for structured logging.
    # Swaps ActiveJob's default text LogSubscriber for the structured one.
    module ActiveJob
      extend T::Sig
      extend IntegrationInterface

      # Set up ActiveJob structured logging.
      # Returns true on success, nil when skipped (gem missing or disabled).
      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
      def self.setup(config)
        return nil unless defined?(::ActiveJob::LogSubscriber)
        return nil unless config.enabled
        return nil unless config.integrations.enable_activejob

        ::ActiveSupport.on_load(:active_job) do
          # Detach the default text formatter
          ::ActiveJob::LogSubscriber.detach_from :active_job

          # Attach our structured formatter
          Integrations::ActiveJob::LogSubscriber.attach_to :active_job
        end
        true
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../log/active_job"
-
1
require_relative "../../log/error"
-
-
1
module LogStruct
  module Integrations
    module ActiveJob
      # Structured logging for ActiveJob: subscribes to enqueue/enqueue_at/
      # perform_start/perform notifications and emits Log::ActiveJob structs.
      #
      # NOTE(review): Time.at(event.time) assumes event.time is a wall-clock
      # epoch value; in newer Rails versions Notifications::Event timestamps
      # are monotonic floats — confirm against the Rails version in use.
      class LogSubscriber < ::ActiveJob::LogSubscriber
        extend T::Sig

        # Job was pushed onto a queue for immediate execution.
        sig { params(event: ::ActiveSupport::Notifications::Event).void }
        def enqueue(event)
          job = T.cast(event.payload[:job], ::ActiveJob::Base)
          ts = event.time ? Time.at(event.time) : Time.now
          base_fields = build_base_fields(job)
          logger.info(Log::ActiveJob::Enqueue.new(
            **base_fields.to_kwargs,
            timestamp: ts
          ))
        end

        # Job was scheduled for later execution (set_wait / perform_later at:).
        sig { params(event: ::ActiveSupport::Notifications::Event).void }
        def enqueue_at(event)
          job = T.cast(event.payload[:job], ::ActiveJob::Base)
          ts = event.time ? Time.at(event.time) : Time.now
          base_fields = build_base_fields(job)
          logger.info(Log::ActiveJob::Schedule.new(
            **base_fields.to_kwargs,
            scheduled_at: job.scheduled_at,
            timestamp: ts
          ))
        end

        # Job finished performing — logs a Finish struct on success, or an
        # error log when the payload carries an exception.
        sig { params(event: ::ActiveSupport::Notifications::Event).void }
        def perform(event)
          job = T.cast(event.payload[:job], ::ActiveJob::Base)
          exception = event.payload[:exception_object]

          if exception
            # Log the exception with the job context
            log_exception(exception, job, event)
          else
            start_float = event.time
            end_float = event.end
            ts = start_float ? Time.at(start_float) : Time.now
            finished_at = end_float ? Time.at(end_float) : Time.now
            base_fields = build_base_fields(job)
            logger.info(Log::ActiveJob::Finish.new(
              **base_fields.to_kwargs,
              duration_ms: event.duration.to_f,
              finished_at: finished_at,
              timestamp: ts
            ))
          end
        end

        # Job started performing.
        sig { params(event: ::ActiveSupport::Notifications::Event).void }
        def perform_start(event)
          job = T.cast(event.payload[:job], ::ActiveJob::Base)
          ts = event.time ? Time.at(event.time) : Time.now
          started_at = ts
          attempt = job.executions
          base_fields = build_base_fields(job)
          logger.info(Log::ActiveJob::Start.new(
            **base_fields.to_kwargs,
            started_at: started_at,
            attempt: attempt,
            timestamp: ts
          ))
        end

        private

        # Common fields shared by every ActiveJob log struct. Arguments are
        # only included when the job class opts in via log_arguments?.
        sig { params(job: ::ActiveJob::Base).returns(Log::ActiveJob::BaseFields) }
        def build_base_fields(job)
          Log::ActiveJob::BaseFields.new(
            job_id: job.job_id,
            job_class: job.class.to_s,
            queue_name: job.queue_name&.to_sym,
            executions: job.executions,
            provider_job_id: job.provider_job_id,
            arguments: ((job.class.respond_to?(:log_arguments?) && job.class.log_arguments?) ? job.arguments : nil)
          )
        end

        # Emit an error-level structured log for a job that raised.
        sig { params(exception: StandardError, job: ::ActiveJob::Base, _event: ::ActiveSupport::Notifications::Event).void }
        def log_exception(exception, job, _event)
          base_fields = build_base_fields(job)
          job_context = base_fields.to_kwargs

          log_data = Log.from_exception(Source::Job, exception, job_context)

          logger.error(log_data)
        end

        # All output goes through ActiveJob's configured logger.
        sig { returns(T.untyped) }
        def logger
          ::ActiveJob::Base.logger
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require "active_support/notifications"
-
-
1
module LogStruct
  module Integrations
    # ActiveModelSerializers integration. Subscribes to AMS notifications and
    # emits structured logs with serializer/adapter/duration details.
    module ActiveModelSerializers
      extend T::Sig

      # Subscribe to AMS render notifications and emit a structured log for
      # each one. Returns nil when Notifications or AMS are unavailable,
      # true once subscribed.
      sig { params(config: LogStruct::Configuration).returns(T.nilable(TrueClass)) }
      def self.setup(config)
        return nil unless defined?(::ActiveSupport::Notifications)

        # Only activate if AMS appears to be present
        return nil unless defined?(::ActiveModelSerializers)

        # Broad pattern covering every AMS-namespaced notification name.
        ::ActiveSupport::Notifications.subscribe(/\.active_model_serializers\z/) do |_name, started, finished, _unique_id, payload|
          begin
            # started/finished are Time; convert to ms
            elapsed_ms = ((finished - started) * 1000.0).round(3)

            LogStruct.info(
              LogStruct::Log::ActiveModelSerializers.new(
                message: "ams.render",
                serializer: (payload[:serializer] || payload[:serializer_class])&.to_s,
                adapter: payload[:adapter]&.to_s,
                resource_class: (payload[:resource] || payload[:object])&.class&.name,
                duration_ms: elapsed_ms,
                timestamp: started
              )
            )
          rescue => e
            # Never let a logging failure escape the notification pipeline.
            LogStruct.handle_exception(e, source: LogStruct::Source::Rails, context: {integration: :active_model_serializers})
          end
        end

        true
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require "active_support/notifications"
-
-
1
module LogStruct
  module Integrations
    # ActiveRecord Integration for SQL Query Logging
    #
    # This integration captures and structures all SQL queries executed through ActiveRecord,
    # providing detailed performance and debugging information in a structured format.
    #
    # ## Features:
    # - Captures all SQL queries with execution time
    # - Safely filters sensitive data from bind parameters
    # - Extracts database operation metadata
    # - Provides connection pool monitoring information
    # - Identifies query types and table names
    #
    # ## Performance Considerations:
    # - Minimal overhead on query execution
    # - Async logging prevents I/O blocking
    # - Configurable to disable in production if needed
    # - Smart filtering reduces log volume for repetitive queries
    #
    # ## Security:
    # - SQL queries are always parameterized (safe)
    # - Bind parameters filtered through LogStruct's param filters
    # - Sensitive patterns automatically scrubbed
    #
    # ## Configuration:
    # ```ruby
    # LogStruct.configure do |config|
    #   config.integrations.enable_sql_logging = true
    #   config.integrations.sql_slow_query_threshold = 100.0 # ms
    #   config.integrations.sql_log_bind_params = false # disable in production
    # end
    # ```
    module ActiveRecord
      extend T::Sig
      extend IntegrationInterface

      # Track subscription state keyed to the current Notifications.notifier instance
      # so a notifier reset (e.g. between test runs) triggers a fresh subscription.
      State = ::Struct.new(:subscribed, :notifier_id)
      STATE = T.let(State.new(false, nil), State)

      # Set up SQL query logging integration.
      # Returns nil when disabled or ActiveRecord is absent, true on success.
      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
      def self.setup(config)
        return nil unless config.integrations.enable_sql_logging
        return nil unless defined?(::ActiveRecord::Base)

        # Detach Rails' default ActiveRecord log subscriber to prevent
        # duplicate/unstructured SQL debug output when LogStruct SQL logging
        # is enabled. We still receive notifications via ActiveSupport.
        if defined?(::ActiveRecord::LogSubscriber)
          begin
            ::ActiveRecord::LogSubscriber.detach_from(:active_record)
          rescue => e
            # Best-effort: detach_from can fail on older Rails; report and continue.
            LogStruct.handle_exception(e, source: LogStruct::Source::Internal)
          end
        end

        # Disable verbose query logs ("↳ caller") since LogStruct provides
        # structured context and these lines are noisy/unstructured.
        if ::ActiveRecord::Base.respond_to?(:verbose_query_logs=)
          T.unsafe(::ActiveRecord::Base).verbose_query_logs = false
        end

        subscribe_to_sql_notifications
        true
      end

      # NOTE(review): `private_class_method` with no arguments is a no-op — it
      # does not switch default visibility the way bare `private` does, so the
      # singleton methods below remain public. If they are meant to be private,
      # pass their names explicitly (confirm no specs call them directly first).
      private_class_method

      # Subscribe to ActiveRecord's sql.active_record notifications
      sig { void }
      def self.subscribe_to_sql_notifications
        # Avoid duplicate subscriptions; re-subscribe if the notifier was reset
        notifier = ::ActiveSupport::Notifications.notifier
        current_id = notifier&.object_id
        if STATE.subscribed && STATE.notifier_id == current_id
          return
        end

        ::ActiveSupport::Notifications.subscribe("sql.active_record") do |name, start, finish, id, payload|
          handle_sql_event(name, start, finish, id, payload)
        rescue => error
          # Logging must never break query execution; report internally instead.
          LogStruct.handle_exception(error, source: LogStruct::Source::Internal)
        end
        STATE.subscribed = true
        STATE.notifier_id = current_id
      end

      # Process SQL notification event and create structured log.
      # start/finish come from the notification (assumed Time or Float seconds
      # — TODO confirm monotonic vs wall clock on the Rails versions supported).
      sig { params(name: String, start: T.untyped, finish: T.untyped, id: String, payload: T::Hash[Symbol, T.untyped]).void }
      def self.handle_sql_event(name, start, finish, id, payload)
        # Skip schema queries and Rails internal queries
        return if skip_query?(payload)

        duration_ms = ((finish - start) * 1000.0).round(2)

        # Skip fast queries if threshold is configured
        config = LogStruct.config
        if config.integrations.sql_slow_query_threshold&.positive?
          return if duration_ms < config.integrations.sql_slow_query_threshold
        end

        sql_log = Log::SQL.new(
          message: format_sql_message(payload),
          source: Source::App,
          event: Event::Database,
          sql: payload[:sql]&.strip || "",
          name: payload[:name] || "SQL Query",
          duration_ms: duration_ms,
          row_count: extract_row_count(payload),
          adapter: extract_adapter_name(payload),
          bind_params: extract_and_filter_binds(payload),
          database_name: extract_database_name(payload),
          connection_pool_size: extract_pool_size(payload),
          active_connections: extract_active_connections(payload),
          operation_type: extract_operation_type(payload),
          table_names: extract_table_names(payload)
        )

        LogStruct.info(sql_log)
      end

      # Determine if query should be skipped from logging
      sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T::Boolean) }
      def self.skip_query?(payload)
        query_name = payload[:name]
        sql = payload[:sql]

        # Skip Rails schema queries
        return true if query_name&.include?("SCHEMA")
        return true if query_name&.include?("CACHE")

        # Skip common Rails internal queries
        return true if sql&.include?("schema_migrations")
        return true if sql&.include?("ar_internal_metadata")

        # Skip SHOW/DESCRIBE queries
        return true if sql&.match?(/\A\s*(SHOW|DESCRIBE|EXPLAIN)\s/i)

        false
      end

      # Format a readable message for the SQL log
      sig { params(payload: T::Hash[Symbol, T.untyped]).returns(String) }
      def self.format_sql_message(payload)
        operation_name = payload[:name] || "SQL Query"
        "#{operation_name} executed"
      end

      # Extract row count from payload (only when the adapter supplies an Integer)
      sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(Integer)) }
      def self.extract_row_count(payload)
        row_count = payload[:row_count]
        row_count.is_a?(Integer) ? row_count : nil
      end

      # Extract database adapter name (last constant segment of the connection class)
      sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(String)) }
      def self.extract_adapter_name(payload)
        connection = payload[:connection]
        return nil unless connection

        adapter_name = connection.class.name
        adapter_name&.split("::")&.last
      end

      # Extract and filter bind parameters. Returns nil when bind logging is
      # disabled or no binds are present.
      sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(T::Array[T.untyped])) }
      def self.extract_and_filter_binds(payload)
        return nil unless LogStruct.config.integrations.sql_log_bind_params

        # Prefer type_casted_binds as they're more readable
        binds = payload[:type_casted_binds] || payload[:binds]
        return nil unless binds

        # Filter sensitive data from bind parameters
        binds.map do |bind|
          filter_bind_parameter(bind)
        end
      end

      # Extract database name from connection; swallows adapter-specific errors
      # since this is purely informational.
      sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(String)) }
      def self.extract_database_name(payload)
        connection = payload[:connection]
        return nil unless connection

        if connection.respond_to?(:current_database)
          connection.current_database
        elsif connection.respond_to?(:database)
          connection.database
        end
      rescue
        nil
      end

      # Extract connection pool size (best-effort; nil on any failure)
      sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(Integer)) }
      def self.extract_pool_size(payload)
        connection = payload[:connection]
        return nil unless connection

        pool = connection.pool if connection.respond_to?(:pool)
        pool&.size
      rescue
        nil
      end

      # Extract active connection count from the pool's :busy stat (best-effort)
      sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(Integer)) }
      def self.extract_active_connections(payload)
        connection = payload[:connection]
        return nil unless connection

        pool = connection.pool if connection.respond_to?(:pool)
        pool&.stat&.[](:busy)
      rescue
        nil
      end

      # Extract SQL operation type (SELECT, INSERT, etc.)
      sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(String)) }
      def self.extract_operation_type(payload)
        sql = payload[:sql]
        return nil unless sql

        # Extract first word of SQL query
        match = sql.strip.match(/\A\s*(\w+)/i)
        match&.captures&.first&.upcase
      end

      # Extract table names from SQL query
      sig { params(payload: T::Hash[Symbol, T.untyped]).returns(T.nilable(T::Array[String])) }
      def self.extract_table_names(payload)
        sql = payload[:sql]
        return nil unless sql

        # Simple regex to extract table names (basic implementation)
        # This covers most common cases but could be enhanced
        tables = []

        # Match FROM, JOIN, UPDATE, INSERT INTO, DELETE FROM patterns
        sql.scan(/(?:FROM|JOIN|UPDATE|INTO|DELETE\s+FROM)\s+["`]?(\w+)["`]?/i) do |match|
          table_name = match[0]
          tables << table_name unless tables.include?(table_name)
        end

        tables.empty? ? nil : tables
      end

      # Filter individual bind parameter values to remove sensitive data.
      # Non-string values pass through unchanged.
      sig { params(value: T.untyped).returns(T.untyped) }
      def self.filter_bind_parameter(value)
        case value
        when String
          # Filter strings that look like passwords, tokens, secrets, etc.
          if looks_sensitive?(value)
            "[FILTERED]"
          else
            value
          end
        else
          value
        end
      end

      # Check if a string value looks sensitive and should be filtered.
      # Heuristic: long strings, hex digests, base64 blobs, or credential-ish words.
      sig { params(value: String).returns(T::Boolean) }
      def self.looks_sensitive?(value)
        # Filter very long strings that might be tokens
        return true if value.length > 50

        # Filter strings that look like hashed passwords, API keys, tokens
        return true if value.match?(/\A[a-f0-9]{32,}\z/i) # MD5, SHA, etc.
        return true if value.match?(/\A[A-Za-z0-9+\/]{20,}={0,2}\z/) # Base64
        return true if value.match?(/(password|secret|token|key|auth)/i)

        false
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require_relative "../enums/source"
-
1
require_relative "../enums/event"
-
1
require_relative "../log/active_storage"
-
-
1
module LogStruct
  module Integrations
    # Integration for ActiveStorage structured logging.
    #
    # Subscribes to ActiveStorage's service_* notifications and converts each
    # one into a typed LogStruct event (Upload/Download/Delete/etc.).
    module ActiveStorage
      extend T::Sig
      extend IntegrationInterface

      # Set up ActiveStorage structured logging.
      # Returns nil when ActiveStorage is absent or the integration is
      # disabled, true once subscribed.
      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
      def self.setup(config)
        return nil unless defined?(::ActiveStorage)
        return nil unless config.enabled
        return nil unless config.integrations.enable_activestorage

        # Subscribe to all ActiveStorage service events
        ::ActiveSupport::Notifications.subscribe(/service_.*\.active_storage/) do |*args|
          process_active_storage_event(::ActiveSupport::Notifications::Event.new(*args), config)
        end

        true
      end

      # NOTE(review): `private_class_method` with no arguments is a no-op; the
      # method below remains public. Pass the method name explicitly if it is
      # meant to be private (verify no specs call it directly first).
      private_class_method

      # Process ActiveStorage events and create structured logs.
      # Re-checks config at event time so runtime toggles take effect.
      sig { params(event: ActiveSupport::Notifications::Event, config: LogStruct::Configuration).void }
      def self.process_active_storage_event(event, config)
        return unless config.enabled
        return unless config.integrations.enable_activestorage

        # Extract key information from the event
        event_name = event.name.sub(/\.active_storage$/, "")
        service_name = event.payload[:service]
        duration_ms = event.duration

        # Map service events to log event types
        event_type = case event_name
        when "service_upload"
          Event::Upload
        when "service_download"
          Event::Download
        when "service_delete"
          Event::Delete
        when "service_delete_prefixed"
          Event::Delete
        when "service_exist"
          Event::Exist
        when "service_url"
          Event::Url
        when "service_download_chunk"
          Event::Download
        when "service_stream"
          Event::Stream
        when "service_update_metadata"
          Event::Metadata
        else
          Event::Unknown
        end

        # Create structured log event using generated classes.
        # Unknown event types fall through to a Metadata entry so nothing is dropped.
        log_data = case event_type
        when Event::Upload
          Log::ActiveStorage::Upload.new(
            storage: service_name.to_sym,
            file_id: event.payload[:key]&.to_s,
            checksum: event.payload[:checksum]&.to_s,
            duration_ms: duration_ms,
            metadata: event.payload[:metadata],
            filename: event.payload[:filename],
            mime_type: event.payload[:content_type],
            size: event.payload[:byte_size]
          )
        when Event::Download
          Log::ActiveStorage::Download.new(
            storage: service_name.to_sym,
            file_id: event.payload[:key]&.to_s,
            filename: event.payload[:filename],
            range: event.payload[:range],
            duration_ms: duration_ms
          )
        when Event::Delete
          Log::ActiveStorage::Delete.new(
            storage: service_name.to_sym,
            file_id: event.payload[:key]&.to_s
          )
        when Event::Metadata
          Log::ActiveStorage::Metadata.new(
            storage: service_name.to_sym,
            file_id: event.payload[:key]&.to_s,
            metadata: event.payload[:metadata]
          )
        when Event::Exist
          Log::ActiveStorage::Exist.new(
            storage: service_name.to_sym,
            file_id: event.payload[:key]&.to_s,
            exist: event.payload[:exist]
          )
        when Event::Stream
          Log::ActiveStorage::Stream.new(
            storage: service_name.to_sym,
            file_id: event.payload[:key]&.to_s,
            prefix: event.payload[:prefix]
          )
        when Event::Url
          Log::ActiveStorage::Url.new(
            storage: service_name.to_sym,
            file_id: event.payload[:key]&.to_s,
            url: event.payload[:url]
          )
        else
          Log::ActiveStorage::Metadata.new(
            storage: service_name.to_sym,
            file_id: event.payload[:key]&.to_s,
            metadata: event.payload[:metadata]
          )
        end

        # Log structured data
        LogStruct.info(log_data)
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
module LogStruct
  module Integrations
    # Ahoy analytics integration. If Ahoy is present, prepend a small hook to
    # Ahoy::Tracker#track to emit a structured log for analytics events.
    module Ahoy
      extend T::Sig

      # Install the Ahoy::Tracker#track hook.
      # Returns nil when Ahoy is not loaded, true otherwise.
      sig { params(config: LogStruct::Configuration).returns(T.nilable(TrueClass)) }
      def self.setup(config)
        return nil unless defined?(::Ahoy)

        if defined?(::Ahoy::Tracker)
          mod = Module.new do
            extend T::Sig

            # Wraps Ahoy::Tracker#track: delegates to the original method,
            # then emits a structured log describing the analytics event.
            sig { params(name: T.untyped, properties: T.nilable(T::Hash[T.untyped, T.untyped]), options: T.untyped).returns(T.untyped) }
            def track(name, properties = nil, options = {})
              result = super
              begin
                # Emit a lightweight structured log about the analytics event
                LogStruct.info(
                  LogStruct::Log::Ahoy.new(
                    message: "ahoy.track",
                    ahoy_event: T.must(T.let(name, T.nilable(String))),
                    properties: T.let(
                      properties && properties.transform_keys(&:to_sym),
                      T.nilable(T::Hash[Symbol, T.untyped])
                    )
                  )
                )
              rescue => e
                # Never raise from logging; rely on global error handling policies
                LogStruct.handle_exception(e, source: LogStruct::Source::App, context: {integration: :ahoy})
              end
              result
            end
          end

          T.unsafe(::Ahoy::Tracker).prepend(mod)
        end

        true
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
begin
-
1
require "carrierwave"
-
rescue LoadError
-
# CarrierWave gem is not available, integration will be skipped
-
end
-
-
1
module LogStruct
  module Integrations
    # CarrierWave integration for structured logging.
    #
    # Prepends logging hooks onto CarrierWave::Uploader::Base so store and
    # retrieve operations emit structured Upload/Download events.
    module CarrierWave
      extend T::Sig
      extend IntegrationInterface

      # Set up CarrierWave structured logging.
      # Returns nil when CarrierWave is absent or the integration is disabled,
      # true once the uploader base class is patched.
      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
      def self.setup(config)
        return nil unless defined?(::CarrierWave)
        return nil unless config.enabled
        return nil unless config.integrations.enable_carrierwave

        # Patch CarrierWave to add logging
        ::CarrierWave::Uploader::Base.prepend(LoggingMethods)

        true
      end

      # Methods to add logging to CarrierWave operations
      module LoggingMethods
        extend T::Sig
        extend T::Helpers
        requires_ancestor { ::CarrierWave::Uploader::Base }

        # Log file storage operations, timing the underlying store! call.
        sig { params(args: T.untyped).returns(T.untyped) }
        def store!(*args)
          start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
          result = super
          duration = Process.clock_gettime(Process::CLOCK_MONOTONIC) - start_time

          # Extract file information
          file_size = file.size if file.respond_to?(:size)

          # Log the store operation with structured data
          log_data = Log::CarrierWave::Upload.new(
            storage: storage.class.name.split("::").last.downcase.to_sym,
            file_id: identifier,
            filename: file.filename,
            mime_type: file.content_type,
            size: file_size,
            duration_ms: (duration * 1000.0).to_f,
            uploader: self.class.name,
            model: model.class.name,
            mount_point: mounted_as.to_s,
            version: version_name.to_s,
            store_path: store_path,
            extension: file.extension
          )

          ::Rails.logger.info(log_data)
          result
        end

        # Log file retrieve operations.
        sig { params(identifier: T.untyped, args: T.untyped).returns(T.untyped) }
        def retrieve_from_store!(identifier, *args)
          result = super

          # Extract file information if available
          file_size = file.size if file&.respond_to?(:size)

          # Log the retrieve operation with structured data
          log_data = Log::CarrierWave::Download.new(
            storage: storage.class.name.split("::").last.downcase.to_sym,
            file_id: identifier,
            filename: file&.filename,
            mime_type: file&.content_type,
            size: file_size,
            # No duration field on Download event schema
            uploader: self.class.name,
            model: model.class.name,
            mount_point: mounted_as.to_s,
            version: version_name.to_s,
            store_path: store_path,
            extension: file&.extension
          )

          ::Rails.logger.info(log_data)
          result
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# rubocop:disable Sorbet/ConstantsFromStrings
-
1
require_relative "../boot_buffer"
-
1
require "pathname"
-
-
begin
-
1
require "dotenv-rails"
-
rescue LoadError
-
# Dotenv-rails gem is not available, integration will be skipped
-
end
-
-
1
module LogStruct
  module Integrations
    # Dotenv integration: emits structured logs for load/update/save/restore events.
    #
    # Two subscription phases exist: setup_boot buffers events into BootBuffer
    # before the logger is ready, and subscribe! emits directly at runtime.
    # resolve_boot_logs! decides which path wins after user initializers run.
    module Dotenv
      extend T::Sig
      extend IntegrationInterface
      # Holds the original Dotenv::Rails#logger= so resolve_boot_logs! can
      # invoke it when LogStruct's structured path is not used.
      @original_logger_setter = T.let(nil, T.nilable(UnboundMethod))

      # Internal state holder to avoid duplicate subscriptions in a Sorbet-friendly way
      State = ::Struct.new(:subscribed)
      STATE = T.let(State.new(false), State)

      # config is intentionally unused: subscription happens unconditionally so
      # instrumentation can be captured during tests and runtime (see below).
      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
      def self.setup(config)
        # Subscribe regardless of dotenv gem presence so instrumentation via
        # ActiveSupport::Notifications can be captured during tests and runtime.
        subscribe!
        true
      end

      class << self
        extend T::Sig

        # Runtime subscriptions: emit structured logs immediately for each
        # dotenv notification. In the test environment, errors re-raise so
        # failures surface; otherwise they route to the global error handler.
        sig { void }
        def subscribe!
          # Guard against double subscription
          return if STATE.subscribed

          instrumenter = defined?(::ActiveSupport::Notifications) ? ::ActiveSupport::Notifications : nil
          return unless instrumenter

          instrumenter.subscribe("load.dotenv") do |*args|
            # Allow tests to stub Log::Dotenv.new to force an error path
            LogStruct::Log::Dotenv.new
            event = ::ActiveSupport::Notifications::Event.new(*args)
            env = event.payload[:env]
            abs = env.filename
            # Prefer a Rails-root-relative path; fall back to the absolute path
            # when Rails is absent or relative resolution fails.
            file = begin
              if defined?(::Rails) && ::Rails.respond_to?(:root) && ::Rails.root
                Pathname.new(abs).relative_path_from(Pathname.new(::Rails.root.to_s)).to_s
              else
                abs
              end
            rescue
              abs
            end

            ts = event.time ? Time.at(event.time) : Time.now
            LogStruct.info(Log::Dotenv::Load.new(file: file, timestamp: ts))
          rescue => e
            if defined?(::Rails) && ::Rails.respond_to?(:env) && ::Rails.env == "test"
              raise
            else
              LogStruct.handle_exception(e, source: LogStruct::Source::Dotenv)
            end
          end

          instrumenter.subscribe("update.dotenv") do |*args|
            LogStruct::Log::Dotenv.new
            event = ::ActiveSupport::Notifications::Event.new(*args)
            diff = event.payload[:diff]
            # Only variable names are logged, never their values.
            vars = diff.env.keys.map(&:to_s)

            ts = event.time ? Time.at(event.time) : Time.now
            LogStruct.debug(Log::Dotenv::Update.new(vars: vars, timestamp: ts))
          rescue => e
            if defined?(::Rails) && ::Rails.respond_to?(:env) && ::Rails.env == "test"
              raise
            else
              LogStruct.handle_exception(e, source: LogStruct::Source::Dotenv)
            end
          end

          instrumenter.subscribe("save.dotenv") do |*args|
            LogStruct::Log::Dotenv.new
            event = ::ActiveSupport::Notifications::Event.new(*args)
            ts = event.time ? Time.at(event.time) : Time.now
            LogStruct.info(Log::Dotenv::Save.new(snapshot: true, timestamp: ts))
          rescue => e
            if defined?(::Rails) && ::Rails.respond_to?(:env) && ::Rails.env == "test"
              raise
            else
              LogStruct.handle_exception(e, source: LogStruct::Source::Dotenv)
            end
          end

          instrumenter.subscribe("restore.dotenv") do |*args|
            LogStruct::Log::Dotenv.new
            event = ::ActiveSupport::Notifications::Event.new(*args)
            diff = event.payload[:diff]
            vars = diff.env.keys.map(&:to_s)

            ts = event.time ? Time.at(event.time) : Time.now
            LogStruct.info(Log::Dotenv::Restore.new(vars: vars, timestamp: ts))
          rescue => e
            if defined?(::Rails) && ::Rails.respond_to?(:env) && ::Rails.env == "test"
              raise
            else
              LogStruct.handle_exception(e, source: LogStruct::Source::Dotenv)
            end
          end

          STATE.subscribed = true
        end
      end

      # Early boot subscription to buffer structured logs until logger is ready
      @@boot_subscribed = T.let(false, T::Boolean)
      sig { void }
      def self.setup_boot
        return if @@boot_subscribed
        return unless defined?(::ActiveSupport::Notifications)

        # Use Dotenv's own instrumenter when the gem exposes one, otherwise
        # fall back to the global ActiveSupport::Notifications.
        instrumenter = if Object.const_defined?(:Dotenv)
          dm = T.unsafe(Object.const_get(:Dotenv))
          dm.respond_to?(:instrumenter) ? T.unsafe(dm).instrumenter : ::ActiveSupport::Notifications
        else
          ::ActiveSupport::Notifications
        end

        instrumenter.subscribe("load.dotenv") do |*args|
          event = ::ActiveSupport::Notifications::Event.new(*args)
          env = event.payload[:env]
          abs = env.filename
          file = begin
            if defined?(::Rails) && ::Rails.respond_to?(:root) && ::Rails.root
              Pathname.new(abs).relative_path_from(Pathname.new(::Rails.root.to_s)).to_s
            else
              abs
            end
          rescue
            abs
          end
          ts = event.time ? Time.at(event.time) : Time.now
          # Buffer instead of logging: the structured logger may not exist yet.
          LogStruct::BootBuffer.add(Log::Dotenv::Load.new(file: file, timestamp: ts))
        rescue => e
          LogStruct.handle_exception(e, source: LogStruct::Source::Dotenv)
        end

        instrumenter.subscribe("update.dotenv") do |*args|
          event = ::ActiveSupport::Notifications::Event.new(*args)
          diff = event.payload[:diff]
          vars = diff.env.keys.map(&:to_s)
          ts = event.time ? Time.at(event.time) : Time.now
          LogStruct::BootBuffer.add(Log::Dotenv::Update.new(vars: vars, timestamp: ts))
        rescue => e
          LogStruct.handle_exception(e, source: LogStruct::Source::Dotenv)
        end

        instrumenter.subscribe("save.dotenv") do |*args|
          event = ::ActiveSupport::Notifications::Event.new(*args)
          ts = event.time ? Time.at(event.time) : Time.now
          LogStruct::BootBuffer.add(Log::Dotenv::Save.new(snapshot: true, timestamp: ts))
        rescue => e
          LogStruct.handle_exception(e, source: LogStruct::Source::Dotenv)
        end

        instrumenter.subscribe("restore.dotenv") do |*args|
          event = ::ActiveSupport::Notifications::Event.new(*args)
          diff = event.payload[:diff]
          vars = diff.env.keys.map(&:to_s)
          ts = event.time ? Time.at(event.time) : Time.now
          LogStruct::BootBuffer.add(Log::Dotenv::Restore.new(vars: vars, timestamp: ts))
        rescue => e
          LogStruct.handle_exception(e, source: LogStruct::Source::Dotenv)
        end

        @@boot_subscribed = true
      end

      # Intercept Dotenv::Rails#logger= to defer replay until we resolve policy.
      # The patched setter stashes the desired logger instead of installing it;
      # resolve_boot_logs! later decides whether to replay or discard.
      sig { void }
      def self.intercept_logger_setter!
        return unless Object.const_defined?(:Dotenv)
        # Do not intercept when LogStruct is disabled; allow original dotenv replay
        return unless LogStruct.enabled?
        dotenv_mod = T.unsafe(Object.const_get(:Dotenv))
        return unless dotenv_mod.const_defined?(:Rails)
        klass = T.unsafe(dotenv_mod.const_get(:Rails))
        return if klass.instance_variable_defined?(:@_logstruct_replay_patched)

        original = klass.instance_method(:logger=)
        @original_logger_setter = original

        mod = Module.new do
          define_method :logger= do |new_logger|
            # Defer replay: store desired logger, keep ReplayLogger as current
            instance_variable_set(:@logstruct_pending_dotenv_logger, new_logger)
            new_logger
          end

          define_method :logstruct_pending_dotenv_logger do
            instance_variable_get(:@logstruct_pending_dotenv_logger)
          end
        end

        klass.prepend(mod)
        klass.instance_variable_set(:@_logstruct_replay_patched, true)
      end

      # Decide which boot logs to emit after user initializers
      sig { void }
      def self.resolve_boot_logs!
        # If LogStruct is disabled, do not alter dotenv behavior at all
        return unless LogStruct.enabled?
        dotenv_mod = Object.const_defined?(:Dotenv) ? T.unsafe(Object.const_get(:Dotenv)) : nil
        klass = dotenv_mod&.const_defined?(:Rails) ? T.unsafe(dotenv_mod.const_get(:Rails)) : nil

        # Recover the logger stashed by the intercepted setter, if any.
        pending_logger = nil
        railtie_instance = nil
        if klass&.respond_to?(:instance)
          railtie_instance = klass.instance
          if railtie_instance.respond_to?(:logstruct_pending_dotenv_logger)
            pending_logger = T.unsafe(railtie_instance).logstruct_pending_dotenv_logger
          end
        end

        if LogStruct.enabled? && LogStruct.config.integrations.enable_dotenv
          # Structured path
          if pending_logger && railtie_instance
            # Clear any buffered original logs
            current_logger = railtie_instance.logger if railtie_instance.respond_to?(:logger)
            if current_logger && current_logger.class.name.end_with?("ReplayLogger")
              begin
                logs = current_logger.instance_variable_get(:@logs)
                logs.clear if logs.respond_to?(:clear)
              rescue
                # best effort
              end
            end
            railtie_instance.config.dotenv.logger = pending_logger
          end

          # Detach original subscriber and subscribe runtime structured
          if dotenv_mod&.const_defined?(:LogSubscriber)
            T.unsafe(dotenv_mod.const_get(:LogSubscriber)).detach_from(:dotenv)
          end
          LogStruct::Integrations::Dotenv.subscribe!

          require_relative "../boot_buffer"
          LogStruct::BootBuffer.flush
        else
          # Original path: replay dotenv lines, drop structured buffer
          if railtie_instance && @original_logger_setter
            setter = @original_logger_setter
            new_logger = pending_logger
            # Mirror Rails' RAILS_LOG_TO_STDOUT convention when no logger was
            # pending: build a tagged stdout logger for the replay.
            if new_logger.nil? && ENV["RAILS_LOG_TO_STDOUT"].to_s.strip != ""
              require "logger"
              require "active_support/tagged_logging"
              new_logger = ActiveSupport::TaggedLogging.new(::Logger.new($stdout)).tagged("dotenv")
            end
            setter.bind_call(railtie_instance, new_logger) if new_logger
          end
          require_relative "../boot_buffer"
          LogStruct::BootBuffer.clear
        end
      end
    end
  end
end

# Subscribe immediately to capture earliest dotenv events into BootBuffer
LogStruct::Integrations::Dotenv.setup_boot
-
-
# rubocop:enable Sorbet/ConstantsFromStrings
-
# typed: strict
-
# frozen_string_literal: true
-
-
begin
-
1
require "good_job"
-
rescue LoadError
-
# GoodJob gem is not available, integration will be skipped
-
end
-
-
1
require_relative "good_job/logger" if defined?(::GoodJob)
-
1
require_relative "good_job/log_subscriber" if defined?(::GoodJob)
-
-
1
module LogStruct
  module Integrations
    # GoodJob integration for structured logging
    #
    # GoodJob is a PostgreSQL-based ActiveJob backend that provides reliable,
    # scalable job processing for Rails applications. This integration provides
    # structured logging for all GoodJob operations.
    #
    # ## Features:
    # - Structured logging for job execution lifecycle
    # - Error tracking and retry logging
    # - Performance metrics and timing data
    # - Database operation logging
    # - Thread and process tracking
    # - Custom GoodJob logger with LogStruct formatting
    #
    # ## Integration Points:
    # - Replaces GoodJob.logger with LogStruct-compatible logger
    # - Subscribes to GoodJob's ActiveSupport notifications
    # - Captures job execution events, errors, and performance metrics
    # - Logs database operations and connection information
    #
    # ## Configuration:
    # The integration is automatically enabled when GoodJob is detected and
    # LogStruct configuration allows it. It can be disabled by setting:
    #
    # ```ruby
    # config.integrations.enable_goodjob = false
    # ```
    module GoodJob
      extend T::Sig
      extend IntegrationInterface

      # Set up GoodJob structured logging
      #
      # This method configures GoodJob to use LogStruct's structured logging
      # by replacing the default logger and subscribing to job events.
      #
      # @param config [LogStruct::Configuration] The LogStruct configuration
      # @return [Boolean, nil] Returns true if setup was successful, nil if skipped
      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
      def self.setup(config)
        return nil unless defined?(::GoodJob)
        return nil unless config.enabled
        return nil unless config.integrations.enable_goodjob

        # Replace GoodJob's logger with our structured logger
        configure_logger

        # Subscribe to GoodJob's ActiveSupport notifications
        subscribe_to_notifications

        true
      end

      # Configure GoodJob to use LogStruct's structured logger
      sig { void }
      def self.configure_logger
        return unless defined?(::GoodJob)

        # BUG FIX: inside `module GoodJob` the bare constant `GoodJob` resolves
        # lexically to this integration module (LogStruct::Integrations::GoodJob),
        # not the gem — so `respond_to?(:logger=)` was always false and this
        # method silently did nothing. Use the top-level ::GoodJob constant.
        goodjob_module = T.unsafe(::GoodJob)

        # Replace GoodJob.logger with our structured logger if GoodJob is available
        if goodjob_module.respond_to?(:logger=)
          goodjob_module.logger = LogStruct::Integrations::GoodJob::Logger.new("GoodJob")
        end

        # Configure error handling for thread errors if GoodJob supports it
        if goodjob_module.respond_to?(:on_thread_error=)
          goodjob_module.on_thread_error = ->(exception) do
            log_entry = LogStruct::Log::GoodJob::Error.new(
              error_class: exception.class.name,
              error_message: exception.message,
              backtrace: exception.backtrace,
              process_id: ::Process.pid,
              thread_id: Thread.current.object_id.to_s(36)
            )
            goodjob_module.logger.error(log_entry)
          end
        end
      end

      # Subscribe to GoodJob's ActiveSupport notifications
      sig { void }
      def self.subscribe_to_notifications
        return unless defined?(::GoodJob)

        # Subscribe to our custom log subscriber for GoodJob events
        LogStruct::Integrations::GoodJob::LogSubscriber.attach_to :good_job
      end

      private_class_method :configure_logger
      private_class_method :subscribe_to_notifications
    end
  end
end
-
# typed: strict
# frozen_string_literal: true

begin
  require "active_support/log_subscriber"
rescue LoadError
  # ActiveSupport is not available, log subscriber will be skipped
end

require_relative "../../log/good_job"
require_relative "../../enums/event"
require_relative "../../enums/level"

module LogStruct
  module Integrations
    module GoodJob
      # LogSubscriber for GoodJob ActiveSupport notifications
      #
      # This subscriber captures GoodJob's ActiveSupport notifications and converts
      # them into structured LogStruct::Log::GoodJob entries. It provides detailed
      # logging for job lifecycle events, performance metrics, and error tracking.
      #
      # ## Supported Events:
      # - job.enqueue - Job queued for execution
      # - job.start - Job execution started
      # - job.finish - Job completed successfully
      # - job.error - Job failed with error
      # - job.retry - Job retry initiated
      # - job.schedule - Job scheduled for future execution
      #
      # ## Event Data Captured:
      # - Job identification (ID, class, queue)
      # - Execution context (arguments, priority, scheduled time)
      # - Performance metrics (execution time, wait time)
      # - Error information (class, message, backtrace)
      # - Process and thread information
      class LogSubscriber < ::ActiveSupport::LogSubscriber
        extend T::Sig

        # Job enqueued event.
        # Emits a Log::GoodJob::Enqueue entry at info level.
        sig { params(event: ::ActiveSupport::Notifications::Event).void }
        def enqueue(event)
          payload = T.let(event.payload, T::Hash[Symbol, T.untyped])
          job = payload[:job]
          base_fields = build_base_fields(job, payload)
          # NOTE(review): on Rails >= 6.1 Event#time may be a CLOCK_MONOTONIC
          # float, in which case Time.at produces a wall-clock-meaningless
          # timestamp — confirm against the Rails version in use.
          ts = event.time ? Time.at(event.time) : Time.now

          logger.info(Log::GoodJob::Enqueue.new(
            **base_fields.to_kwargs,
            # to_i truncates sub-second precision of the scheduled time
            scheduled_at: (job&.scheduled_at ? Time.at(job.scheduled_at.to_i) : nil),
            duration_ms: event.duration.to_f,
            enqueue_caller: job&.enqueue_caller_location,
            timestamp: ts
          ))
        end

        # Job execution started event.
        # Emits a Log::GoodJob::Start entry including queue wait time (ms).
        sig { params(event: ::ActiveSupport::Notifications::Event).void }
        def start(event)
          payload = T.let(event.payload, T::Hash[Symbol, T.untyped])
          job = payload[:job]
          # Payload key differs across GoodJob versions; accept both.
          execution = payload[:execution] || payload[:good_job_execution]
          base_fields = build_base_fields(job, payload)
          ts = event.time ? Time.at(event.time) : Time.now

          logger.info(Log::GoodJob::Start.new(
            **base_fields.to_kwargs,
            # Prefer the execution's own wait_time; fall back to computing it
            # from created_at/performed_at. Seconds are converted to ms.
            wait_ms: begin
              wt = execution&.wait_time || calculate_wait_time(execution)
              wt ? (wt.to_f * 1000.0) : nil
            end,
            scheduled_at: (job&.scheduled_at ? Time.at(job.scheduled_at.to_i) : nil),
            process_id: ::Process.pid,
            thread_id: Thread.current.object_id.to_s(36),
            timestamp: ts
          ))
        end

        # Job completed successfully event.
        # Emits a Log::GoodJob::Finish entry with duration and finish time.
        sig { params(event: ::ActiveSupport::Notifications::Event).void }
        def finish(event)
          payload = T.let(event.payload, T::Hash[Symbol, T.untyped])
          job = payload[:job]
          base_fields = build_base_fields(job, payload)
          # timestamp is the event start; finished_at is the event end.
          start_ts = event.time ? Time.at(event.time) : Time.now
          end_ts = event.end ? Time.at(event.end) : Time.now

          logger.info(Log::GoodJob::Finish.new(
            **base_fields.to_kwargs,
            duration_ms: event.duration.to_f,
            finished_at: end_ts,
            process_id: ::Process.pid,
            thread_id: Thread.current.object_id.to_s(36),
            result: payload[:result]&.to_s,
            timestamp: start_ts
          ))
        end

        # Job failed with error event.
        # Emits a Log::GoodJob::Error entry at error level with exception details.
        sig { params(event: ::ActiveSupport::Notifications::Event).void }
        def error(event)
          payload = T.let(event.payload, T::Hash[Symbol, T.untyped])
          job = payload[:job]
          execution = payload[:execution] || payload[:good_job_execution]
          # Payload key for the raised error differs by emitter; accept both.
          exception = payload[:exception] || payload[:error]
          ts = event.time ? Time.at(event.time) : Time.now
          base_fields = build_base_fields(job, payload)

          logger.error(Log::GoodJob::Error.new(
            **base_fields.to_kwargs,
            exception_executions: execution&.exception_executions,
            error_class: exception&.class&.name,
            error_message: exception&.message,
            backtrace: exception&.backtrace,
            duration_ms: event.duration.to_f,
            process_id: ::Process.pid,
            thread_id: Thread.current.object_id.to_s(36),
            timestamp: ts
          ))
        end

        # Job scheduled for future execution event.
        # Emits a Log::GoodJob::Schedule entry including priority and cron key.
        sig { params(event: ::ActiveSupport::Notifications::Event).void }
        def schedule(event)
          payload = T.let(event.payload, T::Hash[Symbol, T.untyped])
          job = payload[:job]
          base_fields = build_base_fields(job, payload)
          ts = event.time ? Time.at(event.time) : Time.now

          logger.info(Log::GoodJob::Schedule.new(
            **base_fields.to_kwargs,
            scheduled_at: (job&.scheduled_at ? Time.at(job.scheduled_at.to_i) : nil),
            priority: job&.priority,
            cron_key: job&.cron_key,
            duration_ms: event.duration.to_f,
            timestamp: ts
          ))
        end

        private

        # Build BaseFields from job + payload (execution).
        # All accessors are nil-safe: any of job/execution may be absent
        # depending on which notification produced the payload.
        sig { params(job: T.untyped, payload: T::Hash[Symbol, T.untyped]).returns(Log::GoodJob::BaseFields) }
        def build_base_fields(job, payload)
          execution = payload[:execution] || payload[:good_job_execution]
          Log::GoodJob::BaseFields.new(
            job_id: job&.job_id,
            job_class: job&.job_class,
            queue_name: job&.queue_name&.to_sym,
            arguments: job&.arguments,
            executions: execution&.executions
          )
        end

        # Calculate wait time (seconds) from job creation to execution start.
        # Returns nil when the execution object lacks the needed attributes,
        # either timestamp is missing, or the subtraction raises.
        sig { params(execution: T.untyped).returns(T.nilable(Float)) }
        def calculate_wait_time(execution)
          return nil unless execution.respond_to?(:created_at)
          return nil unless execution.respond_to?(:performed_at)
          return nil unless execution.created_at && execution.performed_at

          (execution.performed_at - execution.created_at).to_f
        rescue
          # Return nil if calculation fails
          nil
        end

        # Get the appropriate logger for GoodJob events
        sig { returns(T.untyped) }
        def logger
          # Always use Rails.logger - in production it will be configured by the integration setup,
          # in tests it will be set up by the test harness
          Rails.logger
        end
      end
    end
  end
end
-
# typed: strict
# frozen_string_literal: true

require_relative "../../semantic_logger/logger"
require_relative "../../log/good_job"
require_relative "../../enums/source"

module LogStruct
  module Integrations
    module GoodJob
      # Custom Logger for GoodJob that creates LogStruct::Log::GoodJob entries
      #
      # This logger extends LogStruct's SemanticLogger to provide optimal logging
      # performance while creating structured log entries specifically for GoodJob
      # operations and events.
      #
      # ## Benefits:
      # - High-performance logging with SemanticLogger backend
      # - Structured GoodJob-specific log entries
      # - Automatic job context capture
      # - Thread and process information
      # - Performance metrics and timing data
      #
      # ## Usage:
      # This logger is automatically configured when the GoodJob integration
      # is enabled. It replaces GoodJob.logger to provide structured logging
      # for all GoodJob operations.
      class Logger < LogStruct::SemanticLogger::Logger
        extend T::Sig

        # Override log methods to create GoodJob-specific log structs.
        # Each severity method wraps the plain message in Log::GoodJob::Log,
        # enriched with job context taken from a thread-local set by GoodJob.
        %i[debug info warn error fatal].each do |level|
          define_method(level) do |message = nil, payload = nil, &block|
            # Extract basic job context from thread-local variables.
            # NOTE(review): assumes GoodJob stores the current execution in
            # Thread.current[:good_job_execution] — verify against the GoodJob
            # version in use; all attribute reads are respond_to?-guarded.
            job_context = {}

            if Thread.current[:good_job_execution]
              execution = Thread.current[:good_job_execution]
              if execution.respond_to?(:job_id)
                job_context[:job_id] = execution.job_id
                job_context[:job_class] = execution.job_class if execution.respond_to?(:job_class)
                job_context[:queue_name] = execution.queue_name if execution.respond_to?(:queue_name)
                job_context[:executions] = execution.executions if execution.respond_to?(:executions)
                job_context[:scheduled_at] = execution.scheduled_at if execution.respond_to?(:scheduled_at)
                job_context[:priority] = execution.priority if execution.respond_to?(:priority)
              end
            end

            log_struct = Log::GoodJob::Log.new(
              # Lazy message blocks are evaluated here, once, and only when no
              # positional message was given; "" is the fallback for neither.
              message: message || (block ? block.call : ""),
              process_id: ::Process.pid,
              thread_id: Thread.current.object_id.to_s(36),
              job_id: job_context[:job_id],
              job_class: job_context[:job_class],
              queue_name: job_context[:queue_name],
              executions: job_context[:executions],
              scheduled_at: job_context[:scheduled_at],
              priority: job_context[:priority]
            )

            # Forward to SemanticLogger with the block stripped (&nil): the
            # message has already been resolved into the struct above, so the
            # superclass must not evaluate the block a second time.
            super(log_struct, payload, &nil)
          end
        end
      end
    end
  end
end
-
# typed: strict
# frozen_string_literal: true

require "action_dispatch/middleware/host_authorization"
require_relative "../enums/event"
require_relative "../log/security/blocked_host"

module LogStruct
  module Integrations
    # Host Authorization integration for structured logging of blocked hosts.
    #
    # Installs a custom response_app into Rails' host_authorization config so
    # that every request rejected by ActionDispatch::HostAuthorization is
    # logged as a structured Log::Security::BlockedHost entry before the
    # static 403 page is returned.
    module HostAuthorization
      extend T::Sig
      extend IntegrationInterface

      # Static 403 page body served for blocked hosts.
      RESPONSE_HTML = T.let(
        "<html><head><title>Blocked Host</title></head><body>" \
        "<h1>Blocked Host</h1>" \
        "<p>This host is not permitted to access this application.</p>" \
        "<p>If you are the administrator, check your configuration.</p>" \
        "</body></html>",
        String
      )
      # Headers computed once at load time (Content-Length matches RESPONSE_HTML).
      RESPONSE_HEADERS = T.let(
        {
          "Content-Type" => "text/html",
          "Content-Length" => RESPONSE_HTML.bytesize.to_s
        }.freeze,
        T::Hash[String, String]
      )
      # HTTP status returned for blocked hosts.
      FORBIDDEN_STATUS = T.let(403, Integer)

      # Set up host authorization logging.
      #
      # @param config [LogStruct::Configuration] The LogStruct configuration
      # @return [Boolean, nil] true if installed, nil if disabled/skipped
      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
      def self.setup(config)
        return nil unless config.enabled
        return nil unless config.integrations.enable_host_authorization

        # Define the response app as a separate variable to fix block alignment.
        # Rack signature: env -> [status, headers, body].
        response_app = lambda do |env|
          request = ::ActionDispatch::Request.new(env)
          # Include the blocked hosts app configuration in the log entry
          # This can be helpful later when reviewing logs.
          blocked_hosts = env["action_dispatch.blocked_hosts"]

          # Build allowed_hosts array (respond_to? guards against version
          # differences in what ActionDispatch stores in the env).
          allowed_hosts_array = T.let(nil, T.nilable(T::Array[String]))
          if blocked_hosts.respond_to?(:allowed_hosts)
            allowed_hosts_array = blocked_hosts.allowed_hosts
          end

          # Get allow_ip_hosts value
          allow_ip_hosts_value = T.let(nil, T.nilable(T::Boolean))
          if blocked_hosts.respond_to?(:allow_ip_hosts)
            allow_ip_hosts_value = blocked_hosts.allow_ip_hosts
          end

          # Create structured log entry for blocked host
          log_entry = LogStruct::Log::Security::BlockedHost.new(
            message: "Blocked host detected: #{request.host}",
            blocked_host: request.host,
            path: request.path,
            http_method: request.method,
            source_ip: request.ip,
            user_agent: request.user_agent,
            referer: request.referer,
            request_id: request.request_id,
            x_forwarded_for: request.x_forwarded_for,
            # Empty allowed-hosts list carries no information, so log nil instead
            allowed_hosts: allowed_hosts_array&.empty? ? nil : allowed_hosts_array,
            allow_ip_hosts: allow_ip_hosts_value
          )

          # Log the blocked host
          LogStruct.warn(log_entry)

          # Use pre-defined headers and response if we are only logging or reporting
          # Dup the headers so they can be modified by downstream middleware
          [FORBIDDEN_STATUS, RESPONSE_HEADERS.dup, [RESPONSE_HTML]]
        end

        # Merge our response_app into existing host_authorization config to
        # preserve any user-configured excludes; non-Hash values are replaced.
        existing = Rails.application.config.host_authorization
        unless existing.is_a?(Hash)
          existing = {}
        end
        existing = existing.dup
        existing[:response_app] = response_app
        Rails.application.config.host_authorization = existing

        true
      end
    end
  end
end
-
# typed: strict
# frozen_string_literal: true

module LogStruct
  module Integrations
    # Interface that all integrations must implement.
    # This ensures consistent behavior across all integration modules:
    # each integration module `extend`s this interface and implements
    # `setup` as a module-level method (the abstract instance method here
    # becomes a singleton method on the extending module).
    module IntegrationInterface
      extend T::Sig
      extend T::Helpers

      # This is an interface that should be implemented by all integration modules
      interface!

      # All integrations must implement this method to set up their functionality.
      #
      # @param config [LogStruct::Configuration] the active configuration
      # @return [Boolean, nil] true if setup was successful, nil if skipped
      #   (disabled in config, or the integrated gem is not loaded)
      sig { abstract.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
      def setup(config); end
    end
  end
end
-
# typed: strict
# frozen_string_literal: true

begin
  require "lograge"
rescue LoadError
  # Lograge gem is not available, integration will be skipped
end

module LogStruct
  module Integrations
    # Lograge integration for structured request logging
    module Lograge
      extend IntegrationInterface

      class << self
        extend T::Sig

        # Set up lograge for structured request logging.
        #
        # @param logstruct_config [LogStruct::Configuration] active configuration
        # @return [Boolean, nil] true if configured, nil if disabled/unavailable
        sig { override.params(logstruct_config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
        def setup(logstruct_config)
          return nil unless defined?(::Lograge)
          return nil unless logstruct_config.enabled
          return nil unless logstruct_config.integrations.enable_lograge

          configure_lograge(logstruct_config)

          true
        end

        # Enable lograge and install our formatter / custom_options hooks.
        sig { params(logstruct_config: LogStruct::Configuration).void }
        def configure_lograge(logstruct_config)
          ::Rails.application.configure do
            config.lograge.enabled = true
            # Use a raw formatter that just returns the log struct.
            # The struct is converted to JSON by our Formatter (after filtering, etc.)
            config.lograge.formatter = T.let(
              lambda do |data|
                # Coerce common fields to expected types; values lacking the
                # coercion method are passed through unchanged.
                status = ((s = data[:status]) && s.respond_to?(:to_i)) ? s.to_i : s
                duration_ms = ((d = data[:duration]) && d.respond_to?(:to_f)) ? d.to_f : d
                view = ((v = data[:view]) && v.respond_to?(:to_f)) ? v.to_f : v
                db = ((b = data[:db]) && b.respond_to?(:to_f)) ? b.to_f : b

                params = data[:params]
                params = params.deep_symbolize_keys if params&.respond_to?(:deep_symbolize_keys)

                Log::Request.new(
                  http_method: data[:method]&.to_s,
                  path: data[:path]&.to_s,
                  format: data[:format]&.to_sym,
                  controller: data[:controller]&.to_s,
                  action: data[:action]&.to_s,
                  status: status,
                  duration_ms: duration_ms,
                  view: view,
                  database: db,
                  params: params,
                  timestamp: Time.now
                )
              end,
              T.proc.params(hash: T::Hash[Symbol, T.untyped]).returns(Log::Request)
            )

            # Add custom options to lograge
            config.lograge.custom_options = lambda do |event|
              Integrations::Lograge.lograge_default_options(event)
            end
          end
        end
        # BUGFIX: the original code had a bare `private_class_method` call with
        # no arguments before this method — a no-op, so configure_lograge was
        # silently left public. Mark it private explicitly. Note that
        # lograge_default_options must stay public: the custom_options lambda
        # above calls it with an explicit receiver at request time.
        private :configure_lograge

        # Build the default custom-options hash merged into each request log.
        #
        # @param event [ActiveSupport::Notifications::Event] the request event
        # @return [Hash] request_id/host/source_ip plus params, headers and
        #   any application-provided custom options
        sig { params(event: ActiveSupport::Notifications::Event).returns(T::Hash[Symbol, T.untyped]) }
        def lograge_default_options(event)
          # Extract essential fields from the payload
          options = event.payload.slice(
            :request_id,
            :host,
            :source_ip
          ).compact

          # Routing keys are already captured as controller/action fields
          if event.payload[:params].present?
            options[:params] = event.payload[:params].except("controller", "action")
          end

          # Process headers if available
          process_headers(event, options)

          # Apply custom options from application if provided
          apply_custom_options(event, options)

          options
        end

        # Copy a small allowlist of request headers into the options hash.
        # Mutates `options` in place; no-op when the payload has no headers.
        sig { params(event: ActiveSupport::Notifications::Event, options: T::Hash[Symbol, T.untyped]).void }
        def process_headers(event, options)
          headers = event.payload[:headers]
          return if headers.blank?

          options[:user_agent] = headers["HTTP_USER_AGENT"]
          options[:content_type] = headers["CONTENT_TYPE"]
          options[:accept] = headers["HTTP_ACCEPT"]
        end

        # Apply custom options from the application's configuration.
        # The configured proc may mutate the options hash directly.
        sig { params(event: ActiveSupport::Notifications::Event, options: T::Hash[Symbol, T.untyped]).void }
        def apply_custom_options(event, options)
          custom_options_proc = LogStruct.config.integrations.lograge_custom_options
          return unless custom_options_proc&.respond_to?(:call)

          # Call the proc with the event and options
          # The proc can modify the options hash directly
          custom_options_proc.call(event, options)
        end
      end
    end
  end
end
-
# typed: strict
# frozen_string_literal: true

module LogStruct
  module Integrations
    # Puma integration: converts Puma's plain-text lifecycle output into
    # structured Start/Shutdown logs by patching Puma's log writers and the
    # Rack handler, and by parsing the banner lines Puma prints at boot.
    module Puma
      extend T::Sig
      extend T::Helpers

      # Mutable module-level state shared by the patches below.
      #   installed                - patches applied exactly once
      #   boot_emitted             - boot event flag (boot log itself is a no-op now)
      #   shutdown_emitted         - guards against duplicate shutdown logs
      #   handler_pending_started  - Rack handler saw options but start not yet logged
      #   start_info               - fields accumulated from parsed banner lines
      # (state_reset! also sets a :started_emitted key not present in this literal.)
      STATE = T.let(
        {
          installed: false,
          boot_emitted: false,
          shutdown_emitted: false,
          handler_pending_started: false,
          start_info: {
            mode: nil,
            puma_version: nil,
            puma_codename: nil,
            ruby_version: nil,
            min_threads: nil,
            max_threads: nil,
            environment: nil,
            pid: nil,
            listening: []
          }
        },
        T::Hash[Symbol, T.untyped]
      )

      class << self
        extend T::Sig

        # Install patches and (for `rails server` runs) emit deterministic
        # start events and register shutdown hooks.
        # @return [Boolean, nil] true on success, nil if disabled or Puma absent
        sig { params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
        def setup(config)
          return nil unless config.integrations.enable_puma

          # No stdout wrapping here.

          # Ensure Puma is loaded so we can patch its classes
          begin
            require "puma"
          rescue LoadError
            # If Puma isn't available, skip setup
            return nil
          end

          install_patches!

          if ARGV.include?("server")
            # Emit deterministic boot/started events based on CLI args
            begin
              # Scan ARGV for -p/--port/--port=NNNN to infer the listen address
              port = T.let(nil, T.nilable(String))
              ARGV.each_with_index do |arg, idx|
                if arg == "-p" || arg == "--port"
                  port = ARGV[idx + 1]
                  break
                elsif arg.start_with?("--port=")
                  port = arg.split("=", 2)[1]
                  break
                end
              end
              si = T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])
              si[:pid] ||= Process.pid
              si[:environment] ||= ((defined?(::Rails) && ::Rails.respond_to?(:env)) ? ::Rails.env : nil)
              si[:mode] ||= "single"
              # Only add the inferred address if no address with this port was parsed
              if port && !T.cast(si[:listening], T::Array[T.untyped]).any? { |a| a.to_s.include?(":" + port.to_s) }
                si[:listening] = ["tcp://127.0.0.1:#{port}"]
              end
              emit_boot_if_needed!
              unless STATE[:started_emitted]
                emit_started!
                STATE[:started_emitted] = true
              end
            rescue => e
              handle_integration_error(e)
            end
            begin
              # NOTE(review): logging inside a Signal.trap handler can raise
              # (trap context restrictions) — confirm this path is exercised.
              %w[TERM INT].each do |sig|
                Signal.trap(sig) { emit_shutdown!(sig) }
              end
            rescue => e
              handle_integration_error(e)
            end
            at_exit do
              emit_shutdown!("Exiting")
            rescue => e
              handle_integration_error(e)
            end

            # Connection-based readiness: emit started once port is accepting connections
            # No background threads or sockets; rely solely on parsing Puma output
          end
          true
        end

        # Prepend our patch modules into Puma::LogWriter, Puma::Events and
        # Rack::Handler::Puma (each only if the constant exists). Idempotent.
        sig { void }
        def install_patches!
          return if STATE[:installed]
          STATE[:installed] = true

          state_reset!

          begin
            begin
              # NOTE(review): `rescue => e` does not catch LoadError
              # (ScriptError) — harmless today because setup() requires puma
              # first, but this guard is ineffective if called standalone.
              require "puma"
            rescue => e
              handle_integration_error(e)
            end
            puma_mod = ::Object.const_defined?(:Puma) ? T.unsafe(::Object.const_get(:Puma)) : nil # rubocop:disable Sorbet/ConstantsFromStrings
            # rubocop:disable Sorbet/ConstantsFromStrings
            if puma_mod&.const_defined?(:LogWriter)
              T.unsafe(::Object.const_get("Puma::LogWriter")).prepend(LogWriterPatch)
            end
            if puma_mod&.const_defined?(:Events)
              ev = T.unsafe(::Object.const_get("Puma::Events"))
              ev.prepend(EventsPatch)
            end
            # Patch Rack::Handler::Puma.run to emit lifecycle logs using options
            if ::Object.const_defined?(:Rack)
              rack_mod = T.unsafe(::Object.const_get(:Rack))
              if rack_mod.const_defined?(:Handler)
                handler_mod = T.unsafe(rack_mod.const_get(:Handler))
                if handler_mod.const_defined?(:Puma)
                  handler = T.unsafe(handler_mod.const_get(:Puma))
                  handler.singleton_class.prepend(RackHandlerPatch)
                end
              end
            end
            # Avoid patching CLI/Server; rely on log parsing
            # Avoid patching CLI to minimize version-specific risks
            # rubocop:enable Sorbet/ConstantsFromStrings
          rescue => e
            handle_integration_error(e)
          end

          # Rely on Puma patches to observe lines
        end

        # Re-raise in non-server test runs (so specs fail loudly); otherwise
        # route the exception through LogStruct's error handling.
        sig { params(e: StandardError).void }
        def handle_integration_error(e)
          server_mode = ::LogStruct.server_mode?
          if defined?(::Rails) && ::Rails.respond_to?(:env) && ::Rails.env.test? && !server_mode
            raise e
          else
            LogStruct.handle_exception(e, source: Source::Puma)
          end
        end

        # No stdout interception

        # Reset all parse/emission state (used on install and by tests).
        sig { void }
        def state_reset!
          STATE[:boot_emitted] = false
          STATE[:shutdown_emitted] = false
          STATE[:started_emitted] = false
          STATE[:handler_pending_started] = false
          STATE[:start_info] = {
            mode: nil,
            puma_version: nil,
            puma_codename: nil,
            ruby_version: nil,
            min_threads: nil,
            max_threads: nil,
            environment: nil,
            pid: nil,
            listening: []
          }
        end

        # Parse one line of Puma output. Returns true when the line was
        # consumed (recognized and/or suppressed) so callers skip forwarding
        # it to the original writer; false means "pass it through".
        sig { params(line: String).returns(T::Boolean) }
        def process_line(line)
          l = line.to_s.strip
          return false if l.empty?

          # Suppress non-JSON rails banners
          return true if l.start_with?("=> ")

          # Ignore boot line
          # NOTE(review): unreachable — the "=> " check above already matches
          # every "=> Booting Puma" line.
          return true if l.start_with?("=> Booting Puma")

          if l.start_with?("Puma starting in ")
            # Example: Puma starting in single mode...
            T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])[:mode] = l.sub("Puma starting in ", "").sub(" mode...", "")
            return true
          end

          # e.g. * Puma version: 6.4.2 ("The Eagle of Durango")
          if (m = l.match(/^(?:\*\s*)?Puma version: (\S+)(?:.*"([^\"]+)")?/))
            T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])[:puma_version] = m[1]
            if m[2]
              T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])[:puma_codename] = m[2]
            end
            return true
          end

          if (m = l.match(/^\* Ruby version: (.+)$/))
            T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])[:ruby_version] = m[1]
            return true
          end

          if (m = l.match(/^(?:\*\s*)?Min threads: (\d+)/))
            T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])[:min_threads] = m[1].to_i
            return true
          end

          if (m = l.match(/^(?:\*\s*)?Max threads: (\d+)/))
            T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])[:max_threads] = m[1].to_i
            return true
          end

          if (m = l.match(/^(?:\*\s*)?Environment: (\S+)/))
            T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])[:environment] = m[1]
            return true
          end

          if (m = l.match(/^(?:\*\s*)?PID:\s+(\d+)/))
            T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])[:pid] = m[1].to_i
            return true
          end

          if (m = l.match(/^\*?\s*Listening on (.+)$/))
            si = T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])
            list = T.cast(si[:listening], T::Array[T.untyped])
            address = T.must(m[1])
            list << address unless list.include?(address)
            # Emit started when we see the first listening address
            if !STATE[:started_emitted]
              emit_started!
              STATE[:started_emitted] = true
            end
            return true
          end

          if l == "Use Ctrl-C to stop"
            si = T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])
            # Fallback: if no listening address captured yet, infer from ARGV
            if T.cast(si[:listening], T::Array[T.untyped]).empty?
              begin
                port = T.let(nil, T.untyped)
                ARGV.each_with_index do |arg, idx|
                  if arg == "-p" || arg == "--port"
                    port = ARGV[idx + 1]
                    break
                  elsif arg.start_with?("--port=")
                    port = arg.split("=", 2)[1]
                    break
                  end
                end
                if port
                  si[:listening] << "tcp://127.0.0.1:#{port}"
                end
              rescue => e
                handle_integration_error(e)
              end
            end
            if !STATE[:started_emitted]
              emit_started!
              STATE[:started_emitted] = true
            end
            # Deliberately NOT consumed: the Ctrl-C hint is still forwarded
            return false
          end

          if l.start_with?("- Gracefully stopping")
            emit_shutdown!(l)
            return true
          end

          if l.start_with?("=== puma shutdown:")
            emit_shutdown!(l)
            return true
          end

          if l == "- Goodbye!"
            # Swallow
            return true
          end

          if l == "Exiting"
            emit_shutdown!(l)
            return true
          end

          false
        end

        # Historical hook; boot logs are no longer emitted.
        sig { void }
        def emit_boot_if_needed!
          # Intentionally no-op: we no longer emit a boot log
          STATE[:boot_emitted] = true
        end

        # No server hooks; rely on parsing only

        # Emit a structured Start log from the accumulated start_info.
        # Caller is responsible for setting STATE[:started_emitted].
        sig { void }
        def emit_started!
          si = T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])
          log = Log::Puma::Start.new(
            mode: T.cast(si[:mode], T.nilable(String)),
            puma_version: T.cast(si[:puma_version], T.nilable(String)),
            puma_codename: T.cast(si[:puma_codename], T.nilable(String)),
            ruby_version: T.cast(si[:ruby_version], T.nilable(String)),
            min_threads: T.cast(si[:min_threads], T.nilable(Integer)),
            max_threads: T.cast(si[:max_threads], T.nilable(Integer)),
            environment: T.cast(si[:environment], T.nilable(String)),
            process_id: T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])[:pid] || Process.pid,
            listening_addresses: T.cast(T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])[:listening], T::Array[String]),
            level: Level::Info,
            timestamp: Time.now
          )
          LogStruct.info(log)
          STATE[:handler_pending_started] = false
          # Only use LogStruct; SemanticLogger routes to STDOUT in test
        end

        # Emit a structured Shutdown log exactly once (idempotent via
        # STATE[:shutdown_emitted]). The message argument is ignored.
        sig { params(_message: String).void }
        def emit_shutdown!(_message)
          return if STATE[:shutdown_emitted]
          STATE[:shutdown_emitted] = true
          log = Log::Puma::Shutdown.new(
            process_id: T.cast(STATE[:start_info], T::Hash[Symbol, T.untyped])[:pid] || Process.pid,
            level: Level::Info,
            timestamp: Time.now
          )
          LogStruct.info(log)
          # Only use LogStruct; SemanticLogger routes to STDOUT in test
          # Let SemanticLogger appender write to STDOUT
        end
      end

      # STDOUT interception is handled globally via StdoutFilter; keep Puma patches minimal

      # Patch Puma::LogWriter to intercept log writes.
      # Each method feeds lines through process_line and only forwards to the
      # original writer when the line was not consumed.
      module LogWriterPatch
        extend T::Sig

        sig { params(msg: String).returns(T.untyped) }
        def log(msg)
          consumed = ::LogStruct::Integrations::Puma.process_line(msg)
          super unless consumed
        end

        # Multi-line writes: forward the whole chunk only if NO line was consumed.
        sig { params(msg: String).returns(T.untyped) }
        def write(msg)
          any_consumed = T.let(false, T::Boolean)
          msg.to_s.each_line do |l|
            any_consumed = true if ::LogStruct::Integrations::Puma.process_line(l)
          end
          super unless any_consumed
        end

        sig { params(msg: String).returns(T.untyped) }
        def <<(msg)
          any_consumed = T.let(false, T::Boolean)
          msg.to_s.each_line do |l|
            any_consumed = true if ::LogStruct::Integrations::Puma.process_line(l)
          end
          super unless any_consumed
        end

        sig { params(msg: String).returns(T.untyped) }
        def puts(msg)
          consumed = ::LogStruct::Integrations::Puma.process_line(msg)
          if consumed
            # attempt to suppress; only forward if not consumed
            return nil
          end
          # NOTE(review): presumably supports a test harness that stashes the
          # real stdout in Kernel's @stdout ivar — verify against the specs.
          if ::Kernel.instance_variables.include?(:@stdout)
            io = T.unsafe(::Kernel.instance_variable_get(:@stdout))
            return io.puts(msg)
          end
          super
        end

        sig { params(msg: String).returns(T.untyped) }
        def info(msg)
          consumed = ::LogStruct::Integrations::Puma.process_line(msg)
          super unless consumed
        end
      end

      # Patch Puma::Events as a fallback for some versions where Events handles output
      module EventsPatch
        extend T::Sig

        sig { params(str: String).returns(T.untyped) }
        def log(str)
          consumed = ::LogStruct::Integrations::Puma.process_line(str)
          super unless consumed
        end
      end

      # Hook Rack::Handler::Puma.run to emit structured started/shutdown
      module RackHandlerPatch
        extend T::Sig

        sig do
          params(
            app: T.untyped,
            args: T.untyped,
            block: T.nilable(T.proc.returns(T.untyped))
          ).returns(T.untyped)
        end
        def run(app, *args, &block)
          # Merge all Hash positional args into one options hash
          # (non-Hash positional args are intentionally dropped from super).
          rest = args
          options = T.let({}, T::Hash[T.untyped, T.untyped])
          rest.each do |value|
            next unless value.is_a?(Hash)
            options.merge!(value)
          end

          begin
            # Seed start_info from the handler options before the server boots
            si = T.cast(::LogStruct::Integrations::Puma::STATE[:start_info], T::Hash[Symbol, T.untyped])
            si[:mode] ||= "single"
            si[:environment] ||= ((defined?(::Rails) && ::Rails.respond_to?(:env)) ? ::Rails.env : nil)
            si[:pid] ||= Process.pid
            si[:listening] ||= []
            port = T.let(nil, T.untyped)
            host = T.let(nil, T.untyped)
            if options.respond_to?(:[])
              # Accept both capitalized (Rack convention) and lowercase keys
              port = options[:Port] || options["Port"] || options[:port] || options["port"]
              host = options[:Host] || options["Host"] || options[:host] || options["host"]
            end
            if port
              list = T.cast(si[:listening], T::Array[T.untyped])
              list.clear
              # Report the loopback address when binding to all interfaces
              h = (host && host != "0.0.0.0") ? host : "127.0.0.1"
              list << "tcp://#{h}:#{port}"
            end
            state = ::LogStruct::Integrations::Puma::STATE
            state[:handler_pending_started] = true unless state[:started_emitted]
          rescue => e
            ::LogStruct::Integrations::Puma.handle_integration_error(e)
          end

          begin
            # Safety net: guarantee a shutdown log even if signals are missed
            Kernel.at_exit do
              unless ::LogStruct::Integrations::Puma::STATE[:shutdown_emitted]
                ::LogStruct::Integrations::Puma.emit_shutdown!("Exiting")
                ::LogStruct::Integrations::Puma::STATE[:shutdown_emitted] = true
              end
            rescue => e
              ::LogStruct::Integrations::Puma.handle_integration_error(e)
            end
          rescue => e
            ::LogStruct::Integrations::Puma.handle_integration_error(e)
          end

          begin
            result = super(app, **options, &block)
          ensure
            # If the banner parsing never fired, emit the pending start log
            # when run() unwinds (covers both normal return and raise).
            state = ::LogStruct::Integrations::Puma::STATE
            if state[:handler_pending_started] && !state[:started_emitted]
              begin
                ::LogStruct::Integrations::Puma.emit_started!
                state[:started_emitted] = true
              rescue => e
                ::LogStruct::Integrations::Puma.handle_integration_error(e)
              ensure
                state[:handler_pending_started] = false
              end
            end
          end

          result
        end
      end

      # (No Launcher patch)

      # No Server patch

      # No InterceptorIO

      # Removed EventsInitPatch and CLIPatch to avoid version-specific conflicts
    end
  end
end
-
# typed: strict
# frozen_string_literal: true

require "rack"
require "action_dispatch/middleware/show_exceptions"
require_relative "rack_error_handler/middleware"

module LogStruct
  module Integrations
    # Rack middleware integration for structured logging
    module RackErrorHandler
      extend T::Sig
      extend IntegrationInterface

      # Install the Rack middleware that turns security violations and
      # request errors into structured log entries.
      #
      # @param config [LogStruct::Configuration] the active configuration
      # @return [Boolean, nil] true when the middleware was inserted,
      #   nil when LogStruct or this integration is disabled
      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
      def self.setup(config)
        active = config.enabled && config.integrations.enable_rack_error_handler
        return nil unless active

        # The middleware must sit ahead of RemoteIp in the stack so that the
        # IP-spoofing errors RemoteIp raises are caught and logged here.
        middleware_stack = ::Rails.application.middleware
        middleware_stack.insert_before(
          ::ActionDispatch::RemoteIp,
          Integrations::RackErrorHandler::Middleware
        )

        true
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
module LogStruct
  module Integrations
    module RackErrorHandler
      # Custom middleware to enhance Rails error logging with JSON format and request details.
      #
      # Sits early in the middleware stack (before ActionDispatch::RemoteIp) and:
      #   * pre-runs the RemoteIp spoofing check so spoof attacks surface here,
      #   * converts IP-spoofing and CSRF failures into structured security logs
      #     plus a static 403 HTML response,
      #   * logs any other error as a structured exception log and re-raises it
      #     so Rails' normal error handling still runs.
      class Middleware
        extend T::Sig

        # IP Spoofing error response
        IP_SPOOF_HTML = T.let(
          "<html><head><title>IP Spoofing Detected</title></head><body>" \
          "<h1>Forbidden</h1>" \
          "<p>IP spoofing detected. This request has been blocked for security reasons.</p>" \
          "</body></html>",
          String
        )

        # CSRF error response
        CSRF_HTML = T.let(
          "<html><head><title>CSRF Error</title></head><body>" \
          "<h1>Forbidden</h1>" \
          "<p>Invalid authenticity token. This request has been blocked to prevent cross-site request forgery.</p>" \
          "</body></html>",
          String
        )

        # Response headers calculated at load time (Content-Length derived from the bodies above)
        IP_SPOOF_HEADERS = T.let(
          {
            "Content-Type" => "text/html",
            "Content-Length" => IP_SPOOF_HTML.bytesize.to_s
          }.freeze,
          T::Hash[String, String]
        )

        CSRF_HEADERS = T.let(
          {
            "Content-Type" => "text/html",
            "Content-Length" => CSRF_HTML.bytesize.to_s
          }.freeze,
          T::Hash[String, String]
        )

        # HTTP status code for forbidden responses
        FORBIDDEN_STATUS = T.let(403, Integer)

        # Standard Rack middleware constructor: wraps the next app in the chain.
        sig { params(app: T.untyped).void }
        def initialize(app)
          @app = app
        end

        # Rack entry point. Passes the request straight through when LogStruct
        # is disabled; otherwise wraps the downstream call in structured
        # error handling.
        sig { params(env: T.untyped).returns(T.untyped) }
        def call(env)
          return @app.call(env) unless LogStruct.enabled?

          request = ::ActionDispatch::Request.new(env)

          begin
            # Trigger the same spoofing checks that ActionDispatch::RemoteIp performs after
            # it is initialized in the middleware stack. We run this manually because we
            # execute before that middleware and still want spoofing attacks to surface here.
            perform_remote_ip_check!(request)

            @app.call(env)
          rescue ::ActionDispatch::RemoteIp::IpSpoofAttackError => ip_spoof_error
            # Create a security log for IP spoofing.
            # Raw env headers are used here (rather than request helpers) since the
            # spoof check means the computed remote_ip is not trustworthy.
            security_log = Log::Security::IPSpoof.new(
              path: env["PATH_INFO"],
              http_method: env["REQUEST_METHOD"],
              user_agent: env["HTTP_USER_AGENT"],
              referer: env["HTTP_REFERER"],
              request_id: request.request_id,
              message: ip_spoof_error.message,
              client_ip: env["HTTP_CLIENT_IP"],
              x_forwarded_for: env["HTTP_X_FORWARDED_FOR"],
              timestamp: Time.now
            )

            # NOTE(review): IP-spoof logs go through ::Rails.logger.warn while CSRF
            # uses LogStruct.error — confirm the asymmetry is intentional.
            ::Rails.logger.warn(security_log)

            # .dup because the frozen header hash may be mutated downstream
            [FORBIDDEN_STATUS, IP_SPOOF_HEADERS.dup, [IP_SPOOF_HTML]]
          rescue ::ActionController::InvalidAuthenticityToken => invalid_auth_token_error
            # Create a security log for CSRF error
            security_log = Log::Security::CSRFViolation.new(
              path: request.path,
              http_method: request.method,
              source_ip: request.remote_ip,
              user_agent: request.user_agent,
              referer: request.referer,
              request_id: request.request_id,
              message: invalid_auth_token_error.message,
              timestamp: Time.now
            )
            LogStruct.error(security_log)

            # Report to error reporting service and/or re-raise
            context = extract_request_context(env, request)
            LogStruct.handle_exception(invalid_auth_token_error, source: Source::Security, context: context)

            # If handle_exception raised an exception then Rails will deal with it (e.g. config.exceptions_app)
            # If we are only logging or reporting these security errors, then return a default response
            [FORBIDDEN_STATUS, CSRF_HEADERS.dup, [CSRF_HTML]]
          rescue => error
            # Extract request context for error reporting
            context = extract_request_context(env, request)

            # Create and log a structured exception with request context
            exception_log = Log.from_exception(Source::Rails, error, context)
            LogStruct.error(exception_log)

            # Re-raise any standard errors to let Rails or error reporter handle it.
            # Rails will also log the request details separately
            raise error
          end
        end

        private

        # Runs ActionDispatch::RemoteIp's GetIp calculation for its side effect:
        # it raises IpSpoofAttackError when Client-IP and X-Forwarded-For disagree.
        # No-op when config.action_dispatch.ip_spoofing_check is disabled.
        sig { params(request: ::ActionDispatch::Request).void }
        def perform_remote_ip_check!(request)
          action_dispatch_config = ::Rails.application.config.action_dispatch
          check_ip = action_dispatch_config.ip_spoofing_check
          return unless check_ip

          proxies = normalized_trusted_proxies(action_dispatch_config.trusted_proxies)

          # .to_s forces the lazy IP calculation (and thus the spoofing check)
          ::ActionDispatch::RemoteIp::GetIp
            .new(request, check_ip, proxies)
            .to_s
        end

        # Builds a small hash of request details for error reports. Never raises:
        # falls back to a minimal error hash if the request itself is unreadable.
        sig { params(env: T::Hash[String, T.untyped], request: T.nilable(::ActionDispatch::Request)).returns(T::Hash[Symbol, T.untyped]) }
        def extract_request_context(env, request = nil)
          request ||= ::ActionDispatch::Request.new(env)
          {
            request_id: request.request_id,
            path: request.path,
            method: request.method,
            user_agent: request.user_agent,
            referer: request.referer
          }
        rescue => error
          # If we can't extract request context, return minimal info
          {error_extracting_context: error.message}
        end

        # Mirrors RemoteIp's trusted-proxy normalization: nil/empty falls back to
        # the Rails defaults, enumerables pass through, and a bare single value
        # raises with the same guidance Rails gives.
        sig { params(configured_proxies: T.untyped).returns(T.untyped) }
        def normalized_trusted_proxies(configured_proxies)
          if configured_proxies.nil? || (configured_proxies.respond_to?(:empty?) && configured_proxies.empty?)
            return ::ActionDispatch::RemoteIp::TRUSTED_PROXIES
          end

          return configured_proxies if configured_proxies.respond_to?(:any?)

          raise(
            ArgumentError,
            <<~EOM
              Setting config.action_dispatch.trusted_proxies to a single value isn't
              supported. Please set this to an enumerable instead. For
              example, instead of:

              config.action_dispatch.trusted_proxies = IPAddr.new("10.0.0.0/8")

              Wrap the value in an Array:

              config.action_dispatch.trusted_proxies = [IPAddr.new("10.0.0.0/8")]

              Note that passing an enumerable will *replace* the default set of trusted proxies.
            EOM
          )
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
begin
-
1
require "shrine"
-
rescue LoadError
-
# Shrine gem is not available, integration will be skipped
-
end
-
-
1
module LogStruct
  module Integrations
    # Shrine integration for structured logging.
    # Subscribes to Shrine's instrumentation events and converts each one into
    # a typed Log::Shrine::* struct logged through Shrine's own logger.
    module Shrine
      extend T::Sig
      extend IntegrationInterface

      # Set up Shrine structured logging.
      # Returns true when installed, nil when Shrine is absent or the
      # integration is disabled.
      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
      def self.setup(config)
        return nil unless defined?(::Shrine)
        return nil unless config.enabled
        return nil unless config.integrations.enable_shrine

        # Create a structured log subscriber for Shrine
        # ActiveSupport::Notifications::Event has name, time, end, transaction_id, payload, and duration
        shrine_log_subscriber = T.unsafe(lambda do |event|
          # Strip raw/bulky keys before logging.
          # NOTE(review): :metadata is excluded here, yet payload[:metadata] is read
          # below in the Metadata/unknown branches where it will therefore always be
          # nil — confirm whether :metadata should be kept in the payload.
          payload = event.payload.except(:io, :metadata, :name).dup

          # Map event name to Event type
          event_type = case event.name
          when :upload then Event::Upload
          when :download then Event::Download
          when :delete then Event::Delete
          when :metadata then Event::Metadata
          when :exists then Event::Exist # ActiveStorage uses 'exist', may as well use that
          else Event::Unknown
          end

          # Create structured log data
          # Ensure storage is always a symbol
          storage_sym = payload[:storage].to_sym

          log_data = case event_type
          when Event::Upload
            # Only the upload log carries a duration — presumably the other events
            # are considered too cheap to time. TODO confirm.
            Log::Shrine::Upload.new(
              storage: storage_sym,
              location: payload[:location],
              uploader: payload[:uploader]&.to_s,
              upload_options: payload[:upload_options],
              options: payload[:options],
              duration_ms: event.duration.to_f
            )
          when Event::Download
            Log::Shrine::Download.new(
              storage: storage_sym,
              location: payload[:location],
              download_options: payload[:download_options]
            )
          when Event::Delete
            Log::Shrine::Delete.new(
              storage: storage_sym,
              location: payload[:location]
            )
          when Event::Metadata
            # :location is optional for metadata events, so only add it when present
            metadata_params = {
              storage: storage_sym,
              metadata: payload[:metadata]
            }
            metadata_params[:location] = payload[:location] if payload[:location]
            Log::Shrine::Metadata.new(**metadata_params)
          when Event::Exist
            Log::Shrine::Exist.new(
              storage: storage_sym,
              location: payload[:location],
              exist: payload[:exist]
            )
          else
            # Unknown event names fall back to a Metadata log with whatever we have
            unknown_params = {storage: storage_sym, metadata: payload[:metadata]}
            unknown_params[:location] = payload[:location] if payload[:location]
            Log::Shrine::Metadata.new(**unknown_params)
          end

          # Pass the structured hash to the logger
          # If Rails.logger has our Formatter, it will handle JSON conversion
          ::Shrine.logger.info log_data
        end)

        # Configure Shrine to use our structured log subscriber
        ::Shrine.plugin :instrumentation,
          events: %i[upload exists download delete],
          log_subscriber: shrine_log_subscriber

        true
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
begin
-
1
require "sidekiq"
-
rescue LoadError
-
# Sidekiq gem is not available, integration will be skipped
-
end
-
1
require_relative "sidekiq/logger" if defined?(::Sidekiq)
-
-
1
module LogStruct
  module Integrations
    # Sidekiq integration for structured logging.
    # Replaces Sidekiq's default logger with a LogStruct structured logger on
    # both the server (worker process) and the client (Rails app) side.
    module Sidekiq
      extend T::Sig
      extend IntegrationInterface

      # Install the structured logger into Sidekiq's server and client configs.
      # Returns true when installed, nil when Sidekiq is absent or disabled.
      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
      def self.setup(config)
        return nil unless defined?(::Sidekiq)
        return nil unless config.enabled && config.integrations.enable_sidekiq

        # Same logger class on both sides; only the name differs.
        {configure_server: "Sidekiq-Server", configure_client: "Sidekiq-Client"}.each do |hook, logger_name|
          ::Sidekiq.public_send(hook) do |sidekiq_config|
            sidekiq_config.logger = LogStruct::Integrations::Sidekiq::Logger.new(logger_name)
          end
        end

        true
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require "sorbet-runtime"
-
-
1
module LogStruct
  module Integrations
    # Integration for Sorbet runtime type checking error handlers.
    # This module installs error handlers that report type errors through LogStruct.
    # These handlers can be enabled/disabled using configuration.
    module Sorbet
      extend T::Sig
      extend IntegrationInterface

      # Set up Sorbet error handlers to report errors through LogStruct.
      # Returns true when installed, nil when disabled by configuration.
      sig { override.params(config: LogStruct::Configuration).returns(T.nilable(T::Boolean)) }
      def self.setup(config)
        return nil unless config.integrations.enable_sorbet_error_handlers

        clear_sig_error_handler!
        install_error_handler!

        # Install inline type error handler
        # Called when T.let, T.cast, T.must, etc. fail
        T::Configuration.inline_type_error_handler = lambda do |error, _opts|
          LogStruct.handle_exception(error, source: LogStruct::Source::TypeChecking)
        end

        # Install call validation error handler
        # Called when method signature validation fails
        T::Configuration.call_validation_error_handler = lambda do |_signature, opts|
          error = TypeError.new(opts[:pretty_message])
          LogStruct.handle_exception(error, source: LogStruct::Source::TypeChecking)
        end

        # Install sig builder error handler
        # Called when there's a problem with a signature definition.
        # NOTE: this overwrites the handler installed by install_error_handler!
        # just above; both report through LogStruct with the same source, so the
        # net behavior is unchanged, but the duplication is worth consolidating.
        T::Configuration.sig_builder_error_handler = lambda do |error, _location|
          LogStruct.handle_exception(error, source: LogStruct::Source::TypeChecking)
        end

        # Install sig validation error handler
        # Called when there's a problem with a signature validation
        T::Configuration.sig_validation_error_handler = lambda do |error, _opts|
          LogStruct.handle_exception(error, source: LogStruct::Source::TypeChecking)
        end

        true
      end

      # Tracks whether install_error_handler! has already run.
      @installed = T.let(false, T::Boolean)

      class << self
        extend T::Sig

        private

        # Installs the sig builder error handler once (idempotent via @installed).
        sig { void }
        def install_error_handler!
          return if installed?

          # BUG FIX: the handler's second argument is the sig definition's
          # Thread::Backtrace::Location (see the handler type declared in
          # clear_sig_error_handler!), not a LogStruct::Source. The previous
          # code passed it as `source:`, handing a backtrace location to
          # handle_exception; report with the TypeChecking source instead,
          # matching every other handler in this module.
          T::Configuration.sig_builder_error_handler = lambda do |error, _location|
            LogStruct.handle_exception(error, source: LogStruct::Source::TypeChecking, context: nil)
          end

          @installed = true
        end

        # Removes any existing sig builder error handler (reaching into
        # T::Configuration's ivar because no public reader exists), resets
        # the installed flag, and returns the previous handler (or nil).
        sig do
          returns(
            T.nilable(
              T.proc.params(error: StandardError, location: Thread::Backtrace::Location).void
            )
          )
        end
        def clear_sig_error_handler!
          previous_handler = T.cast(
            T::Configuration.instance_variable_get(:@sig_builder_error_handler),
            T.nilable(
              T.proc.params(error: StandardError, location: Thread::Backtrace::Location).void
            )
          )
          T::Configuration.sig_builder_error_handler = nil

          @installed = false

          previous_handler
        end

        # True once install_error_handler! has installed its handler.
        sig { returns(T::Boolean) }
        def installed?
          @installed
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# Common enums and shared interfaces
-
1
require_relative "enums/source"
-
1
require_relative "enums/event"
-
1
require_relative "enums/level"
-
1
require_relative "enums/log_field"
-
1
require_relative "log/interfaces/public_common_fields"
-
1
require_relative "shared/serialize_common_public"
-
-
# Dynamically require all top-level log structs under log/*
-
# Nested per-event files are required by their parent files.
-
1
Dir[File.join(__dir__, "log", "*.rb")].sort.each do |file|
-
16
require file
-
end
-
-
1
module LogStruct
  module Log
    extend T::Sig

    # Build an Error log struct from a Ruby exception.
    #
    # source          - the LogStruct::Source the error originated from
    # ex              - the exception to capture (class, message, backtrace)
    # additional_data - optional extra context merged into the log entry
    # timestamp       - when the error occurred (defaults to now)
    sig do
      params(
        source: Source,
        ex: StandardError,
        additional_data: T::Hash[T.any(String, Symbol), T.untyped],
        timestamp: Time
      ).returns(LogStruct::Log::Error)
    end
    def self.from_exception(source, ex, additional_data = {}, timestamp = Time.now)
      attributes = {
        source: source,
        error_class: ex.class,
        message: ex.message,
        backtrace: ex.backtrace,
        additional_data: additional_data,
        timestamp: timestamp
      }
      LogStruct::Log::Error.new(**attributes)
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/source_parent.rb.erb
-
-
1
require_relative "action_mailer/delivery"
-
1
require_relative "action_mailer/delivered"
-
1
require_relative "action_mailer/error"
-
-
1
module LogStruct
  module Log
    class ActionMailer
      # Common field set shared by every ActionMailer log event (Delivery,
      # Delivered, Error). #to_kwargs lets callers splat these values into an
      # event struct's constructor. All fields are optional and default to nil.
      class BaseFields < T::Struct
        extend T::Sig
        const :to, T.nilable(T::Array[String]), default: nil
        const :from, T.nilable(String), default: nil
        const :subject, T.nilable(String), default: nil
        const :message_id, T.nilable(String), default: nil
        const :mailer_class, T.nilable(String), default: nil
        const :mailer_action, T.nilable(String), default: nil
        const :attachment_count, T.nilable(Integer), default: nil

        # Keyword-hash shape mirroring the consts above, for typed splatting.
        Kwargs = T.type_alias do
          {
            to: T.nilable(T::Array[String]),
            from: T.nilable(String),
            subject: T.nilable(String),
            message_id: T.nilable(String),
            mailer_class: T.nilable(String),
            mailer_action: T.nilable(String),
            attachment_count: T.nilable(Integer)
          }
        end

        # Returns all common fields as a keyword hash (nil values included).
        sig { returns(Kwargs) }
        def to_kwargs
          {
            to: to,
            from: from,
            subject: subject,
            message_id: message_id,
            mailer_class: mailer_class,
            mailer_action: mailer_action,
            attachment_count: attachment_count
          }
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
  module Log
    class ActionMailer
      # Structured log entry for a mail "delivered" event (Event::Delivered,
      # logged at Info level from Source::Mailer).
      class Delivered < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Mailer, default: Source::Mailer
        const :event, Event, default: Event::Delivered
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info

        # Mail metadata; all optional (see ActionMailer::BaseFields).
        const :to, T.nilable(T::Array[String]), default: nil
        const :from, T.nilable(String), default: nil
        const :subject, T.nilable(String), default: nil
        const :message_id, T.nilable(String), default: nil
        const :mailer_class, T.nilable(String), default: nil
        const :mailer_action, T.nilable(String), default: nil
        const :attachment_count, T.nilable(Integer), default: nil

        # Additional data
        include LogStruct::Log::Interfaces::AdditionalDataField
        const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
        include LogStruct::Log::Shared::MergeAdditionalDataFields

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Serializes the mailer fields keyed by LogField enum values; nil fields
        # are omitted. NOTE(review): source/event/timestamp/level are presumably
        # added by SerializeCommon — confirm against that module.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
          h[LogField::To] = to unless to.nil?
          h[LogField::From] = from unless from.nil?
          h[LogField::Subject] = subject unless subject.nil?
          h[LogField::MessageId] = message_id unless message_id.nil?
          h[LogField::MailerClass] = mailer_class unless mailer_class.nil?
          h[LogField::MailerAction] = mailer_action unless mailer_action.nil?
          h[LogField::AttachmentCount] = attachment_count unless attachment_count.nil?
          h
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
  module Log
    class ActionMailer
      # Structured log entry for a mail "delivery" event (Event::Delivery,
      # logged at Info level from Source::Mailer).
      class Delivery < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Mailer, default: Source::Mailer
        const :event, Event, default: Event::Delivery
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info

        # Mail metadata; all optional (see ActionMailer::BaseFields).
        const :to, T.nilable(T::Array[String]), default: nil
        const :from, T.nilable(String), default: nil
        const :subject, T.nilable(String), default: nil
        const :message_id, T.nilable(String), default: nil
        const :mailer_class, T.nilable(String), default: nil
        const :mailer_action, T.nilable(String), default: nil
        const :attachment_count, T.nilable(Integer), default: nil

        # Additional data
        include LogStruct::Log::Interfaces::AdditionalDataField
        const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
        include LogStruct::Log::Shared::MergeAdditionalDataFields

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Serializes the mailer fields keyed by LogField enum values; nil fields
        # are omitted. NOTE(review): source/event/timestamp/level are presumably
        # added by SerializeCommon — confirm against that module.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
          h[LogField::To] = to unless to.nil?
          h[LogField::From] = from unless from.nil?
          h[LogField::Subject] = subject unless subject.nil?
          h[LogField::MessageId] = message_id unless message_id.nil?
          h[LogField::MailerClass] = mailer_class unless mailer_class.nil?
          h[LogField::MailerAction] = mailer_action unless mailer_action.nil?
          h[LogField::AttachmentCount] = attachment_count unless attachment_count.nil?
          h
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
  module Log
    class ActionMailer
      # Structured log entry for a mailer error (Event::Error from
      # Source::Mailer) carrying the exception class, message, and backtrace.
      class Error < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Mailer, default: Source::Mailer
        const :event, Event, default: Event::Error
        const :timestamp, Time, factory: -> { Time.now }
        # NOTE(review): level defaults to Info even though this is an Error
        # event — confirm with the schema that this is intentional.
        const :level, Level, default: Level::Info

        # Mail metadata; all optional (see ActionMailer::BaseFields).
        const :to, T.nilable(T::Array[String]), default: nil
        const :from, T.nilable(String), default: nil
        const :subject, T.nilable(String), default: nil
        const :message_id, T.nilable(String), default: nil
        const :mailer_class, T.nilable(String), default: nil
        const :mailer_action, T.nilable(String), default: nil
        const :attachment_count, T.nilable(Integer), default: nil

        # Event-specific fields: the captured exception details.
        const :error_class, T.class_of(StandardError)
        const :message, String
        const :backtrace, T.nilable(T::Array[String]), default: nil

        # Additional data
        include LogStruct::Log::Interfaces::AdditionalDataField
        const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
        include LogStruct::Log::Shared::MergeAdditionalDataFields

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Serializes mailer and error fields keyed by LogField enum values.
        # Nil optionals are omitted; error_class and message are always present.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
          h[LogField::To] = to unless to.nil?
          h[LogField::From] = from unless from.nil?
          h[LogField::Subject] = subject unless subject.nil?
          h[LogField::MessageId] = message_id unless message_id.nil?
          h[LogField::MailerClass] = mailer_class unless mailer_class.nil?
          h[LogField::MailerAction] = mailer_action unless mailer_action.nil?
          h[LogField::AttachmentCount] = attachment_count unless attachment_count.nil?
          h[LogField::ErrorClass] = error_class
          h[LogField::Message] = message
          h[LogField::Backtrace] = backtrace unless backtrace.nil?
          h
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/source_parent.rb.erb
-
-
1
require_relative "active_job/enqueue"
-
1
require_relative "active_job/schedule"
-
1
require_relative "active_job/start"
-
1
require_relative "active_job/finish"
-
-
1
module LogStruct
  module Log
    class ActiveJob
      # Common field set shared by every ActiveJob log event (Enqueue, Schedule,
      # Start, Finish). job_id and job_class are required; the rest default to
      # nil. #to_kwargs lets callers splat these into an event struct.
      class BaseFields < T::Struct
        extend T::Sig
        const :job_id, String
        const :job_class, String
        const :queue_name, T.nilable(Symbol), default: nil
        const :arguments, T.nilable(T::Array[T.untyped]), default: nil
        const :executions, T.nilable(Integer), default: nil
        const :provider_job_id, T.nilable(String), default: nil

        # Keyword-hash shape mirroring the consts above, for typed splatting.
        Kwargs = T.type_alias do
          {
            job_id: String,
            job_class: String,
            queue_name: T.nilable(Symbol),
            arguments: T.nilable(T::Array[T.untyped]),
            executions: T.nilable(Integer),
            provider_job_id: T.nilable(String)
          }
        end

        # Returns all common fields as a keyword hash (nil values included).
        sig { returns(Kwargs) }
        def to_kwargs
          {
            job_id: job_id,
            job_class: job_class,
            queue_name: queue_name,
            arguments: arguments,
            executions: executions,
            provider_job_id: provider_job_id
          }
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
  module Log
    class ActiveJob
      # Structured log entry for a job enqueued for immediate execution
      # (Event::Enqueue from Source::Job).
      class Enqueue < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Job, default: Source::Job
        const :event, Event, default: Event::Enqueue
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info

        # Job identity/metadata (see ActiveJob::BaseFields).
        const :job_id, String
        const :job_class, String
        const :queue_name, T.nilable(Symbol), default: nil
        const :arguments, T.nilable(T::Array[T.untyped]), default: nil
        const :executions, T.nilable(Integer), default: nil
        const :provider_job_id, T.nilable(String), default: nil

        # Event-specific fields
        const :retries, T.nilable(Integer), default: nil

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Serializes job fields keyed by LogField enum values; nil optionals
        # are omitted, job_id/job_class are always present.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
          h[LogField::JobId] = job_id
          h[LogField::JobClass] = job_class
          h[LogField::QueueName] = queue_name unless queue_name.nil?
          h[LogField::Arguments] = arguments unless arguments.nil?
          h[LogField::Executions] = executions unless executions.nil?
          h[LogField::ProviderJobId] = provider_job_id unless provider_job_id.nil?
          h[LogField::Retries] = retries unless retries.nil?
          h
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
  module Log
    class ActiveJob
      # Structured log entry for a completed job (Event::Finish from
      # Source::Job), recording how long it took and when it finished.
      class Finish < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Job, default: Source::Job
        const :event, Event, default: Event::Finish
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info

        # Job identity/metadata (see ActiveJob::BaseFields).
        const :job_id, String
        const :job_class, String
        const :queue_name, T.nilable(Symbol), default: nil
        const :arguments, T.nilable(T::Array[T.untyped]), default: nil
        const :executions, T.nilable(Integer), default: nil
        const :provider_job_id, T.nilable(String), default: nil

        # Event-specific fields (both required)
        const :duration_ms, Float
        const :finished_at, Time

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Serializes job fields keyed by LogField enum values; nil optionals
        # are omitted, required fields always present.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
          h[LogField::JobId] = job_id
          h[LogField::JobClass] = job_class
          h[LogField::QueueName] = queue_name unless queue_name.nil?
          h[LogField::Arguments] = arguments unless arguments.nil?
          h[LogField::Executions] = executions unless executions.nil?
          h[LogField::ProviderJobId] = provider_job_id unless provider_job_id.nil?
          h[LogField::DurationMs] = duration_ms
          h[LogField::FinishedAt] = finished_at
          h
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
  module Log
    class ActiveJob
      # Structured log entry for a job scheduled to run later
      # (Event::Schedule from Source::Job), recording the target time.
      class Schedule < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Job, default: Source::Job
        const :event, Event, default: Event::Schedule
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info

        # Job identity/metadata (see ActiveJob::BaseFields).
        const :job_id, String
        const :job_class, String
        const :queue_name, T.nilable(Symbol), default: nil
        const :arguments, T.nilable(T::Array[T.untyped]), default: nil
        const :executions, T.nilable(Integer), default: nil
        const :provider_job_id, T.nilable(String), default: nil

        # Event-specific fields (required)
        const :scheduled_at, Time

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Serializes job fields keyed by LogField enum values; nil optionals
        # are omitted, required fields always present.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
          h[LogField::JobId] = job_id
          h[LogField::JobClass] = job_class
          h[LogField::QueueName] = queue_name unless queue_name.nil?
          h[LogField::Arguments] = arguments unless arguments.nil?
          h[LogField::Executions] = executions unless executions.nil?
          h[LogField::ProviderJobId] = provider_job_id unless provider_job_id.nil?
          h[LogField::ScheduledAt] = scheduled_at
          h
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
  module Log
    class ActiveJob
      # Structured log entry for a job beginning execution
      # (Event::Start from Source::Job), recording start time and attempt.
      class Start < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Job, default: Source::Job
        const :event, Event, default: Event::Start
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info

        # Job identity/metadata (see ActiveJob::BaseFields).
        const :job_id, String
        const :job_class, String
        const :queue_name, T.nilable(Symbol), default: nil
        const :arguments, T.nilable(T::Array[T.untyped]), default: nil
        const :executions, T.nilable(Integer), default: nil
        const :provider_job_id, T.nilable(String), default: nil

        # Event-specific fields: started_at is required, attempt optional.
        const :started_at, Time
        const :attempt, T.nilable(Integer), default: nil

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Serializes job fields keyed by LogField enum values; nil optionals
        # are omitted, required fields always present.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
          h[LogField::JobId] = job_id
          h[LogField::JobClass] = job_class
          h[LogField::QueueName] = queue_name unless queue_name.nil?
          h[LogField::Arguments] = arguments unless arguments.nil?
          h[LogField::Executions] = executions unless executions.nil?
          h[LogField::ProviderJobId] = provider_job_id unless provider_job_id.nil?
          h[LogField::StartedAt] = started_at
          h[LogField::Attempt] = attempt unless attempt.nil?
          h
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../enums/source"
-
1
require_relative "../enums/event"
-
1
require_relative "../enums/level"
-
1
require_relative "../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class ActiveModelSerializers < T::Struct
-
1
extend T::Sig
-
-
# Shared/common fields
-
1
const :source, Source::Rails, default: Source::Rails
-
1
const :event, Event, default: Event::Generate
-
1
const :timestamp, Time, factory: -> { Time.now }
-
1
const :level, Level, default: Level::Info
-
-
# Event-specific fields
-
1
const :message, String
-
1
const :serializer, T.nilable(String), default: nil
-
1
const :adapter, T.nilable(String), default: nil
-
1
const :resource_class, T.nilable(String), default: nil
-
1
const :duration_ms, Float
-
-
# Serialize shared fields
-
1
include LogStruct::Log::Interfaces::CommonFields
-
1
include LogStruct::Log::Shared::SerializeCommon
-
-
2
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
-
1
def to_h
-
1
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
-
1
h[LogField::Message] = message
-
1
h[LogField::Serializer] = serializer unless serializer.nil?
-
1
h[LogField::Adapter] = adapter unless adapter.nil?
-
1
h[LogField::ResourceClass] = resource_class unless resource_class.nil?
-
1
h[LogField::DurationMs] = duration_ms
-
1
h
-
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/source_parent.rb.erb
-
-
1
require_relative "active_storage/upload"
-
1
require_relative "active_storage/download"
-
1
require_relative "active_storage/delete"
-
1
require_relative "active_storage/metadata"
-
1
require_relative "active_storage/exist"
-
1
require_relative "active_storage/stream"
-
1
require_relative "active_storage/url"
-
-
1
module LogStruct
-
1
module Log
-
1
class ActiveStorage
-
1
class BaseFields < T::Struct
-
1
extend T::Sig
-
1
const :storage, Symbol
-
1
const :file_id, String
-
-
1
Kwargs = T.type_alias do
-
{
-
storage: Symbol,
-
file_id: String
-
}
-
end
-
-
1
sig { returns(Kwargs) }
-
1
def to_kwargs
-
{
-
storage: storage,
-
file_id: file_id
-
}
-
end
-
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class ActiveStorage
-
1
class Delete < T::Struct
-
1
extend T::Sig
-
-
# Shared/common fields
-
1
const :source, Source::Storage, default: Source::Storage
-
1
const :event, Event, default: Event::Delete
-
1
const :timestamp, Time, factory: -> { Time.now }
-
1
const :level, Level, default: Level::Info
-
-
1
const :storage, Symbol
-
1
const :file_id, String
-
-
# Serialize shared fields
-
1
include LogStruct::Log::Interfaces::CommonFields
-
1
include LogStruct::Log::Shared::SerializeCommon
-
-
1
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
-
1
def to_h
-
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
-
h[LogField::Storage] = storage
-
h[LogField::FileId] = file_id
-
h
-
end
-
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class ActiveStorage
-
1
class Download < T::Struct
-
1
extend T::Sig
-
-
# Shared/common fields
-
1
const :source, Source::Storage, default: Source::Storage
-
1
const :event, Event, default: Event::Download
-
1
const :timestamp, Time, factory: -> { Time.now }
-
1
const :level, Level, default: Level::Info
-
-
1
const :storage, Symbol
-
1
const :file_id, String
-
-
# Event-specific fields
-
1
const :filename, T.nilable(String), default: nil
-
1
const :range, T.nilable(String), default: nil
-
1
const :duration_ms, T.nilable(Float), default: nil
-
-
# Serialize shared fields
-
1
include LogStruct::Log::Interfaces::CommonFields
-
1
include LogStruct::Log::Shared::SerializeCommon
-
-
1
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
-
1
def to_h
-
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
-
h[LogField::Storage] = storage
-
h[LogField::FileId] = file_id
-
h[LogField::Filename] = filename unless filename.nil?
-
h[LogField::Range] = range unless range.nil?
-
h[LogField::DurationMs] = duration_ms unless duration_ms.nil?
-
h
-
end
-
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class ActiveStorage
-
1
class Exist < T::Struct
-
1
extend T::Sig
-
-
# Shared/common fields
-
1
const :source, Source::Storage, default: Source::Storage
-
1
const :event, Event, default: Event::Exist
-
1
const :timestamp, Time, factory: -> { Time.now }
-
1
const :level, Level, default: Level::Info
-
-
1
const :storage, Symbol
-
1
const :file_id, String
-
-
# Event-specific fields
-
1
const :exist, T.nilable(T::Boolean), default: nil
-
-
# Serialize shared fields
-
1
include LogStruct::Log::Interfaces::CommonFields
-
1
include LogStruct::Log::Shared::SerializeCommon
-
-
1
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
-
1
def to_h
-
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
-
h[LogField::Storage] = storage
-
h[LogField::FileId] = file_id
-
h[LogField::Exist] = exist unless exist.nil?
-
h
-
end
-
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class ActiveStorage
-
1
class Metadata < T::Struct
-
1
extend T::Sig
-
-
# Shared/common fields
-
1
const :source, Source::Storage, default: Source::Storage
-
1
const :event, Event, default: Event::Metadata
-
1
const :timestamp, Time, factory: -> { Time.now }
-
1
const :level, Level, default: Level::Info
-
-
1
const :storage, Symbol
-
1
const :file_id, String
-
-
# Event-specific fields
-
1
const :metadata, T.nilable(T::Hash[String, T.untyped]), default: nil
-
-
# Serialize shared fields
-
1
include LogStruct::Log::Interfaces::CommonFields
-
1
include LogStruct::Log::Shared::SerializeCommon
-
-
1
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
-
1
def to_h
-
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
-
h[LogField::Storage] = storage
-
h[LogField::FileId] = file_id
-
h[LogField::Metadata] = metadata unless metadata.nil?
-
h
-
end
-
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class ActiveStorage
-
1
class Stream < T::Struct
-
1
extend T::Sig
-
-
# Shared/common fields
-
1
const :source, Source::Storage, default: Source::Storage
-
1
const :event, Event, default: Event::Stream
-
1
const :timestamp, Time, factory: -> { Time.now }
-
1
const :level, Level, default: Level::Info
-
-
1
const :storage, Symbol
-
1
const :file_id, String
-
-
# Event-specific fields
-
1
const :prefix, T.nilable(String), default: nil
-
-
# Serialize shared fields
-
1
include LogStruct::Log::Interfaces::CommonFields
-
1
include LogStruct::Log::Shared::SerializeCommon
-
-
1
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
-
1
def to_h
-
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
-
h[LogField::Storage] = storage
-
h[LogField::FileId] = file_id
-
h[LogField::Prefix] = prefix unless prefix.nil?
-
h
-
end
-
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class ActiveStorage
-
1
class Upload < T::Struct
-
1
extend T::Sig
-
-
# Shared/common fields
-
1
const :source, Source::Storage, default: Source::Storage
-
1
const :event, Event, default: Event::Upload
-
1
const :timestamp, Time, factory: -> { Time.now }
-
1
const :level, Level, default: Level::Info
-
-
1
const :storage, Symbol
-
1
const :file_id, String
-
-
# Event-specific fields
-
1
const :filename, T.nilable(String), default: nil
-
1
const :mime_type, T.nilable(String), default: nil
-
1
const :size, T.nilable(Integer), default: nil
-
1
const :metadata, T.nilable(T::Hash[String, T.untyped]), default: nil
-
1
const :duration_ms, T.nilable(Float), default: nil
-
1
const :checksum, T.nilable(String), default: nil
-
-
# Serialize shared fields
-
1
include LogStruct::Log::Interfaces::CommonFields
-
1
include LogStruct::Log::Shared::SerializeCommon
-
-
1
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
-
1
def to_h
-
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
-
h[LogField::Storage] = storage
-
h[LogField::FileId] = file_id
-
h[LogField::Filename] = filename unless filename.nil?
-
h[LogField::MimeType] = mime_type unless mime_type.nil?
-
h[LogField::Size] = size unless size.nil?
-
h[LogField::Metadata] = metadata unless metadata.nil?
-
h[LogField::DurationMs] = duration_ms unless duration_ms.nil?
-
h[LogField::Checksum] = checksum unless checksum.nil?
-
h
-
end
-
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class ActiveStorage
-
1
class Url < T::Struct
-
1
extend T::Sig
-
-
# Shared/common fields
-
1
const :source, Source::Storage, default: Source::Storage
-
1
const :event, Event, default: Event::Url
-
1
const :timestamp, Time, factory: -> { Time.now }
-
1
const :level, Level, default: Level::Info
-
-
1
const :storage, Symbol
-
1
const :file_id, String
-
-
# Event-specific fields
-
1
const :url, String
-
-
# Serialize shared fields
-
1
include LogStruct::Log::Interfaces::CommonFields
-
1
include LogStruct::Log::Shared::SerializeCommon
-
-
1
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
-
1
def to_h
-
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
-
h[LogField::Storage] = storage
-
h[LogField::FileId] = file_id
-
h[LogField::Url] = url
-
h
-
end
-
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../enums/source"
-
1
require_relative "../enums/event"
-
1
require_relative "../enums/level"
-
1
require_relative "../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class Ahoy < T::Struct
-
1
extend T::Sig
-
-
# Shared/common fields
-
1
const :source, Source::App, default: Source::App
-
1
const :event, Event, default: Event::Log
-
2
const :timestamp, Time, factory: -> { Time.now }
-
1
const :level, Level, default: Level::Info
-
-
# Event-specific fields
-
1
const :message, String
-
1
const :ahoy_event, String
-
1
const :properties, T.nilable(T::Hash[Symbol, T.untyped]), default: nil
-
-
# Serialize shared fields
-
1
include LogStruct::Log::Interfaces::CommonFields
-
1
include LogStruct::Log::Shared::SerializeCommon
-
-
2
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
-
1
def to_h
-
1
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
-
1
h[LogField::Message] = message
-
1
h[LogField::AhoyEvent] = ahoy_event
-
1
h[LogField::Properties] = properties unless properties.nil?
-
1
h
-
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/source_parent.rb.erb
-
-
1
require_relative "carrierwave/upload"
-
1
require_relative "carrierwave/delete"
-
1
require_relative "carrierwave/download"
-
-
1
module LogStruct
-
1
module Log
-
1
class CarrierWave
-
1
class BaseFields < T::Struct
-
1
extend T::Sig
-
1
const :storage, Symbol
-
1
const :file_id, String
-
1
const :uploader, T.nilable(String), default: nil
-
1
const :model, T.nilable(String), default: nil
-
1
const :mount_point, T.nilable(String), default: nil
-
1
const :version, T.nilable(String), default: nil
-
1
const :store_path, T.nilable(String), default: nil
-
1
const :extension, T.nilable(String), default: nil
-
-
1
Kwargs = T.type_alias do
-
{
-
storage: Symbol,
-
file_id: String,
-
uploader: T.nilable(String),
-
model: T.nilable(String),
-
mount_point: T.nilable(String),
-
version: T.nilable(String),
-
store_path: T.nilable(String),
-
extension: T.nilable(String)
-
}
-
end
-
-
1
sig { returns(Kwargs) }
-
1
def to_kwargs
-
{
-
storage: storage,
-
file_id: file_id,
-
uploader: uploader,
-
model: model,
-
mount_point: mount_point,
-
version: version,
-
store_path: store_path,
-
extension: extension
-
}
-
end
-
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class CarrierWave
-
1
class Delete < T::Struct
-
1
extend T::Sig
-
-
# Shared/common fields
-
1
const :source, Source::CarrierWave, default: Source::CarrierWave
-
1
const :event, Event, default: Event::Delete
-
1
const :timestamp, Time, factory: -> { Time.now }
-
1
const :level, Level, default: Level::Info
-
-
1
const :storage, Symbol
-
1
const :file_id, String
-
1
const :uploader, T.nilable(String), default: nil
-
1
const :model, T.nilable(String), default: nil
-
1
const :mount_point, T.nilable(String), default: nil
-
1
const :version, T.nilable(String), default: nil
-
1
const :store_path, T.nilable(String), default: nil
-
1
const :extension, T.nilable(String), default: nil
-
-
# Serialize shared fields
-
1
include LogStruct::Log::Interfaces::CommonFields
-
1
include LogStruct::Log::Shared::SerializeCommon
-
-
1
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
-
1
def to_h
-
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
-
h[LogField::Storage] = storage
-
h[LogField::FileId] = file_id
-
h[LogField::Uploader] = uploader unless uploader.nil?
-
h[LogField::Model] = model unless model.nil?
-
h[LogField::MountPoint] = mount_point unless mount_point.nil?
-
h[LogField::Version] = version unless version.nil?
-
h[LogField::StorePath] = store_path unless store_path.nil?
-
h[LogField::Extension] = extension unless extension.nil?
-
h
-
end
-
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class CarrierWave
-
1
class Download < T::Struct
-
1
extend T::Sig
-
-
# Shared/common fields
-
1
const :source, Source::CarrierWave, default: Source::CarrierWave
-
1
const :event, Event, default: Event::Download
-
1
const :timestamp, Time, factory: -> { Time.now }
-
1
const :level, Level, default: Level::Info
-
-
1
const :storage, Symbol
-
1
const :file_id, String
-
1
const :uploader, T.nilable(String), default: nil
-
1
const :model, T.nilable(String), default: nil
-
1
const :mount_point, T.nilable(String), default: nil
-
1
const :version, T.nilable(String), default: nil
-
1
const :store_path, T.nilable(String), default: nil
-
1
const :extension, T.nilable(String), default: nil
-
-
# Event-specific fields
-
1
const :filename, T.nilable(String), default: nil
-
1
const :mime_type, T.nilable(String), default: nil
-
1
const :size, T.nilable(Integer), default: nil
-
-
# Serialize shared fields
-
1
include LogStruct::Log::Interfaces::CommonFields
-
1
include LogStruct::Log::Shared::SerializeCommon
-
-
1
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
-
1
def to_h
-
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
-
h[LogField::Storage] = storage
-
h[LogField::FileId] = file_id
-
h[LogField::Uploader] = uploader unless uploader.nil?
-
h[LogField::Model] = model unless model.nil?
-
h[LogField::MountPoint] = mount_point unless mount_point.nil?
-
h[LogField::Version] = version unless version.nil?
-
h[LogField::StorePath] = store_path unless store_path.nil?
-
h[LogField::Extension] = extension unless extension.nil?
-
h[LogField::Filename] = filename unless filename.nil?
-
h[LogField::MimeType] = mime_type unless mime_type.nil?
-
h[LogField::Size] = size unless size.nil?
-
h
-
end
-
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class CarrierWave
-
1
class Upload < T::Struct
-
1
extend T::Sig
-
-
# Shared/common fields
-
1
const :source, Source::CarrierWave, default: Source::CarrierWave
-
1
const :event, Event, default: Event::Upload
-
1
const :timestamp, Time, factory: -> { Time.now }
-
1
const :level, Level, default: Level::Info
-
-
1
const :storage, Symbol
-
1
const :file_id, String
-
1
const :uploader, T.nilable(String), default: nil
-
1
const :model, T.nilable(String), default: nil
-
1
const :mount_point, T.nilable(String), default: nil
-
1
const :version, T.nilable(String), default: nil
-
1
const :store_path, T.nilable(String), default: nil
-
1
const :extension, T.nilable(String), default: nil
-
-
# Event-specific fields
-
1
const :filename, T.nilable(String), default: nil
-
1
const :mime_type, T.nilable(String), default: nil
-
1
const :size, T.nilable(Integer), default: nil
-
1
const :metadata, T.nilable(T::Hash[String, T.untyped]), default: nil
-
1
const :duration_ms, T.nilable(Float), default: nil
-
-
# Serialize shared fields
-
1
include LogStruct::Log::Interfaces::CommonFields
-
1
include LogStruct::Log::Shared::SerializeCommon
-
-
1
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
-
1
def to_h
-
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
-
h[LogField::Storage] = storage
-
h[LogField::FileId] = file_id
-
h[LogField::Uploader] = uploader unless uploader.nil?
-
h[LogField::Model] = model unless model.nil?
-
h[LogField::MountPoint] = mount_point unless mount_point.nil?
-
h[LogField::Version] = version unless version.nil?
-
h[LogField::StorePath] = store_path unless store_path.nil?
-
h[LogField::Extension] = extension unless extension.nil?
-
h[LogField::Filename] = filename unless filename.nil?
-
h[LogField::MimeType] = mime_type unless mime_type.nil?
-
h[LogField::Size] = size unless size.nil?
-
h[LogField::Metadata] = metadata unless metadata.nil?
-
h[LogField::DurationMs] = duration_ms unless duration_ms.nil?
-
h
-
end
-
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/source_parent.rb.erb
-
-
1
require_relative "dotenv/load"
-
1
require_relative "dotenv/update"
-
1
require_relative "dotenv/save"
-
1
require_relative "dotenv/restore"
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class Dotenv
-
1
class Load < T::Struct
-
1
extend T::Sig
-
-
# Shared/common fields
-
1
const :source, Source::Dotenv, default: Source::Dotenv
-
1
const :event, Event, default: Event::Load
-
1
const :timestamp, Time, factory: -> { Time.now }
-
1
const :level, Level, default: Level::Info
-
-
# Event-specific fields
-
1
const :file, String
-
-
# Serialize shared fields
-
1
include LogStruct::Log::Interfaces::CommonFields
-
1
include LogStruct::Log::Shared::SerializeCommon
-
-
1
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
-
1
def to_h
-
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
-
h[LogField::File] = file
-
h
-
end
-
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class Dotenv
-
1
class Restore < T::Struct
-
1
extend T::Sig
-
-
# Shared/common fields
-
1
const :source, Source::Dotenv, default: Source::Dotenv
-
1
const :event, Event, default: Event::Restore
-
1
const :timestamp, Time, factory: -> { Time.now }
-
1
const :level, Level, default: Level::Info
-
-
# Event-specific fields
-
1
const :vars, T::Array[String]
-
-
# Serialize shared fields
-
1
include LogStruct::Log::Interfaces::CommonFields
-
1
include LogStruct::Log::Shared::SerializeCommon
-
-
1
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
-
1
def to_h
-
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
-
h[LogField::Vars] = vars
-
h
-
end
-
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class Dotenv
-
1
class Save < T::Struct
-
1
extend T::Sig
-
-
# Shared/common fields
-
1
const :source, Source::Dotenv, default: Source::Dotenv
-
1
const :event, Event, default: Event::Save
-
1
const :timestamp, Time, factory: -> { Time.now }
-
1
const :level, Level, default: Level::Info
-
-
# Event-specific fields
-
1
const :snapshot, T::Boolean
-
-
# Serialize shared fields
-
1
include LogStruct::Log::Interfaces::CommonFields
-
1
include LogStruct::Log::Shared::SerializeCommon
-
-
1
sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
-
1
def to_h
-
h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
-
h[LogField::Snapshot] = snapshot
-
h
-
end
-
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class Dotenv
-
1
class Update < T::Struct
  extend T::Sig

  # Common fields present on every structured log event.
  const :source, Source::Dotenv, default: Source::Dotenv
  const :event, Event, default: Event::Update
  const :timestamp, Time, factory: -> { Time.now }
  const :level, Level, default: Level::Info

  # Fields specific to the dotenv update event: names of the vars touched.
  const :vars, T::Array[String]

  # Shared serialization of the common fields above.
  include LogStruct::Log::Interfaces::CommonFields
  include LogStruct::Log::Shared::SerializeCommon

  # Serializes the event-specific fields keyed by LogField enum values.
  sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
  def to_h
    { LogField::Vars => vars }
  end
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../enums/source"
-
1
require_relative "../enums/event"
-
1
require_relative "../enums/level"
-
1
require_relative "../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class Error < T::Struct
  extend T::Sig

  # Common fields. Note: source carries no default here — callers must
  # say which subsystem the error came from.
  const :source, Source
  const :event, Event, default: Event::Error
  const :timestamp, Time, factory: -> { Time.now }
  const :level, Level, default: Level::Info

  # Error details: the exception class itself, its message, and an
  # optional backtrace.
  const :error_class, T.class_of(StandardError)
  const :message, String
  const :backtrace, T.nilable(T::Array[String]), default: nil

  # Free-form extra context merged into the serialized output.
  include LogStruct::Log::Interfaces::AdditionalDataField
  const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
  include LogStruct::Log::Shared::MergeAdditionalDataFields

  # Shared serialization of the common fields above.
  include LogStruct::Log::Interfaces::CommonFields
  include LogStruct::Log::Shared::SerializeCommon

  # Serializes event-specific fields; the optional backtrace is omitted
  # when nil. Key order mirrors the declaration order above.
  sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
  def to_h
    {
      LogField::ErrorClass => error_class,
      LogField::Message => message,
      LogField::Backtrace => backtrace
    }.reject { |_field, value| value.nil? }
  end
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/source_parent.rb.erb
-
-
1
require_relative "good_job/log"
-
1
require_relative "good_job/enqueue"
-
1
require_relative "good_job/start"
-
1
require_relative "good_job/finish"
-
1
require_relative "good_job/error"
-
1
require_relative "good_job/schedule"
-
-
1
module LogStruct
-
1
module Log
-
1
class GoodJob
-
1
class BaseFields < T::Struct
  extend T::Sig

  # Job identification fields shared by all GoodJob event structs.
  # Every field is optional.
  const :job_id, T.nilable(String), default: nil
  const :job_class, T.nilable(String), default: nil
  const :queue_name, T.nilable(Symbol), default: nil
  const :arguments, T.nilable(T::Array[T.untyped]), default: nil
  const :executions, T.nilable(Integer), default: nil

  # Shape of the keyword-argument hash produced by #to_kwargs, suitable
  # for splatting into an event struct's constructor.
  Kwargs = T.type_alias do
    {
      job_id: T.nilable(String),
      job_class: T.nilable(String),
      queue_name: T.nilable(Symbol),
      arguments: T.nilable(T::Array[T.untyped]),
      executions: T.nilable(Integer)
    }
  end

  # Returns the base fields as a keyword hash (nil values included).
  sig { returns(Kwargs) }
  def to_kwargs
    {
      job_id: job_id,
      job_class: job_class,
      queue_name: queue_name,
      arguments: arguments,
      executions: executions
    }
  end
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class GoodJob
-
1
class Enqueue < T::Struct
  extend T::Sig

  # Common fields present on every structured log event.
  const :source, Source::Job, default: Source::Job
  const :event, Event, default: Event::Enqueue
  const :timestamp, Time, factory: -> { Time.now }
  const :level, Level, default: Level::Info

  # Job identification fields (all optional).
  const :job_id, T.nilable(String), default: nil
  const :job_class, T.nilable(String), default: nil
  const :queue_name, T.nilable(Symbol), default: nil
  const :arguments, T.nilable(T::Array[T.untyped]), default: nil
  const :executions, T.nilable(Integer), default: nil

  # Fields specific to the enqueue event.
  const :duration_ms, Float
  const :scheduled_at, T.nilable(Time), default: nil
  const :enqueue_caller, T.nilable(String), default: nil

  # Shared serialization of the common fields above.
  include LogStruct::Log::Interfaces::CommonFields
  include LogStruct::Log::Shared::SerializeCommon

  # Serializes event-specific fields, omitting optional fields that are
  # nil. Key order mirrors the declaration order above; duration_ms is
  # non-nilable and therefore always present.
  sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
  def to_h
    {
      LogField::JobId => job_id,
      LogField::JobClass => job_class,
      LogField::QueueName => queue_name,
      LogField::Arguments => arguments,
      LogField::Executions => executions,
      LogField::DurationMs => duration_ms,
      LogField::ScheduledAt => scheduled_at,
      LogField::EnqueueCaller => enqueue_caller
    }.reject { |_field, value| value.nil? }
  end
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class GoodJob
-
1
class Error < T::Struct
  extend T::Sig

  # Common fields present on every structured log event.
  const :source, Source::Job, default: Source::Job
  const :event, Event, default: Event::Error
  const :timestamp, Time, factory: -> { Time.now }
  const :level, Level, default: Level::Info

  # Job identification fields (all optional).
  const :job_id, T.nilable(String), default: nil
  const :job_class, T.nilable(String), default: nil
  const :queue_name, T.nilable(Symbol), default: nil
  const :arguments, T.nilable(T::Array[T.untyped]), default: nil
  const :executions, T.nilable(Integer), default: nil

  # Fields specific to the job-error event.
  const :error_class, String
  const :error_message, String
  const :duration_ms, T.nilable(Float), default: nil
  const :process_id, Integer
  const :thread_id, String
  const :exception_executions, T.nilable(Integer), default: nil
  const :backtrace, T.nilable(T::Array[String]), default: nil

  # Shared serialization of the common fields above.
  include LogStruct::Log::Interfaces::CommonFields
  include LogStruct::Log::Shared::SerializeCommon

  # Serializes event-specific fields, omitting optional fields that are
  # nil. Key order mirrors the declaration order above; the non-nilable
  # fields (error_class, error_message, process_id, thread_id) are
  # always present.
  sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
  def to_h
    {
      LogField::JobId => job_id,
      LogField::JobClass => job_class,
      LogField::QueueName => queue_name,
      LogField::Arguments => arguments,
      LogField::Executions => executions,
      LogField::ErrorClass => error_class,
      LogField::ErrorMessage => error_message,
      LogField::DurationMs => duration_ms,
      LogField::ProcessId => process_id,
      LogField::ThreadId => thread_id,
      LogField::ExceptionExecutions => exception_executions,
      LogField::Backtrace => backtrace
    }.reject { |_field, value| value.nil? }
  end
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class GoodJob
-
1
class Finish < T::Struct
  extend T::Sig

  # Common fields present on every structured log event.
  const :source, Source::Job, default: Source::Job
  const :event, Event, default: Event::Finish
  const :timestamp, Time, factory: -> { Time.now }
  const :level, Level, default: Level::Info

  # Job identification fields (all optional).
  const :job_id, T.nilable(String), default: nil
  const :job_class, T.nilable(String), default: nil
  const :queue_name, T.nilable(Symbol), default: nil
  const :arguments, T.nilable(T::Array[T.untyped]), default: nil
  const :executions, T.nilable(Integer), default: nil

  # Fields specific to the finish event.
  const :duration_ms, Float
  const :finished_at, Time
  const :process_id, Integer
  const :thread_id, String
  const :result, T.nilable(String), default: nil

  # Shared serialization of the common fields above.
  include LogStruct::Log::Interfaces::CommonFields
  include LogStruct::Log::Shared::SerializeCommon

  # Serializes event-specific fields, omitting optional fields that are
  # nil. Key order mirrors the declaration order above; the non-nilable
  # fields are always present.
  sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
  def to_h
    {
      LogField::JobId => job_id,
      LogField::JobClass => job_class,
      LogField::QueueName => queue_name,
      LogField::Arguments => arguments,
      LogField::Executions => executions,
      LogField::DurationMs => duration_ms,
      LogField::FinishedAt => finished_at,
      LogField::ProcessId => process_id,
      LogField::ThreadId => thread_id,
      LogField::Result => result
    }.reject { |_field, value| value.nil? }
  end
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class GoodJob
-
1
class Log < T::Struct
  extend T::Sig

  # Common fields present on every structured log event.
  const :source, Source::Job, default: Source::Job
  const :event, Event, default: Event::Log
  const :timestamp, Time, factory: -> { Time.now }
  const :level, Level, default: Level::Info

  # Job identification fields (all optional).
  const :job_id, T.nilable(String), default: nil
  const :job_class, T.nilable(String), default: nil
  const :queue_name, T.nilable(Symbol), default: nil
  const :arguments, T.nilable(T::Array[T.untyped]), default: nil
  const :executions, T.nilable(Integer), default: nil

  # Fields specific to the generic job log event.
  const :message, String
  const :process_id, Integer
  const :thread_id, String
  const :scheduled_at, T.nilable(Time), default: nil
  const :priority, T.nilable(Integer), default: nil

  # Shared serialization of the common fields above.
  include LogStruct::Log::Interfaces::CommonFields
  include LogStruct::Log::Shared::SerializeCommon

  # Serializes event-specific fields, omitting optional fields that are
  # nil. Key order mirrors the declaration order above; message,
  # process_id and thread_id are non-nilable and always present.
  sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
  def to_h
    {
      LogField::JobId => job_id,
      LogField::JobClass => job_class,
      LogField::QueueName => queue_name,
      LogField::Arguments => arguments,
      LogField::Executions => executions,
      LogField::Message => message,
      LogField::ProcessId => process_id,
      LogField::ThreadId => thread_id,
      LogField::ScheduledAt => scheduled_at,
      LogField::Priority => priority
    }.reject { |_field, value| value.nil? }
  end
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class GoodJob
-
1
class Schedule < T::Struct
  extend T::Sig

  # Common fields present on every structured log event.
  const :source, Source::Job, default: Source::Job
  const :event, Event, default: Event::Schedule
  const :timestamp, Time, factory: -> { Time.now }
  const :level, Level, default: Level::Info

  # Job identification fields (all optional).
  const :job_id, T.nilable(String), default: nil
  const :job_class, T.nilable(String), default: nil
  const :queue_name, T.nilable(Symbol), default: nil
  const :arguments, T.nilable(T::Array[T.untyped]), default: nil
  const :executions, T.nilable(Integer), default: nil

  # Fields specific to the schedule event.
  const :duration_ms, Float
  const :scheduled_at, Time
  const :priority, T.nilable(Integer), default: nil
  const :cron_key, T.nilable(String), default: nil

  # Shared serialization of the common fields above.
  include LogStruct::Log::Interfaces::CommonFields
  include LogStruct::Log::Shared::SerializeCommon

  # Serializes event-specific fields, omitting optional fields that are
  # nil. Key order mirrors the declaration order above; duration_ms and
  # scheduled_at are non-nilable and always present.
  sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
  def to_h
    {
      LogField::JobId => job_id,
      LogField::JobClass => job_class,
      LogField::QueueName => queue_name,
      LogField::Arguments => arguments,
      LogField::Executions => executions,
      LogField::DurationMs => duration_ms,
      LogField::ScheduledAt => scheduled_at,
      LogField::Priority => priority,
      LogField::CronKey => cron_key
    }.reject { |_field, value| value.nil? }
  end
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class GoodJob
-
1
class Start < T::Struct
  extend T::Sig

  # Common fields present on every structured log event.
  const :source, Source::Job, default: Source::Job
  const :event, Event, default: Event::Start
  const :timestamp, Time, factory: -> { Time.now }
  const :level, Level, default: Level::Info

  # Job identification fields (all optional).
  const :job_id, T.nilable(String), default: nil
  const :job_class, T.nilable(String), default: nil
  const :queue_name, T.nilable(Symbol), default: nil
  const :arguments, T.nilable(T::Array[T.untyped]), default: nil
  const :executions, T.nilable(Integer), default: nil

  # Fields specific to the start event.
  const :process_id, Integer
  const :thread_id, String
  const :wait_ms, T.nilable(Float), default: nil
  const :scheduled_at, T.nilable(Time), default: nil

  # Shared serialization of the common fields above.
  include LogStruct::Log::Interfaces::CommonFields
  include LogStruct::Log::Shared::SerializeCommon

  # Serializes event-specific fields, omitting optional fields that are
  # nil. Key order mirrors the declaration order above; process_id and
  # thread_id are non-nilable and always present.
  sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
  def to_h
    {
      LogField::JobId => job_id,
      LogField::JobClass => job_class,
      LogField::QueueName => queue_name,
      LogField::Arguments => arguments,
      LogField::Executions => executions,
      LogField::ProcessId => process_id,
      LogField::ThreadId => thread_id,
      LogField::WaitMs => wait_ms,
      LogField::ScheduledAt => scheduled_at
    }.reject { |_field, value| value.nil? }
  end
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require "log_struct/shared/interfaces/public_common_fields"
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../enums/source"
-
1
require_relative "../enums/event"
-
1
require_relative "../enums/level"
-
1
require_relative "../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class Plain < T::Struct
  extend T::Sig

  # Common fields present on every structured log event.
  const :source, Source::App, default: Source::App
  const :event, Event, default: Event::Log
  const :timestamp, Time, factory: -> { Time.now }
  const :level, Level, default: Level::Info

  # The raw log payload. Untyped: plain-text logs can carry anything.
  const :message, T.untyped

  # Free-form extra context merged into the serialized output.
  include LogStruct::Log::Interfaces::AdditionalDataField
  const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
  include LogStruct::Log::Shared::MergeAdditionalDataFields

  # Shared serialization of the common fields above.
  include LogStruct::Log::Interfaces::CommonFields
  include LogStruct::Log::Shared::SerializeCommon

  # Serializes the message field. Always emitted, even when nil, since
  # message is untyped.
  sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
  def to_h
    { LogField::Message => message }
  end
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/source_parent.rb.erb
-
-
1
require_relative "puma/start"
-
1
require_relative "puma/shutdown"
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class Puma
-
1
class Shutdown < T::Struct
  extend T::Sig

  # Common fields present on every structured log event.
  const :source, Source::Puma, default: Source::Puma
  const :event, Event, default: Event::Shutdown
  const :timestamp, Time, factory: -> { Time.now }
  const :level, Level, default: Level::Info

  # Fields specific to the Puma shutdown event.
  const :process_id, T.nilable(Integer), default: nil

  # Free-form extra context merged into the serialized output.
  include LogStruct::Log::Interfaces::AdditionalDataField
  const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
  include LogStruct::Log::Shared::MergeAdditionalDataFields

  # Shared serialization of the common fields above.
  include LogStruct::Log::Interfaces::CommonFields
  include LogStruct::Log::Shared::SerializeCommon

  # Serializes the event-specific fields, omitting process_id when nil.
  sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
  def to_h
    {
      LogField::ProcessId => process_id
    }.reject { |_field, value| value.nil? }
  end
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class Puma
-
1
class Start < T::Struct
  extend T::Sig

  # Common fields present on every structured log event.
  const :source, Source::Puma, default: Source::Puma
  const :event, Event, default: Event::Start
  const :timestamp, Time, factory: -> { Time.now }
  const :level, Level, default: Level::Info

  # Fields specific to the Puma start event (all optional).
  const :mode, T.nilable(String), default: nil
  const :puma_version, T.nilable(String), default: nil
  const :puma_codename, T.nilable(String), default: nil
  const :ruby_version, T.nilable(String), default: nil
  const :min_threads, T.nilable(Integer), default: nil
  const :max_threads, T.nilable(Integer), default: nil
  const :environment, T.nilable(String), default: nil
  const :process_id, T.nilable(Integer), default: nil
  const :listening_addresses, T.nilable(T::Array[String]), default: nil

  # Free-form extra context merged into the serialized output.
  include LogStruct::Log::Interfaces::AdditionalDataField
  const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
  include LogStruct::Log::Shared::MergeAdditionalDataFields

  # Shared serialization of the common fields above.
  include LogStruct::Log::Interfaces::CommonFields
  include LogStruct::Log::Shared::SerializeCommon

  # Serializes event-specific fields, omitting every field that is nil.
  # Key order mirrors the declaration order above.
  sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
  def to_h
    {
      LogField::Mode => mode,
      LogField::PumaVersion => puma_version,
      LogField::PumaCodename => puma_codename,
      LogField::RubyVersion => ruby_version,
      LogField::MinThreads => min_threads,
      LogField::MaxThreads => max_threads,
      LogField::Environment => environment,
      LogField::ProcessId => process_id,
      LogField::ListeningAddresses => listening_addresses
    }.reject { |_field, value| value.nil? }
  end
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../enums/source"
-
1
require_relative "../enums/event"
-
1
require_relative "../enums/level"
-
1
require_relative "../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class Request < T::Struct
  extend T::Sig

  # Common fields present on every structured log event.
  const :source, Source::Rails, default: Source::Rails
  const :event, Event, default: Event::Request
  const :timestamp, Time, factory: -> { Time.now }
  const :level, Level, default: Level::Info

  # HTTP request context fields (all optional).
  const :path, T.nilable(String), default: nil
  const :http_method, T.nilable(String), default: nil
  const :source_ip, T.nilable(String), default: nil
  const :user_agent, T.nilable(String), default: nil
  const :referer, T.nilable(String), default: nil
  const :request_id, T.nilable(String), default: nil

  # Fields specific to the request event (all optional).
  const :format, T.nilable(Symbol), default: nil
  const :controller, T.nilable(String), default: nil
  const :action, T.nilable(String), default: nil
  const :status, T.nilable(Integer), default: nil
  const :duration_ms, T.nilable(Float), default: nil
  const :view, T.nilable(Float), default: nil
  const :database, T.nilable(Float), default: nil
  const :params, T.nilable(T::Hash[Symbol, T.untyped]), default: nil

  # Optional request-field accessors shared with other request-scoped events.
  include LogStruct::Log::Interfaces::RequestFields

  # Shared serialization of the common fields above.
  include LogStruct::Log::Interfaces::CommonFields
  include LogStruct::Log::Shared::SerializeCommon
  include LogStruct::Log::Shared::AddRequestFields

  # Serializes event-specific fields, omitting every field that is nil.
  # Key order mirrors the declaration order above.
  sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
  def to_h
    {
      LogField::Path => path,
      LogField::HttpMethod => http_method,
      LogField::SourceIp => source_ip,
      LogField::UserAgent => user_agent,
      LogField::Referer => referer,
      LogField::RequestId => request_id,
      LogField::Format => format,
      LogField::Controller => controller,
      LogField::Action => action,
      LogField::Status => status,
      LogField::DurationMs => duration_ms,
      LogField::View => view,
      LogField::Database => database,
      LogField::Params => params
    }.reject { |_field, value| value.nil? }
  end
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/source_parent.rb.erb
-
-
1
require_relative "security/ip_spoof"
-
1
require_relative "security/csrf_violation"
-
1
require_relative "security/blocked_host"
-
-
1
module LogStruct
-
1
module Log
-
1
class Security
-
1
class BaseFields < T::Struct
  extend T::Sig

  # HTTP request context fields shared by all security event structs.
  # Every field is optional.
  const :path, T.nilable(String), default: nil
  const :http_method, T.nilable(String), default: nil
  const :source_ip, T.nilable(String), default: nil
  const :user_agent, T.nilable(String), default: nil
  const :referer, T.nilable(String), default: nil
  const :request_id, T.nilable(String), default: nil

  # Shape of the keyword-argument hash produced by #to_kwargs, suitable
  # for splatting into an event struct's constructor.
  Kwargs = T.type_alias do
    {
      path: T.nilable(String),
      http_method: T.nilable(String),
      source_ip: T.nilable(String),
      user_agent: T.nilable(String),
      referer: T.nilable(String),
      request_id: T.nilable(String)
    }
  end

  # Returns the base fields as a keyword hash (nil values included).
  sig { returns(Kwargs) }
  def to_kwargs
    {
      path: path,
      http_method: http_method,
      source_ip: source_ip,
      user_agent: user_agent,
      referer: referer,
      request_id: request_id
    }
  end
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/event.rb.erb
-
-
1
require "log_struct/shared/interfaces/common_fields"
-
1
require "log_struct/shared/interfaces/additional_data_field"
-
1
require "log_struct/shared/interfaces/request_fields"
-
1
require "log_struct/shared/serialize_common"
-
1
require "log_struct/shared/merge_additional_data_fields"
-
1
require "log_struct/shared/add_request_fields"
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
1
require_relative "../../enums/log_field"
-
-
1
module LogStruct
-
1
module Log
-
1
class Security
-
1
class BlockedHost < T::Struct
  extend T::Sig

  # Common fields present on every structured log event.
  const :source, Source::Security, default: Source::Security
  const :event, Event, default: Event::BlockedHost
  const :timestamp, Time, factory: -> { Time.now }
  const :level, Level, default: Level::Info

  # HTTP request context fields (all optional).
  const :path, T.nilable(String), default: nil
  const :http_method, T.nilable(String), default: nil
  const :source_ip, T.nilable(String), default: nil
  const :user_agent, T.nilable(String), default: nil
  const :referer, T.nilable(String), default: nil
  const :request_id, T.nilable(String), default: nil

  # Fields specific to the blocked-host event (all optional).
  const :message, T.nilable(String), default: nil
  const :blocked_host, T.nilable(String), default: nil
  const :blocked_hosts, T.nilable(T::Array[String]), default: nil
  const :x_forwarded_for, T.nilable(String), default: nil
  const :allowed_hosts, T.nilable(T::Array[String]), default: nil
  const :allow_ip_hosts, T.nilable(T::Boolean), default: nil

  # Free-form extra context merged into the serialized output.
  include LogStruct::Log::Interfaces::AdditionalDataField
  const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
  include LogStruct::Log::Shared::MergeAdditionalDataFields

  # Optional request-field accessors shared with other request-scoped events.
  include LogStruct::Log::Interfaces::RequestFields

  # Shared serialization of the common fields above.
  include LogStruct::Log::Interfaces::CommonFields
  include LogStruct::Log::Shared::SerializeCommon
  include LogStruct::Log::Shared::AddRequestFields

  # Serializes event-specific fields, omitting every field that is nil.
  # Note the nil check (rather than truthiness) keeps a false
  # allow_ip_hosts in the output. Key order mirrors declaration order.
  sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
  def to_h
    {
      LogField::Path => path,
      LogField::HttpMethod => http_method,
      LogField::SourceIp => source_ip,
      LogField::UserAgent => user_agent,
      LogField::Referer => referer,
      LogField::RequestId => request_id,
      LogField::Message => message,
      LogField::BlockedHost => blocked_host,
      LogField::BlockedHosts => blocked_hosts,
      LogField::XForwardedFor => x_forwarded_for,
      LogField::AllowedHosts => allowed_hosts,
      LogField::AllowIpHosts => allow_ip_hosts
    }.reject { |_field, value| value.nil? }
  end
end
-
end
-
end
-
end
-
# typed: strict
# frozen_string_literal: true

# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb

require "log_struct/shared/interfaces/common_fields"
require "log_struct/shared/interfaces/additional_data_field"
require "log_struct/shared/interfaces/request_fields"
require "log_struct/shared/serialize_common"
require "log_struct/shared/merge_additional_data_fields"
require "log_struct/shared/add_request_fields"
require_relative "../../enums/source"
require_relative "../../enums/event"
require_relative "../../enums/level"
require_relative "../../enums/log_field"

module LogStruct
  module Log
    class Security
      # Structured log event for a failed CSRF (cross-site request forgery)
      # check. Immutable Sorbet T::Struct; serialize via #to_h.
      class CSRFViolation < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Security, default: Source::Security
        const :event, Event, default: Event::CSRFViolation
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info

        # Request context (all optional; populated when a request is available)
        const :path, T.nilable(String), default: nil
        const :http_method, T.nilable(String), default: nil
        const :source_ip, T.nilable(String), default: nil
        const :user_agent, T.nilable(String), default: nil
        const :referer, T.nilable(String), default: nil
        const :request_id, T.nilable(String), default: nil

        # Event-specific fields
        const :message, T.nilable(String), default: nil

        # Additional data
        include LogStruct::Log::Interfaces::AdditionalDataField
        const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
        include LogStruct::Log::Shared::MergeAdditionalDataFields

        # Request fields (optional)
        include LogStruct::Log::Interfaces::RequestFields

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon
        include LogStruct::Log::Shared::AddRequestFields

        # Event-specific serialization: nil optional fields are omitted so the
        # emitted JSON stays compact. Shared fields are added by SerializeCommon.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
          h[LogField::Path] = path unless path.nil?
          h[LogField::HttpMethod] = http_method unless http_method.nil?
          h[LogField::SourceIp] = source_ip unless source_ip.nil?
          h[LogField::UserAgent] = user_agent unless user_agent.nil?
          h[LogField::Referer] = referer unless referer.nil?
          h[LogField::RequestId] = request_id unless request_id.nil?
          h[LogField::Message] = message unless message.nil?
          h
        end
      end
    end
  end
end
-
# typed: strict
# frozen_string_literal: true

# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb

require "log_struct/shared/interfaces/common_fields"
require "log_struct/shared/interfaces/additional_data_field"
require "log_struct/shared/interfaces/request_fields"
require "log_struct/shared/serialize_common"
require "log_struct/shared/merge_additional_data_fields"
require "log_struct/shared/add_request_fields"
require_relative "../../enums/source"
require_relative "../../enums/event"
require_relative "../../enums/level"
require_relative "../../enums/log_field"

module LogStruct
  module Log
    class Security
      # Structured log event for a suspected IP-spoofing attempt (e.g. the
      # client IP disagrees with X-Forwarded-For). Immutable Sorbet T::Struct.
      class IPSpoof < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Security, default: Source::Security
        const :event, Event, default: Event::IPSpoof
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info

        # Request context (all optional)
        const :path, T.nilable(String), default: nil
        const :http_method, T.nilable(String), default: nil
        const :source_ip, T.nilable(String), default: nil
        const :user_agent, T.nilable(String), default: nil
        const :referer, T.nilable(String), default: nil
        const :request_id, T.nilable(String), default: nil

        # Event-specific fields
        const :message, T.nilable(String), default: nil
        const :client_ip, T.nilable(String), default: nil
        const :x_forwarded_for, T.nilable(String), default: nil

        # Additional data
        include LogStruct::Log::Interfaces::AdditionalDataField
        const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
        include LogStruct::Log::Shared::MergeAdditionalDataFields

        # Request fields (optional)
        include LogStruct::Log::Interfaces::RequestFields

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon
        include LogStruct::Log::Shared::AddRequestFields

        # Event-specific serialization: nil optional fields are omitted.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
          h[LogField::Path] = path unless path.nil?
          h[LogField::HttpMethod] = http_method unless http_method.nil?
          h[LogField::SourceIp] = source_ip unless source_ip.nil?
          h[LogField::UserAgent] = user_agent unless user_agent.nil?
          h[LogField::Referer] = referer unless referer.nil?
          h[LogField::RequestId] = request_id unless request_id.nil?
          h[LogField::Message] = message unless message.nil?
          h[LogField::ClientIp] = client_ip unless client_ip.nil?
          h[LogField::XForwardedFor] = x_forwarded_for unless x_forwarded_for.nil?
          h
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# AUTO-GENERATED: DO NOT EDIT
-
# Generated by scripts/generate_structs.rb
-
# Schemas dir: schemas/log_sources/
-
# Template: tools/codegen/templates/sorbet/source_parent.rb.erb
-
-
1
require_relative "shrine/upload"
-
1
require_relative "shrine/download"
-
1
require_relative "shrine/delete"
-
1
require_relative "shrine/metadata"
-
1
require_relative "shrine/exist"
-
# typed: strict
# frozen_string_literal: true

# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb

require "log_struct/shared/interfaces/common_fields"
require "log_struct/shared/interfaces/additional_data_field"
require "log_struct/shared/interfaces/request_fields"
require "log_struct/shared/serialize_common"
require "log_struct/shared/merge_additional_data_fields"
require "log_struct/shared/add_request_fields"
require_relative "../../enums/source"
require_relative "../../enums/event"
require_relative "../../enums/level"
require_relative "../../enums/log_field"

module LogStruct
  module Log
    class Shrine
      # Structured log event for a Shrine file deletion. Immutable Sorbet
      # T::Struct; both fields are required.
      class Delete < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Shrine, default: Source::Shrine
        const :event, Event, default: Event::Delete
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info

        # Event-specific fields (required)
        const :storage, Symbol
        const :location, String

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Event-specific serialization; shared fields added by SerializeCommon.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
          h[LogField::Storage] = storage
          h[LogField::Location] = location
          h
        end
      end
    end
  end
end
-
# typed: strict
# frozen_string_literal: true

# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb

require "log_struct/shared/interfaces/common_fields"
require "log_struct/shared/interfaces/additional_data_field"
require "log_struct/shared/interfaces/request_fields"
require "log_struct/shared/serialize_common"
require "log_struct/shared/merge_additional_data_fields"
require "log_struct/shared/add_request_fields"
require_relative "../../enums/source"
require_relative "../../enums/event"
require_relative "../../enums/level"
require_relative "../../enums/log_field"

module LogStruct
  module Log
    class Shrine
      # Structured log event for a Shrine file download. Immutable Sorbet
      # T::Struct; storage/location are required, download_options optional.
      class Download < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Shrine, default: Source::Shrine
        const :event, Event, default: Event::Download
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info

        # Event-specific fields
        const :storage, Symbol
        const :location, String
        const :download_options, T.nilable(T::Hash[Symbol, T.untyped]), default: nil

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Event-specific serialization: nil optional fields are omitted.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
          h[LogField::Storage] = storage
          h[LogField::Location] = location
          h[LogField::DownloadOptions] = download_options unless download_options.nil?
          h
        end
      end
    end
  end
end
-
# typed: strict
# frozen_string_literal: true

# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb

require "log_struct/shared/interfaces/common_fields"
require "log_struct/shared/interfaces/additional_data_field"
require "log_struct/shared/interfaces/request_fields"
require "log_struct/shared/serialize_common"
require "log_struct/shared/merge_additional_data_fields"
require "log_struct/shared/add_request_fields"
require_relative "../../enums/source"
require_relative "../../enums/event"
require_relative "../../enums/level"
require_relative "../../enums/log_field"

module LogStruct
  module Log
    class Shrine
      # Structured log event for a Shrine existence check. The boolean result
      # is stored in `exist` (nil when the check produced no answer).
      class Exist < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Shrine, default: Source::Shrine
        const :event, Event, default: Event::Exist
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info

        # Event-specific fields
        const :storage, Symbol
        const :location, String
        const :exist, T.nilable(T::Boolean), default: nil

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Event-specific serialization. Note the explicit nil check: `exist`
        # may legitimately be false, so a truthiness test would drop it.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
          h[LogField::Storage] = storage
          h[LogField::Location] = location
          h[LogField::Exist] = exist unless exist.nil?
          h
        end
      end
    end
  end
end
-
# typed: strict
# frozen_string_literal: true

# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb

require "log_struct/shared/interfaces/common_fields"
require "log_struct/shared/interfaces/additional_data_field"
require "log_struct/shared/interfaces/request_fields"
require "log_struct/shared/serialize_common"
require "log_struct/shared/merge_additional_data_fields"
require "log_struct/shared/add_request_fields"
require_relative "../../enums/source"
require_relative "../../enums/event"
require_relative "../../enums/level"
require_relative "../../enums/log_field"

module LogStruct
  module Log
    class Shrine
      # Structured log event for Shrine metadata extraction. Unlike the other
      # Shrine events, `location` is optional here (metadata can be extracted
      # before the file has a storage location).
      class Metadata < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Shrine, default: Source::Shrine
        const :event, Event, default: Event::Metadata
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info

        # Event-specific fields
        const :storage, Symbol
        const :location, T.nilable(String), default: nil
        const :metadata, T.nilable(T::Hash[String, T.untyped]), default: nil

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Event-specific serialization: nil optional fields are omitted.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
          h[LogField::Storage] = storage
          h[LogField::Location] = location unless location.nil?
          h[LogField::Metadata] = metadata unless metadata.nil?
          h
        end
      end
    end
  end
end
-
# typed: strict
# frozen_string_literal: true

# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb

require "log_struct/shared/interfaces/common_fields"
require "log_struct/shared/interfaces/additional_data_field"
require "log_struct/shared/interfaces/request_fields"
require "log_struct/shared/serialize_common"
require "log_struct/shared/merge_additional_data_fields"
require "log_struct/shared/add_request_fields"
require_relative "../../enums/source"
require_relative "../../enums/event"
require_relative "../../enums/level"
require_relative "../../enums/log_field"

module LogStruct
  module Log
    class Shrine
      # Structured log event for a Shrine file upload, including optional
      # uploader name, options hashes, and timing. Immutable Sorbet T::Struct.
      class Upload < T::Struct
        extend T::Sig

        # Shared/common fields
        const :source, Source::Shrine, default: Source::Shrine
        const :event, Event, default: Event::Upload
        const :timestamp, Time, factory: -> { Time.now }
        const :level, Level, default: Level::Info

        # Event-specific fields
        const :storage, Symbol
        const :location, String
        const :upload_options, T.nilable(T::Hash[Symbol, T.untyped]), default: nil
        const :options, T.nilable(T::Hash[Symbol, T.untyped]), default: nil
        const :uploader, T.nilable(String), default: nil
        const :duration_ms, T.nilable(Float), default: nil

        # Serialize shared fields
        include LogStruct::Log::Interfaces::CommonFields
        include LogStruct::Log::Shared::SerializeCommon

        # Event-specific serialization: nil optional fields are omitted.
        sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
        def to_h
          h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
          h[LogField::Storage] = storage
          h[LogField::Location] = location
          h[LogField::UploadOptions] = upload_options unless upload_options.nil?
          h[LogField::Options] = options unless options.nil?
          h[LogField::Uploader] = uploader unless uploader.nil?
          h[LogField::DurationMs] = duration_ms unless duration_ms.nil?
          h
        end
      end
    end
  end
end
-
# typed: strict
# frozen_string_literal: true

# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb

require "log_struct/shared/interfaces/common_fields"
require "log_struct/shared/interfaces/additional_data_field"
require "log_struct/shared/interfaces/request_fields"
require "log_struct/shared/serialize_common"
require "log_struct/shared/merge_additional_data_fields"
require "log_struct/shared/add_request_fields"
require_relative "../enums/source"
require_relative "../enums/event"
require_relative "../enums/level"
require_relative "../enums/log_field"

module LogStruct
  module Log
    # Structured log event for messages captured from Sidekiq, including the
    # worker process/thread identifiers and Sidekiq's logging context.
    class Sidekiq < T::Struct
      extend T::Sig

      # Shared/common fields
      const :source, Source::Sidekiq, default: Source::Sidekiq
      const :event, Event, default: Event::Log
      const :timestamp, Time, factory: -> { Time.now }
      const :level, Level, default: Level::Info

      # Event-specific fields (all optional)
      const :message, T.nilable(String), default: nil
      const :context, T.nilable(T::Hash[Symbol, T.untyped]), default: nil
      const :process_id, T.nilable(Integer), default: nil
      # Thread id may come through as a numeric object id or a string label.
      const :thread_id, T.nilable(T.any(Integer, String)), default: nil

      # Serialize shared fields
      include LogStruct::Log::Interfaces::CommonFields
      include LogStruct::Log::Shared::SerializeCommon

      # Event-specific serialization: nil optional fields are omitted.
      sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
      def to_h
        h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
        h[LogField::Message] = message unless message.nil?
        h[LogField::Context] = context unless context.nil?
        h[LogField::ProcessId] = process_id unless process_id.nil?
        h[LogField::ThreadId] = thread_id unless thread_id.nil?
        h
      end
    end
  end
end
-
# typed: strict
# frozen_string_literal: true

# AUTO-GENERATED: DO NOT EDIT
# Generated by scripts/generate_structs.rb
# Schemas dir: schemas/log_sources/
# Template: tools/codegen/templates/sorbet/event.rb.erb

require "log_struct/shared/interfaces/common_fields"
require "log_struct/shared/interfaces/additional_data_field"
require "log_struct/shared/interfaces/request_fields"
require "log_struct/shared/serialize_common"
require "log_struct/shared/merge_additional_data_fields"
require "log_struct/shared/add_request_fields"
require_relative "../enums/source"
require_relative "../enums/event"
require_relative "../enums/level"
require_relative "../enums/log_field"

module LogStruct
  module Log
    # Structured log event for a database query (Event::Database under
    # Source::App). Carries the SQL text, timing, and optional connection-pool
    # and parsed-statement details.
    class SQL < T::Struct
      extend T::Sig

      # Shared/common fields
      const :source, Source::App, default: Source::App
      const :event, Event, default: Event::Database
      const :timestamp, Time, factory: -> { Time.now }
      const :level, Level, default: Level::Info

      # Event-specific fields — message/sql/name/duration_ms are required,
      # the rest are optional enrichment.
      const :message, String
      const :sql, String
      const :name, String
      const :duration_ms, Float
      const :row_count, T.nilable(Integer), default: nil
      const :adapter, T.nilable(String), default: nil
      const :bind_params, T.nilable(T::Array[T.untyped]), default: nil
      const :database_name, T.nilable(String), default: nil
      const :connection_pool_size, T.nilable(Integer), default: nil
      const :active_connections, T.nilable(Integer), default: nil
      const :operation_type, T.nilable(String), default: nil
      const :table_names, T.nilable(T::Array[String]), default: nil

      # Additional data
      include LogStruct::Log::Interfaces::AdditionalDataField
      const :additional_data, T.nilable(T::Hash[T.any(String, Symbol), T.untyped]), default: nil
      include LogStruct::Log::Shared::MergeAdditionalDataFields

      # Serialize shared fields
      include LogStruct::Log::Interfaces::CommonFields
      include LogStruct::Log::Shared::SerializeCommon

      # Event-specific serialization: required fields always emitted, nil
      # optional fields omitted.
      sig { returns(T::Hash[LogStruct::LogField, T.untyped]) }
      def to_h
        h = T.let({}, T::Hash[LogStruct::LogField, T.untyped])
        h[LogField::Message] = message
        h[LogField::Sql] = sql
        h[LogField::Name] = name
        h[LogField::DurationMs] = duration_ms
        h[LogField::RowCount] = row_count unless row_count.nil?
        h[LogField::Adapter] = adapter unless adapter.nil?
        h[LogField::BindParams] = bind_params unless bind_params.nil?
        h[LogField::DatabaseName] = database_name unless database_name.nil?
        h[LogField::ConnectionPoolSize] = connection_pool_size unless connection_pool_size.nil?
        h[LogField::ActiveConnections] = active_connections unless active_connections.nil?
        h[LogField::OperationType] = operation_type unless operation_type.nil?
        h[LogField::TableNames] = table_names unless table_names.nil?
        h
      end
    end
  end
end
-
# typed: strict
# frozen_string_literal: true

require "active_support/tagged_logging"

# Monkey-patch ActiveSupport::TaggedLogging::Formatter to support hash inputs
# This allows us to pass structured data to the logger and have tags incorporated
# directly into the hash instead of being prepended as strings
module ActiveSupport
  module TaggedLogging
    extend T::Sig

    # Add class-level current_tags method for compatibility with Rails code
    # that expects to call ActiveSupport::TaggedLogging.current_tags
    # Use thread-local storage directly like Rails does internally
    sig { returns(T::Array[T.any(String, Symbol)]) }
    def self.current_tags
      Thread.current[:activesupport_tagged_logging_tags] || []
    end

    module FormatterExtension
      extend T::Sig
      extend T::Helpers
      requires_ancestor { ::ActiveSupport::TaggedLogging::Formatter }

      # Override the call method to support hash input/output, and wrap
      # plain strings in a Hash under a `message` key.
      # The data is then passed to our custom log formatter that transforms it
      # into a JSON string before logging.
      #
      # IMPORTANT: This only applies when LogStruct is enabled. When disabled,
      # we preserve the original Rails logging behavior to avoid wrapping
      # messages in hashes (which would break default Rails log formatting).
      sig { params(severity: T.any(String, Symbol), time: Time, progname: T.untyped, data: T.untyped).returns(String) }
      def call(severity, time, progname, data)
        # Skip hash wrapping when LogStruct is disabled to preserve default Rails behavior
        return super unless ::LogStruct.enabled?

        # Convert data to a hash if it's not already one
        # (non-Hash data — including nil — is stringified via #to_s)
        data = {message: data.to_s} unless data.is_a?(Hash)

        # Add current tags to the hash if present
        # Use thread-local storage directly as fallback if current_tags method doesn't exist
        tags = T.unsafe(self).respond_to?(:current_tags) ? current_tags : (Thread.current[:activesupport_tagged_logging_tags] || [])
        data[:tags] = tags if tags.present?

        # Call the original formatter with our enhanced data
        # (zsuper: forwards severity/time/progname and the mutated `data`)
        super
      end
    end
  end
end

ActiveSupport::TaggedLogging::Formatter.prepend(ActiveSupport::TaggedLogging::FormatterExtension)
-
# typed: strict
# frozen_string_literal: true

require_relative "enums/error_reporter"
require_relative "handlers"

# Try to require all supported error reporting libraries
# Users may have multiple installed, so we should load all of them
%w[sentry-ruby bugsnag rollbar honeybadger].each do |gem_name|
  require gem_name
rescue LoadError
  # If a particular gem is not available, we'll still load the others
end

module LogStruct
  # MultiErrorReporter provides a unified interface for reporting errors to various services.
  # You can also override this with your own error reporter by setting
  # LogStruct.config.error_reporting_handler
  # NOTE: This is used for cases where an error should be reported
  # but the operation should be allowed to continue (e.g. scrubbing log data.)
  class MultiErrorReporter
    # Wraps a user-supplied callable (proc/lambda/object responding to #call)
    # so it can be invoked with whichever of (error, context, source) its
    # arity supports.
    class CallableReporterWrapper
      extend T::Sig

      sig { params(callable: T.untyped).void }
      def initialize(callable)
        @callable = callable
      end

      sig { returns(T.untyped) }
      attr_reader :callable
      # Kept for backwards compatibility with callers that expect #original.
      alias_method :original, :callable

      # Dispatch on arity: negative arity (optional/splat params) falls
      # through to the full three-argument call.
      sig { params(error: StandardError, context: T.nilable(T::Hash[Symbol, T.untyped]), source: Source).void }
      def call(error, context, source)
        case callable_arity
        when 3
          callable.call(error, context, source)
        when 2
          callable.call(error, context)
        when 1
          callable.call(error)
        else
          callable.call(error, context, source)
        end
      end

      private

      # Defaults to -1 (treat as variadic) when the callable has no #arity.
      sig { returns(Integer) }
      def callable_arity
        callable.respond_to?(:arity) ? callable.arity : -1
      end
    end

    ReporterImpl = T.type_alias { T.any(ErrorReporter, CallableReporterWrapper) }

    # Class instance variable (not @@class var) holding the selected reporter.
    @reporter_impl = T.let(nil, T.nilable(ReporterImpl))

    class << self
      extend T::Sig

      # Currently selected reporter; auto-detected on first access.
      sig { returns(ReporterImpl) }
      def reporter
        reporter_impl
      end

      # Set the reporter to use (user-friendly API that accepts symbols)
      sig { params(reporter_type: T.any(ErrorReporter, Symbol, Handlers::ErrorReporter)).returns(ReporterImpl) }
      def reporter=(reporter_type)
        @reporter_impl = case reporter_type
        when ErrorReporter
          reporter_type
        when Symbol
          resolve_symbol_reporter(reporter_type)
        else
          wrap_callable_reporter(reporter_type)
        end
      end

      # Auto-detect which error reporting service to use
      # (first match wins, in this priority order)
      sig { returns(ErrorReporter) }
      def detect_reporter
        if defined?(::Sentry)
          ErrorReporter::Sentry
        elsif defined?(::Bugsnag)
          ErrorReporter::Bugsnag
        elsif defined?(::Rollbar)
          ErrorReporter::Rollbar
        elsif defined?(::Honeybadger)
          ErrorReporter::Honeybadger
        else
          ErrorReporter::RailsLogger
        end
      end

      # Report an error to the configured error reporting service
      sig { params(error: StandardError, context: T::Hash[T.untyped, T.untyped]).void }
      def report_error(error, context = {})
        # Call the appropriate reporter method based on what's available
        impl = reporter_impl

        case impl
        when ErrorReporter::Sentry
          report_to_sentry(error, context)
        when ErrorReporter::Bugsnag
          report_to_bugsnag(error, context)
        when ErrorReporter::Rollbar
          report_to_rollbar(error, context)
        when ErrorReporter::Honeybadger
          report_to_honeybadger(error, context)
        when ErrorReporter::RailsLogger
          fallback_logging(error, context)
        when CallableReporterWrapper
          impl.call(error, context, Source::Internal)
        end
      end

      private

      # Memoized reporter selection.
      sig { returns(ReporterImpl) }
      def reporter_impl
        @reporter_impl ||= detect_reporter
      end

      # Map a user-facing symbol to the ErrorReporter enum, raising with the
      # list of valid symbols on an unknown value.
      sig { params(symbol: Symbol).returns(ErrorReporter) }
      def resolve_symbol_reporter(symbol)
        case symbol
        when :sentry then ErrorReporter::Sentry
        when :bugsnag then ErrorReporter::Bugsnag
        when :rollbar then ErrorReporter::Rollbar
        when :honeybadger then ErrorReporter::Honeybadger
        when :rails_logger then ErrorReporter::RailsLogger
        else
          valid_types = ErrorReporter.values.map { |v| ":#{v.serialize}" }.join(", ")
          raise ArgumentError, "Unknown reporter type: #{symbol}. Valid types are: #{valid_types}"
        end
      end

      # Validate and wrap an arbitrary callable reporter.
      sig { params(callable: T.untyped).returns(CallableReporterWrapper) }
      def wrap_callable_reporter(callable)
        unless callable.respond_to?(:call)
          raise ArgumentError, "Reporter must respond to #call"
        end

        CallableReporterWrapper.new(callable)
      end

      # Report to Sentry. Reporting failures fall back to local logging so
      # error reporting can never crash the caller.
      sig { params(error: StandardError, context: T::Hash[T.untyped, T.untyped]).void }
      def report_to_sentry(error, context = {})
        return unless defined?(::Sentry)

        # Use the proper Sentry interface defined in the RBI
        ::Sentry.capture_exception(error, extra: context)
      rescue => e
        fallback_logging(e, {original_error: error.class.to_s})
      end

      # Report to Bugsnag
      sig { params(error: StandardError, context: T::Hash[T.untyped, T.untyped]).void }
      def report_to_bugsnag(error, context = {})
        return unless defined?(::Bugsnag)

        ::Bugsnag.notify(error) do |report|
          report.add_metadata(:context, context)
        end
      rescue => e
        fallback_logging(e, {original_error: error.class.to_s})
      end

      # Report to Rollbar
      sig { params(error: StandardError, context: T::Hash[T.untyped, T.untyped]).void }
      def report_to_rollbar(error, context = {})
        return unless defined?(::Rollbar)

        ::Rollbar.error(error, context)
      rescue => e
        fallback_logging(e, {original_error: error.class.to_s})
      end

      # Report to Honeybadger
      sig { params(error: StandardError, context: T::Hash[T.untyped, T.untyped]).void }
      def report_to_honeybadger(error, context = {})
        return unless defined?(::Honeybadger)

        ::Honeybadger.notify(error, context: context)
      rescue => e
        fallback_logging(e, {original_error: error.class.to_s})
      end

      # Fallback logging when no error reporting services are available
      # Uses the LogStruct.error method to properly log the error
      sig { params(error: StandardError, context: T::Hash[T.untyped, T.untyped]).void }
      def fallback_logging(error, context = {})
        # Defensive guard; the sig forbids nil but untyped callers may pass it.
        return if error.nil?

        # Create a proper error log entry
        error_log = Log.from_exception(Source::Internal, error, context)

        # Use LogStruct.error to properly log the error
        LogStruct.error(error_log)
      end
    end
  end
end
-
# typed: strict
# frozen_string_literal: true

require "digest"
require_relative "hash_utils"
require_relative "config_struct/filters"
require_relative "enums/source"

module LogStruct
  # Helpers for filtering sensitive data out of log payloads.
  # Formatter consults this class to decide which keys are filtered and how
  # filtered values are summarized.
  class ParamFilters
    class << self
      extend T::Sig

      # True when the key should be filtered, either by exact (case-insensitive)
      # symbol match against the configured filter_keys or by any configured
      # matcher. Matcher failures are reported and treated as "no match".
      sig { params(key: T.untyped, value: T.untyped).returns(T::Boolean) }
      def should_filter_key?(key, value = nil)
        config_filters = LogStruct.config.filters
        key_string = key.to_s
        key_symbol = key_string.downcase.to_sym

        return true if config_filters.filter_keys.include?(key_symbol)

        config_filters.filter_matchers.any? do |matcher|
          matcher.matches?(key_string, value)
        rescue => e
          handle_filter_matcher_error(e, matcher, key_string)
          false
        end
      end

      # True when the key's value should be replaced with an SHA256 digest
      # rather than being dropped entirely.
      sig { params(key: T.untyped).returns(T::Boolean) }
      def should_include_string_hash?(key)
        LogStruct.config.filters.filter_keys_with_hashes.include?(key.to_s.downcase.to_sym)
      end

      # Convert a value to a filtered summary hash (e.g. { _filtered: { class: "String", ... }})
      sig { params(key: T.untyped, data: T.untyped).returns(T::Hash[Symbol, T.untyped]) }
      def summarize_json_attribute(key, data)
        if data.is_a?(Hash)
          summarize_hash(data)
        elsif data.is_a?(Array)
          summarize_array(data)
        elsif data.is_a?(String)
          summarize_string(data, should_include_string_hash?(key))
        else
          {_class: data.class}
        end
      end

      # Summarize a String for logging: its class plus either an SHA256 digest
      # (when include_hash) or its byte length.
      sig { params(string: String, include_hash: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
      def summarize_string(string, include_hash)
        summary = {_class: String}
        if include_hash
          summary[:_hash] = HashUtils.hash_value(string)
        else
          summary[:_bytes] = string.bytesize
        end
        summary
      end

      # Summarize a Hash for logging: key count, up to 10 normalized keys, and
      # — only when no key is sensitive — the serialized byte size (size could
      # otherwise leak information about filtered values).
      sig { params(hash: T::Hash[T.untyped, T.untyped]).returns(T::Hash[Symbol, T.untyped]) }
      def summarize_hash(hash)
        return {_class: "Hash", _empty: true} if hash.empty?

        contains_sensitive = hash.any? { |key, value| should_filter_key?(key, value) }
        normalized = hash.keys.map { |key| normalize_summary_key(key) }

        summary = {
          _class: Hash,
          _keys_count: normalized.size,
          _keys: normalized.take(10)
        }
        summary[:_bytes] = hash.to_json.bytesize unless contains_sensitive
        summary
      end

      # Summarize an Array for logging: element count and serialized byte size.
      sig { params(array: T::Array[T.untyped]).returns(T::Hash[Symbol, T.untyped]) }
      def summarize_array(array)
        return {_class: "Array", _empty: true} if array.empty?

        {
          _class: Array,
          _count: array.size,
          _bytes: array.to_json.bytesize
        }
      end

      private

      # Prefer symbols for summary keys; fall back to the string form for
      # keys that can't be symbolized.
      sig { params(key: T.any(String, Symbol, Integer, T.untyped)).returns(T.any(Symbol, String)) }
      def normalize_summary_key(key)
        return key if key.is_a?(Symbol)

        key.respond_to?(:to_sym) ? key.to_sym : key.to_s
      rescue
        key.to_s
      end

      # Report a matcher failure without interrupting filtering.
      sig { params(error: StandardError, matcher: ConfigStruct::FilterMatcher, key: String).void }
      def handle_filter_matcher_error(error, matcher, key)
        LogStruct.handle_exception(
          error,
          source: Source::Internal,
          context: {
            matcher: matcher.label,
            key: key
          }
        )
      end
    end
  end
end
-
# typed: strict
# frozen_string_literal: true

require "sorbet-runtime"

module LogStruct
  # Replaces the default Rails server boot banner ("=> Booting Puma" etc.)
  # with structured log lines routed through the Puma integration.
  module RailsBootBannerSilencer
    extend T::Sig

    # Guard so install! is idempotent across repeated requires.
    @installed = T.let(false, T::Boolean)

    # Install the patch once, and only for `rails server` invocations.
    sig { void }
    def self.install!
      return if @installed
      @installed = true

      return unless ARGV.include?("server")
      patch!
    end

    # Attempt to patch Rails::Command::ServerCommand. Returns true on success,
    # false when Rails (or the server command) isn't loadable yet.
    sig { returns(T::Boolean) }
    def self.patch!
      begin
        require "rails/command"
        require "rails/commands/server/server_command"
      rescue LoadError
        # Best-effort – if Rails isn't available yet we'll try again later
        return false
      end

      server_command = T.let(nil, T.untyped)
      # const_get via string avoids a hard constant reference when Rails
      # defines the command lazily.
      # rubocop:disable Sorbet/ConstantsFromStrings
      begin
        server_command = ::Object.const_get("Rails::Command::ServerCommand")
      rescue NameError
        server_command = nil
      end
      # rubocop:enable Sorbet/ConstantsFromStrings
      return false unless server_command

      patch_server_command(server_command)
      true
    end

    # Prepend the silencer module unless it's already in the ancestor chain.
    sig { params(server_command: T.untyped).void }
    def self.patch_server_command(server_command)
      return if server_command <= ServerCommandSilencer

      server_command.prepend(ServerCommandSilencer)
    end

    # Prepended into Rails::Command::ServerCommand. Marks LogStruct as running
    # in server mode and swallows the plaintext boot banner, re-emitting its
    # lines through the Puma integration instead.
    module ServerCommandSilencer
      extend T::Sig

      sig { params(args: T.untyped, block: T.nilable(T.proc.returns(T.untyped))).returns(T.untyped) }
      def perform(*args, &block)
        ::LogStruct.server_mode = true
        super
      end

      # Intentionally does NOT call super: the stock banner is replaced by
      # structured output from consume_boot_banner.
      sig { params(server: T.untyped, url: T.nilable(String)).void }
      def print_boot_information(server, url)
        ::LogStruct.server_mode = true
        consume_boot_banner(server, url)
      end

      private

      # Rebuild the three banner lines and feed them through the Puma
      # integration; every step is best-effort so boot can never fail here.
      sig { params(server: T.untyped, url: T.nilable(String)).void }
      def consume_boot_banner(server, url)
        return unless defined?(::LogStruct::Integrations::Puma)

        begin
          ::LogStruct::Integrations::Puma.emit_boot_if_needed!
        rescue => e
          ::LogStruct::Integrations::Puma.handle_integration_error(e)
        end

        begin
          model = ::ActiveSupport::Inflector.demodulize(server)
        rescue
          # Fall back to a sensible default if ActiveSupport isn't usable yet.
          model = "Puma"
        end

        lines = [
          "=> Booting #{model}",
          build_rails_banner_line(url),
          "=> Run `#{lookup_executable} --help` for more startup options"
        ]

        lines.each do |line|
          ::LogStruct::Integrations::Puma.process_line(line)
        rescue => e
          ::LogStruct::Integrations::Puma.handle_integration_error(e)
        end
      end

      # Compose the "Rails … starting" line; degrades gracefully when
      # ::Rails.version / ::Rails.env are unavailable.
      sig { params(url: T.nilable(String)).returns(String) }
      def build_rails_banner_line(url)
        suffix = url ? " #{url}" : ""
        "=> Rails #{::Rails.version} application starting in #{::Rails.env}#{suffix}"
      rescue
        "=> Rails application starting"
      end

      # Resolve the CLI executable name via the command's private #executable,
      # defaulting to "rails" when it's missing or raises.
      sig { returns(String) }
      def lookup_executable
        return "rails" unless T.unsafe(self).respond_to?(:executable, true)

        T.cast(T.unsafe(self).send(:executable), String)
      rescue
        "rails"
      end
    end
  end
end
-
# typed: strict
# frozen_string_literal: true

require "rails"
require "semantic_logger"
require_relative "formatter"
require_relative "semantic_logger/setup"
require_relative "integrations"

module LogStruct
  # Railtie to integrate with Rails.
  #
  # Initializer ordering is significant:
  # 1. logstruct.configure_logger  (after :initialize_logger)
  # 2. logstruct.setup             (before :build_middleware_stack)
  # 3. logstruct.configure_middleware (before :build_middleware_stack)
  # 4. logstruct.puma_lifecycle    (after logstruct.configure_logger)
  class Railtie < ::Rails::Railtie
    # Configure early, right after logger initialization
    initializer "logstruct.configure_logger", after: :initialize_logger do |app|
      next unless LogStruct.enabled?

      # Use SemanticLogger for powerful logging features
      LogStruct::SemanticLogger::Setup.configure_semantic_logger(app)
    end

    # Setup all integrations after logger setup is complete
    initializer "logstruct.setup", before: :build_middleware_stack do |app|
      next unless LogStruct.enabled?

      # Merge Rails filter parameters into our filters
      LogStruct.merge_rails_filter_parameters!

      # Set up non-middleware integrations first
      Integrations.setup_integrations(stage: :non_middleware)

      # Note: Host allowances are managed by the test app itself.
    end

    # Setup middleware integrations during Rails configuration (before middleware stack is built)
    # Must be done in the Railtie class body, not in an initializer
    initializer "logstruct.configure_middleware", before: :build_middleware_stack do |app|
      # This runs before middleware stack is frozen, so we can configure it
      next unless LogStruct.enabled?

      Integrations.setup_integrations(stage: :middleware)
    end

    # Emit Puma lifecycle logs when running `rails server`
    initializer "logstruct.puma_lifecycle", after: "logstruct.configure_logger" do
      is_server = ::LogStruct.server_mode?
      next unless is_server
      begin
        require "log_struct/log/puma"
        # Best-effort extraction of the port from ARGV; supports
        # "-p 3000", "--port 3000" and "--port=3000" forms.
        port = T.let(nil, T.nilable(String))
        ARGV.each_with_index do |arg, idx|
          if arg == "-p" || arg == "--port"
            port = ARGV[idx + 1]
            break
          elsif arg.start_with?("--port=")
            port = arg.split("=", 2)[1]
            break
          end
        end
        started = LogStruct::Log::Puma::Start.new(
          mode: "single",
          environment: (defined?(::Rails) && ::Rails.respond_to?(:env)) ? ::Rails.env : nil,
          process_id: Process.pid,
          listening_addresses: port ? ["tcp://127.0.0.1:#{port}"] : nil
        )
        begin
          # NOTE(review): this stderr warn looks like a leftover debug
          # trace — confirm whether it should remain in released builds.
          warn("[logstruct] puma lifecycle init")
        rescue
        end
        LogStruct.info(started)

        # Emit a structured shutdown log when the server process exits.
        at_exit do
          shutdown = LogStruct::Log::Puma::Shutdown.new(
            process_id: Process.pid
          )
          LogStruct.info(shutdown)
        end
      rescue
        # best-effort
      end
    end

    # Delegate integration initializers to Integrations module
    LogStruct::Integrations.setup_initializers(self)
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require "semantic_logger"
-
1
require_relative "formatter"
-
-
1
module LogStruct
-
1
module SemanticLogger
-
# Development-Optimized Colorized JSON Formatter
-
#
-
# This formatter extends SemanticLogger's Color formatter to provide beautiful,
-
# readable JSON output in development environments. It significantly improves
-
# the developer experience when working with structured logs.
-
#
-
# ## Benefits of Colorized Output:
-
#
-
# ### Readability
-
# - **Syntax highlighting**: JSON keys, values, and data types are color-coded
-
# - **Visual hierarchy**: Different colors help identify structure at a glance
-
# - **Error spotting**: Quickly identify malformed data or unexpected values
-
# - **Context separation**: Log entries are visually distinct from each other
-
#
-
# ### Performance in Development
-
# - **Faster debugging**: Quickly scan logs without reading every character
-
# - **Pattern recognition**: Colors help identify common log patterns
-
# - **Reduced cognitive load**: Less mental effort required to parse log output
-
# - **Improved workflow**: Spend less time reading logs, more time coding
-
#
-
# ### Customization
-
# - **Configurable colors**: Customize colors for keys, strings, numbers, etc.
-
# - **Environment-aware**: Automatically disabled in production/CI environments
-
# - **Fallback support**: Gracefully falls back to standard formatting if needed
-
#
-
# ## Color Mapping:
-
# - **Keys**: Yellow - Easy to spot field names
-
# - **Strings**: Green - Clear indication of text values
-
# - **Numbers**: Blue - Numeric values stand out
-
# - **Booleans**: Magenta - true/false values are distinctive
-
# - **Null**: Red - Missing values are immediately visible
-
# - **Logger names**: Cyan - Source identification
-
#
-
# ## Integration with SemanticLogger:
-
# This formatter preserves all SemanticLogger benefits (performance, threading,
-
# reliability) while adding visual enhancements. It processes LogStruct types,
-
# hashes, and plain messages with appropriate colorization.
-
#
-
# The formatter is automatically enabled in development when `enable_color_output`
-
# is true (default), providing zero-configuration enhanced logging experience.
-
1
class ColorFormatter < ::SemanticLogger::Formatters::Color
  extend T::Sig

  # @param color_map [Hash{Symbol=>Symbol}, nil] overrides for the color
  #   mapping used by colorize_text (keys: :key, :string, :number, :bool,
  #   :nil, :name); defaults to default_color_map when nil
  # @param args remaining options forwarded to SemanticLogger's Color formatter
  sig { params(color_map: T.nilable(T::Hash[Symbol, Symbol]), args: T.untyped).void }
  def initialize(color_map: nil, **args)
    super(**args)
    @logstruct_formatter = T.let(LogStruct::Formatter.new, LogStruct::Formatter)

    # Set up custom color mapping
    @custom_colors = T.let(color_map || default_color_map, T::Hash[Symbol, Symbol])
  end

  # Format a SemanticLogger log record for human-readable console output:
  # structured payloads (LogStruct types, hashes, T::Structs) are rendered
  # as colorized pretty-printed JSON behind a "time level [pid] name -- "
  # prefix; plain messages fall through to SemanticLogger's Color formatter.
  sig { override.params(log: ::SemanticLogger::Log, logger: T.untyped).returns(String) }
  def call(log, logger)
    # Handle LogStruct types specially with colorization
    if log.payload.is_a?(LogStruct::Log::Interfaces::CommonFields)
      # Get the LogStruct formatted JSON
      logstruct_json = @logstruct_formatter.call(log.level, log.time, log.name, log.payload)

      # Parse and colorize it
      begin
        parsed_data = T.let(JSON.parse(logstruct_json), T::Hash[String, T.untyped])
        colorized_json = colorize_json(parsed_data)

        # Use SemanticLogger's prefix formatting but with our colorized content
        prefix = format("%<time>s %<level>s [%<process>s] %<name>s -- ",
          time: format_time(log.time),
          level: format_level(log.level),
          process: log.process_info,
          name: format_name(log.name))

        "#{prefix}#{colorized_json}\n"
      rescue JSON::ParserError
        # Fallback to standard formatting
        super
      end
    elsif log.payload.is_a?(Hash) || log.payload.is_a?(T::Struct)
      # Process hashes through our formatter then colorize
      begin
        logstruct_json = @logstruct_formatter.call(log.level, log.time, log.name, log.payload)
        parsed_data = T.let(JSON.parse(logstruct_json), T::Hash[String, T.untyped])
        colorized_json = colorize_json(parsed_data)

        prefix = format("%<time>s %<level>s [%<process>s] %<name>s -- ",
          time: format_time(log.time),
          level: format_level(log.level),
          process: log.process_info,
          name: format_name(log.name))

        "#{prefix}#{colorized_json}\n"
      rescue JSON::ParserError
        # Fallback to standard formatting
        super
      end
    else
      # For plain messages, use SemanticLogger's default colorization
      super
    end
  end

  private

  sig { returns(LogStruct::Formatter) }
  attr_reader :logstruct_formatter

  # Default color mapping for LogStruct JSON
  sig { returns(T::Hash[Symbol, Symbol]) }
  def default_color_map
    {
      key: :yellow,
      string: :green,
      number: :blue,
      bool: :magenta,
      nil: :red,
      name: :cyan
    }
  end

  # Simple JSON colorizer that adds ANSI codes.
  # Note: the regex substitutions strip the quotes around keys and string
  # values while colorizing, so the output is for human eyes only — it is
  # no longer strictly parseable JSON.
  sig { params(data: T::Hash[String, T.untyped]).returns(String) }
  def colorize_json(data)
    # For now, just return a simple colorized version of the JSON
    # This is much simpler than the full recursive approach
    json_str = JSON.pretty_generate(data)

    # Apply basic colorization with regex
    json_str.gsub(/"([^"]+)":/, colorize_text('\1', :key) + ":")
      .gsub(/: "([^"]*)"/, ": " + colorize_text('\1', :string))
      .gsub(/: (\d+\.?\d*)/, ": " + colorize_text('\1', :number))
      .gsub(/: (true|false)/, ": " + colorize_text('\1', :bool))
      .gsub(": null", ": " + colorize_text("null", :nil))
  end

  # Add ANSI color codes to text
  sig { params(text: String, color_type: Symbol).returns(String) }
  def colorize_text(text, color_type)
    color = @custom_colors[color_type] || :white
    "\e[#{color_code_for(color)}m#{text}\e[0m"
  end

  # Format timestamp (microsecond precision)
  sig { params(time: Time).returns(String) }
  def format_time(time)
    time.strftime("%Y-%m-%d %H:%M:%S.%6N")
  end

  # Format log level with color (first letter only, e.g. "I" for info)
  sig { params(level: T.any(String, Symbol)).returns(String) }
  def format_level(level)
    level_str = level.to_s.upcase[0]
    color = level_color_for(level.to_sym)
    "\e[#{color_code_for(color)}m#{level_str}\e[0m"
  end

  # Format logger name with color; empty string when the log has no name
  sig { params(name: T.nilable(String)).returns(String) }
  def format_name(name)
    return "" unless name
    color = @custom_colors[:name] || :cyan
    "\e[#{color_code_for(color)}m#{name}\e[0m"
  end

  # Get color for log level
  sig { params(level: Symbol).returns(Symbol) }
  def level_color_for(level)
    case level
    when :debug then :magenta
    when :info then :cyan
    when :warn then :yellow
    when :error then :red
    when :fatal then :red
    else :cyan
    end
  end

  # Get ANSI color code for color symbol
  sig { params(color: Symbol).returns(String) }
  def color_code_for(color)
    case color
    when :black then "30"
    when :red then "31"
    when :green then "32"
    when :yellow then "33"
    when :blue then "34"
    when :magenta then "35"
    when :cyan then "36"
    when :white then "37"
    when :bright_black then "90"
    when :bright_red then "91"
    when :bright_green then "92"
    when :bright_yellow then "93"
    when :bright_blue then "94"
    when :bright_magenta then "95"
    when :bright_cyan then "96"
    when :bright_white then "97"
    else "37" # default to white
    end
  end
end
-
end
-
end
-
# typed: strict
# frozen_string_literal: true

module LogStruct
  module SemanticLogger
    module Concerns
      # Overrides of the standard severity methods (debug/info/warn/error/
      # fatal) for LogStruct::SemanticLogger::Logger.
      #
      # Each override follows the same three-step pattern:
      # 1. Fire an ActiveSupport::Notifications event for structured messages
      #    (see #instrument_log).
      # 2. Route structured messages (LogStruct types, T::Structs, Hashes)
      #    through SemanticLogger as a payload; plain messages pass straight
      #    through via `super`.
      # 3. Mirror the call to every broadcast logger registered via
      #    Logger#broadcast_to (strings are forwarded directly; otherwise
      #    only the block — if any — is forwarded).
      module LogMethods
        extend T::Sig
        extend T::Helpers
        # These methods call `broadcasts` and `super`, both provided by
        # LogStruct::SemanticLogger::Logger.
        requires_ancestor { LogStruct::SemanticLogger::Logger }

        # Override log methods to handle LogStruct types and broadcast
        sig { params(message: T.untyped, payload: T.untyped, block: T.nilable(T.proc.returns(String))).returns(T::Boolean) }
        def debug(message = nil, payload = nil, &block)
          instrument_log(message, :debug)
          result = if message.is_a?(LogStruct::Log::Interfaces::CommonFields) || message.is_a?(T::Struct) || message.is_a?(Hash)
            super(nil, payload: message, &block)
          else
            super
          end
          broadcasts.each do |logger|
            next unless logger.respond_to?(:debug)
            message.is_a?(String) ? logger.debug(message) : (logger.debug(&block) if block)
          end
          result
        end

        sig { params(message: T.untyped, payload: T.untyped, block: T.nilable(T.proc.returns(String))).returns(T::Boolean) }
        def info(message = nil, payload = nil, &block)
          instrument_log(message, :info)
          result = if message.is_a?(LogStruct::Log::Interfaces::CommonFields) || message.is_a?(T::Struct) || message.is_a?(Hash)
            super(nil, payload: message, &block)
          else
            super
          end
          broadcasts.each do |logger|
            next unless logger.respond_to?(:info)
            message.is_a?(String) ? logger.info(message) : (logger.info(&block) if block)
          end
          result
        end

        sig { params(message: T.untyped, payload: T.untyped, block: T.nilable(T.proc.returns(String))).returns(T::Boolean) }
        def warn(message = nil, payload = nil, &block)
          instrument_log(message, :warn)
          result = if message.is_a?(LogStruct::Log::Interfaces::CommonFields) || message.is_a?(T::Struct) || message.is_a?(Hash)
            super(nil, payload: message, &block)
          else
            super
          end
          broadcasts.each do |logger|
            next unless logger.respond_to?(:warn)
            message.is_a?(String) ? logger.warn(message) : (logger.warn(&block) if block)
          end
          result
        end

        sig { params(message: T.untyped, payload: T.untyped, block: T.nilable(T.proc.returns(String))).returns(T::Boolean) }
        def error(message = nil, payload = nil, &block)
          instrument_log(message, :error)
          result = if message.is_a?(LogStruct::Log::Interfaces::CommonFields) || message.is_a?(T::Struct) || message.is_a?(Hash)
            super(nil, payload: message, &block)
          else
            super
          end
          broadcasts.each do |logger|
            next unless logger.respond_to?(:error)
            message.is_a?(String) ? logger.error(message) : (logger.error(&block) if block)
          end
          result
        end

        sig { params(message: T.untyped, payload: T.untyped, block: T.nilable(T.proc.returns(String))).returns(T::Boolean) }
        def fatal(message = nil, payload = nil, &block)
          instrument_log(message, :fatal)
          result = if message.is_a?(LogStruct::Log::Interfaces::CommonFields) || message.is_a?(T::Struct) || message.is_a?(Hash)
            super(nil, payload: message, &block)
          else
            super
          end
          broadcasts.each do |logger|
            next unless logger.respond_to?(:fatal)
            message.is_a?(String) ? logger.fatal(message) : (logger.fatal(&block) if block)
          end
          result
        end

        private

        # Instrument log events for subscribers.
        # Only structured messages (LogStruct types and T::Structs) are
        # instrumented; plain strings and hashes are not.
        sig { params(message: T.untyped, level: Symbol).void }
        def instrument_log(message, level)
          return unless message.is_a?(LogStruct::Log::Interfaces::CommonFields) || message.is_a?(T::Struct)

          ::ActiveSupport::Notifications.instrument("log.logstruct", log: message, level: level)
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require "semantic_logger"
-
1
require_relative "../formatter"
-
-
1
module LogStruct
-
1
module SemanticLogger
-
# High-Performance JSON Formatter with LogStruct Integration
-
#
-
# This formatter extends SemanticLogger's JSON formatter to provide optimal
-
# JSON serialization performance while preserving all LogStruct features
-
# including data filtering, sensitive data scrubbing, and type-safe structures.
-
#
-
# ## Performance Advantages Over Rails Logger:
-
#
-
# ### Serialization Performance
-
# - **Direct JSON generation**: Bypasses intermediate object creation
-
# - **Streaming serialization**: Memory-efficient processing of large objects
-
# - **Type-optimized paths**: Fast serialization for common data types
-
# - **Zero-copy operations**: Minimal memory allocation during serialization
-
#
-
# ### Memory Efficiency
-
# - **Object reuse**: Formatter instances are reused across log calls
-
# - **Lazy evaluation**: Only processes data that will be included in output
-
# - **Efficient buffering**: Optimal buffer sizes for JSON generation
-
# - **Garbage collection friendly**: Minimal object allocation reduces GC pressure
-
#
-
# ### Integration Benefits
-
# - **LogStruct compatibility**: Native support for typed log structures
-
# - **Filter preservation**: Maintains all LogStruct filtering capabilities
-
# - **Scrubbing integration**: Seamless sensitive data scrubbing
-
# - **Error handling**: Robust handling of serialization errors
-
#
-
# ## Feature Preservation:
-
# This formatter maintains full compatibility with LogStruct's features:
-
# - Sensitive data filtering (passwords, tokens, etc.)
-
# - Recursive object scrubbing and processing
-
# - Type-safe log structure handling
-
# - Custom field transformations
-
# - Metadata preservation and enrichment
-
#
-
# ## JSON Output Structure:
-
# The formatter produces consistent, parseable JSON that includes:
-
# - Standard log fields (timestamp, level, message, logger name)
-
# - LogStruct-specific fields (source, event, context)
-
# - SemanticLogger metadata (process ID, thread ID, tags)
-
# - Application-specific payload data
-
#
-
# This combination provides the performance benefits of SemanticLogger with
-
# the structured data benefits of LogStruct, resulting in faster, more
-
# reliable logging for high-traffic applications.
-
1
class Formatter < ::SemanticLogger::Formatters::Json
  extend T::Sig

  sig { void }
  def initialize
    super
    @logstruct_formatter = T.let(LogStruct::Formatter.new, LogStruct::Formatter)
  end

  # Serialize a SemanticLogger log record to a single-line JSON string.
  #
  # SemanticLogger wraps structured payloads in a `{payload: ...}` hash,
  # so the record is first normalized by #loggable_payload before being
  # handed to the LogStruct formatter.
  #
  # @param log [::SemanticLogger::Log] the record to format
  # @param logger [Object] unused; required by the formatter interface
  # @return [String] JSON without a trailing newline
  sig { params(log: ::SemanticLogger::Log, logger: T.untyped).returns(String) }
  def call(log, logger)
    json = @logstruct_formatter.call(log.level, log.time, log.name, loggable_payload(log))
    # SemanticLogger appenders typically add their own newline. Avoid double newlines by stripping ours.
    json.end_with?("\n") ? json.chomp : json
  end

  private

  sig { returns(LogStruct::Formatter) }
  attr_reader :logstruct_formatter

  # Normalize the log's payload into something LogStruct::Formatter accepts.
  # Replaces four near-identical dispatch branches that previously lived in
  # #call; the behavior is unchanged:
  # - a LogStruct type or T::Struct wrapped in a `{payload: ...}` hash is unwrapped
  # - a direct LogStruct type, Hash, or T::Struct is passed through as-is
  # - anything else becomes a Log::Plain entry (payload takes precedence
  #   over log.message when present)
  sig { params(log: ::SemanticLogger::Log).returns(T.untyped) }
  def loggable_payload(log)
    payload = log.payload

    if payload.is_a?(Hash)
      inner = payload[:payload]
      return inner if inner.is_a?(LogStruct::Log::Interfaces::CommonFields) || inner.is_a?(T::Struct)

      return payload
    end

    return payload if payload.is_a?(LogStruct::Log::Interfaces::CommonFields) || payload.is_a?(T::Struct)

    ::LogStruct::Log::Plain.new(
      message: payload || log.message,
      timestamp: log.time
    )
  end
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require "semantic_logger"
-
1
require_relative "concerns/log_methods"
-
-
1
module LogStruct
-
1
module SemanticLogger
-
# High-Performance Logger with LogStruct Integration
-
#
-
# This logger extends SemanticLogger::Logger to provide optimal logging performance
-
# while seamlessly integrating with LogStruct's typed logging system.
-
#
-
# ## Key Benefits Over Rails.logger:
-
#
-
# ### Performance
-
# - **10-100x faster** than Rails' default logger for high-volume applications
-
# - **Non-blocking I/O**: Uses background threads for actual log writes
-
# - **Minimal memory allocation**: Efficient object reuse and zero-copy operations
-
# - **Batched writes**: Reduces system calls by batching multiple log entries
-
#
-
# ### Reliability
-
# - **Thread-safe operations**: Safe for use in multi-threaded environments
-
# - **Error resilience**: Logger failures don't crash your application
-
# - **Graceful fallbacks**: Continues operating even if appenders fail
-
#
-
# ### Features
-
# - **Structured logging**: Native support for LogStruct types and hashes
-
# - **Rich metadata**: Automatic inclusion of process ID, thread ID, timestamps
-
# - **Tagged context**: Hierarchical tagging for request/job tracking
-
# - **Multiple destinations**: Simultaneously log to files, STDOUT, cloud services
-
#
-
# ### Development Experience
-
# - **Colorized output**: Beautiful ANSI-colored logs in development
-
# - **Detailed timing**: Built-in measurement of log processing time
-
# - **Context preservation**: Maintains Rails.logger compatibility
-
#
-
# ## Usage Examples
-
#
-
# The logger automatically handles LogStruct types, hashes, and plain messages:
-
#
-
# ```ruby
-
# logger = LogStruct::SemanticLogger::Logger.new("MyApp")
-
#
-
# # LogStruct typed logging (optimal performance)
-
# log_entry = LogStruct::Log::Plain.new(
-
# message: "User authenticated",
-
# source: LogStruct::Source::App,
-
# event: LogStruct::Event::Security
-
# )
-
# logger.info(log_entry)
-
#
-
# # Hash logging (automatically structured)
-
# logger.info({
-
# action: "user_login",
-
# user_id: 123,
-
# ip_address: "192.168.1.1"
-
# })
-
#
-
# # Plain string logging (backward compatibility)
-
# logger.info("User logged in successfully")
-
# ```
-
#
-
# The logger is a drop-in replacement for Rails.logger and maintains full
-
# API compatibility while providing significantly enhanced performance.
-
1
class Logger < ::SemanticLogger::Logger
  extend T::Sig

  # @param name [String, Symbol, Module, Class] logger name shown in output
  # @param level [Symbol, nil] minimum severity; nil uses SemanticLogger's default
  # @param filter [Object, nil] SemanticLogger filter (regexp/proc)
  sig { params(name: T.any(String, Symbol, Module, T::Class[T.anything]), level: T.nilable(Symbol), filter: T.untyped).void }
  def initialize(name = "Application", level: nil, filter: nil)
    # SemanticLogger::Logger expects positional arguments, not named arguments
    super(name, level, filter)
    # T.untyped because users can pass any logger: ::Logger, ActiveSupport::Logger,
    # custom loggers (FakeLogger in tests), or third-party loggers
    @broadcasts = T.let([], T::Array[T.untyped])
    # ActiveJob expects logger.formatter to exist and respond to current_tags
    @formatter = T.let(FormatterProxy.new, FormatterProxy)
  end

  # ActiveSupport::BroadcastLogger compatibility
  # These methods allow Rails.logger to broadcast to multiple loggers
  sig { returns(T::Array[T.untyped]) }
  attr_reader :broadcasts

  # ActiveJob compatibility - expects logger.formatter.current_tags
  sig { returns(FormatterProxy) }
  attr_reader :formatter

  # Register an additional logger that receives every log call made on
  # this logger (see Concerns::LogMethods). Returns the logger passed in.
  # T.untyped for logger param because we accept any logger-like object:
  # ::Logger, ActiveSupport::Logger, test doubles, etc.
  sig { params(logger: T.untyped).returns(T.untyped) }
  def broadcast_to(logger)
    @broadcasts << logger
    logger
  end

  # Remove a previously registered broadcast logger.
  sig { params(logger: T.untyped).void }
  def stop_broadcasting_to(logger)
    @broadcasts.delete(logger)
  end

  # Provides the debug/info/warn/error/fatal overrides that handle
  # LogStruct types, instrumentation, and broadcasting.
  include Concerns::LogMethods

  # Support for tagged logging
  sig { params(tags: T.untyped, block: T.proc.returns(T.untyped)).returns(T.untyped) }
  def tagged(*tags, &block)
    # Convert tags to array and pass individually to avoid splat issues
    tag_array = tags.flatten
    if tag_array.empty?
      super(&block)
    else
      super(*T.unsafe(tag_array), &block)
    end
  end

  # Ensure compatibility with Rails.logger interface
  sig { returns(T::Array[T.any(String, Symbol)]) }
  def current_tags
    ::SemanticLogger.tags
  end

  # Remove all currently pushed tags (Rails.logger compatibility).
  sig { void }
  def clear_tags!
    # SemanticLogger doesn't have clear_tags!, use pop_tags instead
    count = ::SemanticLogger.tags.length
    ::SemanticLogger.pop_tags(count) if count > 0
  end

  # Push one or more tags; returns the flattened, nil-free list pushed.
  sig { params(tags: T.untyped).returns(T::Array[T.untyped]) }
  def push_tags(*tags)
    flat = tags.flatten.compact
    flat.each { |tag| ::SemanticLogger.push_tags(tag) }
    flat
  end

  # Pop `count` tags from the current tag stack.
  sig { params(count: Integer).void }
  def pop_tags(count = 1)
    ::SemanticLogger.pop_tags(count)
  end

  # Support for << operator (used by RailsLogSplitter).
  # Logs the message at info level, mirrors it to broadcast loggers that
  # support <<, and returns self so calls can be chained.
  sig { params(msg: String).returns(T.self_type) }
  def <<(msg)
    info(msg)
    @broadcasts.each { |logger| logger << msg if logger.respond_to?(:<<) }
    self
  end
end
-
-
# Proxy object to provide ActiveJob-compatible formatter interface
class FormatterProxy
  extend T::Sig

  # Returns the tags ActiveSupport::TaggedLogging stored on the current
  # thread, or an empty array when no tags have been pushed.
  sig { returns(T::Array[T.any(String, Symbol)]) }
  def current_tags
    stored_tags = Thread.current[:activesupport_tagged_logging_tags]
    stored_tags || []
  end
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require "semantic_logger"
-
1
require_relative "formatter"
-
1
require_relative "color_formatter"
-
1
require_relative "logger"
-
-
1
module LogStruct
-
# SemanticLogger Integration
-
#
-
# LogStruct uses SemanticLogger as its core logging engine, providing significant
-
# performance and functionality benefits over Rails' default logger:
-
#
-
# ## Performance Benefits
-
# - **Asynchronous logging**: Logs are written in a background thread, eliminating
-
# I/O blocking in your main application threads
-
# - **High throughput**: Can handle 100,000+ log entries per second
-
# - **Memory efficient**: Structured data processing with minimal allocations
-
# - **Zero-copy serialization**: Direct JSON generation without intermediate objects
-
#
-
# ## Reliability Benefits
-
# - **Thread-safe**: All operations are thread-safe by design
-
# - **Graceful degradation**: Continues logging even if appenders fail
-
# - **Error isolation**: Logging errors don't crash your application
-
# - **Buffered writes**: Reduces disk I/O with intelligent batching
-
#
-
# ## Feature Benefits
-
# - **Multiple appenders**: Log to files, STDOUT, databases, cloud services simultaneously
-
# - **Structured metadata**: Rich context including process ID, thread ID, tags, and more
-
# - **Log filtering**: Runtime filtering by logger name, level, or custom rules
-
# - **Formatters**: Pluggable output formatting (JSON, colorized, custom)
-
# - **Metrics integration**: Built-in performance metrics and timing data
-
#
-
# ## Development Experience
-
# - **Colorized output**: Beautiful, readable logs in development with ANSI colors
-
# - **Tagged logging**: Hierarchical context tracking (requests, jobs, etc.)
-
# - **Debugging tools**: Detailed timing and memory usage information
-
# - **Hot reloading**: Configuration changes without application restart
-
#
-
# ## Production Benefits
-
# - **Log rotation**: Automatic file rotation with size/time-based policies
-
# - **Compression**: Automatic log compression to save disk space
-
# - **Cloud integration**: Direct integration with CloudWatch, Splunk, etc.
-
# - **Alerting**: Built-in support for error alerting and monitoring
-
#
-
# ## LogStruct Specific Enhancements
-
# - **Type safety**: Full Sorbet type annotations for compile-time error detection
-
# - **Structured data**: Native support for LogStruct's typed log structures
-
# - **Filtering integration**: Seamless integration with LogStruct's data filters
-
# - **Error handling**: Enhanced error reporting with full stack traces and context
-
#
-
# SemanticLogger is a production-grade logging framework used by companies processing
-
# millions of requests per day. It provides the performance and reliability needed
-
# for high-traffic Rails applications while maintaining an elegant developer experience.
-
1
module SemanticLogger
-
# Handles setup and configuration of SemanticLogger for Rails applications
-
#
-
# This module provides the core integration between LogStruct and SemanticLogger,
-
# configuring appenders, formatters, and logger replacement to provide optimal
-
# logging performance while maintaining full compatibility with Rails conventions.
-
1
module Setup
-
1
extend T::Sig
-
-
# Configures SemanticLogger as the primary logging engine for the Rails application
-
#
-
# This method replaces Rails' default logger with SemanticLogger, providing:
-
# - **10-100x performance improvement** for high-volume logging
-
# - **Non-blocking I/O** through background thread processing
-
# - **Enhanced reliability** with graceful error handling
-
# - **Multiple output destinations** (files, STDOUT, cloud services)
-
# - **Structured metadata** including process/thread IDs and timing
-
#
-
# The configuration automatically:
-
# - Determines optimal log levels based on environment
-
# - Sets up appropriate appenders (console, file, etc.)
-
# - Enables colorized output in development
-
# - Replaces Rails.logger and component loggers
-
# - Preserves full Rails.logger API compatibility
-
#
-
# @param app [Rails::Application] The Rails application instance
-
2
sig { params(app: T.untyped).void }
def self.configure_semantic_logger(app)
  # Set SemanticLogger configuration: identify the app and environment
  # in the global SemanticLogger settings.
  ::SemanticLogger.application = Rails.application.class.module_parent_name
  ::SemanticLogger.environment = Rails.env

  # Determine log level from Rails config
  log_level = determine_log_level(app)
  ::SemanticLogger.default_level = log_level

  # Clear existing appenders before adding ours, so nothing is logged twice.
  ::SemanticLogger.clear_appenders!

  # Add appropriate appenders based on environment
  add_appenders(app)

  # Replace Rails.logger (and component loggers) with SemanticLogger
  replace_rails_logger(app)
end
-
-
2
# Resolve the SemanticLogger log level from the Rails configuration.
#
# Falls back to :info in production and :debug in every other environment
# when the app does not set config.log_level. (The previous version had
# separate test/else branches that both returned :debug.)
#
# @param app [Rails::Application]
# @return [Symbol] the log level as a Symbol
sig { params(app: T.untyped).returns(Symbol) }
def self.determine_log_level(app)
  level = app.config.log_level || (Rails.env.production? ? :info : :debug)
  # Rails config.log_level can be a String or Symbol
  level.is_a?(String) ? level.to_sym : level
end
-
-
2
# Attach SemanticLogger appenders for the current environment.
#
# A console appender always writes to the IO chosen by .determine_output.
# In development the colorized human formatter is used only when JSON
# output is not preferred AND color output is enabled; every other
# combination (including all non-development environments) uses the
# structured JSON formatter. This collapses three previously duplicated
# add_appender calls into one without changing which formatter is chosen.
# A JSON file appender is also added when the Rails app has a log path
# configured (normal Rails behavior).
#
# @param app [Rails::Application]
sig { params(app: T.untyped).void }
def self.add_appenders(app)
  config = LogStruct.config

  # Determine output destination
  io = determine_output(app)

  # Colorful output is an opt-in, development-only choice.
  use_color = Rails.env.development? &&
    !config.prefer_json_in_development &&
    config.enable_color_output

  formatter = if use_color
    LogStruct::SemanticLogger::ColorFormatter.new(color_map: config.color_map)
  else
    LogStruct::SemanticLogger::Formatter.new
  end

  ::SemanticLogger.add_appender(
    io: io,
    formatter: formatter,
    filter: determine_filter
  )

  # Add file appender if Rails has a log path configured (normal Rails behavior)
  if app.config.paths["log"].first
    ::SemanticLogger.add_appender(
      file_name: app.config.paths["log"].first,
      formatter: LogStruct::SemanticLogger::Formatter.new,
      filter: determine_filter
    )
  end
end
-
-
2
# Pick the IO object console logs are written to.
# RAILS_LOG_TO_STDOUT always forces $stdout; otherwise tests write to a
# StringIO (keeping test output clean) and dev/production use $stdout.
sig { params(app: T.untyped).returns(T.untyped) }
def self.determine_output(app)
  # Always honor explicit STDOUT directive
  return $stdout if ENV["RAILS_LOG_TO_STDOUT"].present?

  Rails.env.test? ? StringIO.new : $stdout
end
-
-
2
# Build the appender filter that suppresses common noisy Rails loggers
# (ActionView render logs, routing errors, schema-migration queries) when
# the configuration opts in; returns nil (no filtering) otherwise.
sig { returns(T.nilable(Regexp)) }
def self.determine_filter
  if LogStruct.config.filter_noisy_loggers
    /\A(ActionView|ActionController::RoutingError|ActiveRecord::SchemaMigration)/
  end
end
-
-
# Replaces Rails.logger and all component loggers with LogStruct's SemanticLogger
-
#
-
# This method provides seamless integration by replacing the default Rails logger
-
# throughout the entire Rails stack, ensuring all logging flows through the
-
# high-performance SemanticLogger system.
-
#
-
# ## Benefits of Complete Logger Replacement:
-
# - **Consistent performance**: All Rails components benefit from SemanticLogger speed
-
# - **Unified formatting**: All logs use the same structured JSON format
-
# - **Centralized configuration**: Single point of control for all logging
-
# - **Complete compatibility**: Maintains all Rails.logger API contracts
-
#
-
# ## Components Updated:
-
# - Rails.logger (framework core)
-
# - ActiveRecord::Base.logger (database queries)
-
# - ActionController::Base.logger (request processing)
-
# - ActionMailer::Base.logger (email delivery)
-
# - ActiveJob::Base.logger (background jobs)
-
# - ActionView::Base.logger (template rendering)
-
# - ActionCable.server.config.logger (WebSocket connections)
-
#
-
# After replacement, all Rails logging maintains API compatibility while gaining
-
# SemanticLogger's performance, reliability, and feature benefits.
-
#
-
# @param app [Rails::Application] The Rails application instance
-
2
sig { params(app: T.untyped).void }
# Installs a single LogStruct SemanticLogger instance as Rails.logger and as
# the logger for every loaded framework component, and stores the reference
# in the application config. See the class-level documentation above for the
# full list of components and the rationale.
#
# @param app [Rails::Application] the Rails application instance
def self.replace_rails_logger(app)
  # One shared SemanticLogger instance for the whole stack.
  logger = LogStruct::SemanticLogger::Logger.new("Rails")

  # Replace the framework core logger and keep a reference in app config.
  Rails.logger = logger
  app.config.logger = logger

  # Swap in the logger for each framework component that is loaded.
  ActiveRecord::Base.logger = logger if defined?(ActiveRecord::Base)
  ActionController::Base.logger = logger if defined?(ActionController::Base)
  ActiveJob::Base.logger = logger if defined?(ActiveJob::Base)
  ActionView::Base.logger = logger if defined?(ActionView::Base)
  ActionCable.server.config.logger = logger if defined?(ActionCable)

  if defined?(ActionMailer::Base)
    ActionMailer::Base.logger = logger
    # Ensure ActionMailer.logger is also set (it might be accessed directly)
    T.unsafe(::ActionMailer).logger = logger if T.unsafe(::ActionMailer).respond_to?(:logger=)
  end
end
-
end
-
end
-
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require_relative "../enums/log_field"
-
1
require_relative "interfaces/request_fields"
-
-
1
module LogStruct
  module Log
    module Shared
      # Mixin that copies HTTP request attributes into a serialized log hash.
      # Including structs must implement the Interfaces::RequestFields
      # accessors (path, http_method, source_ip, user_agent, referer,
      # request_id).
      module AddRequestFields
        extend T::Sig
        extend T::Helpers

        requires_ancestor { Interfaces::RequestFields }

        # Writes each request field into +hash+ under its serialized
        # LogField key, skipping fields whose value is nil.
        sig { params(hash: T::Hash[Symbol, T.untyped]).void }
        def add_request_fields(hash)
          {
            LogField::Path => path,
            LogField::HttpMethod => http_method,
            LogField::SourceIp => source_ip,
            LogField::UserAgent => user_agent,
            LogField::Referer => referer,
            LogField::RequestId => request_id
          }.each do |field, value|
            hash[field.serialize] = value if value
          end
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# Moved from lib/log_struct/log/interfaces/additional_data_field.rb
-
1
module LogStruct
  module Log
    module Interfaces
      # Sorbet interface for log structs that may carry a free-form
      # additional_data payload, which gets merged into the serialized
      # output at the top level (see Shared::MergeAdditionalDataFields).
      module AdditionalDataField
        extend T::Sig
        extend T::Helpers

        interface!

        # Including types must be T::Structs so props/serialization exist.
        requires_ancestor { T::Struct }

        # Arbitrary extra fields to merge into the serialized log hash.
        # Returns nil when the struct carries no additional data.
        sig { abstract.returns(T.nilable(T::Hash[T.any(String, Symbol), T.untyped])) }
        def additional_data
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require_relative "../../enums/source"
-
1
require_relative "../../enums/event"
-
1
require_relative "../../enums/level"
-
-
1
module LogStruct
  module Log
    module Interfaces
      # Sorbet interface describing the fields every internal LogStruct log
      # struct must expose: typed source/event enums plus level, timestamp,
      # and a serialize method.
      module CommonFields
        extend T::Sig
        extend T::Helpers

        interface!

        # The subsystem that produced the log entry (Source enum).
        sig { abstract.returns(Source) }
        def source
        end

        # The kind of event being logged (Event enum).
        sig { abstract.returns(Event) }
        def event
        end

        # Severity of the log entry (Level enum).
        sig { abstract.returns(Level) }
        def level
        end

        # When the event occurred.
        sig { abstract.returns(Time) }
        def timestamp
        end

        # Serializes the struct to a symbol-keyed hash.
        sig { abstract.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
        def serialize(strict = true)
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require_relative "../../enums/level"
-
-
1
module LogStruct
  module Log
    module Interfaces
      # Sorbet interface for public (user-defined) log structs. Unlike
      # CommonFields it only mandates level/timestamp/serialize, leaving
      # the typing of source/event up to the including struct (they may be
      # plain strings or symbols — see SerializeCommonPublic).
      module PublicCommonFields
        extend T::Sig
        extend T::Helpers

        interface!

        # Severity of the log entry (Level enum).
        sig { abstract.returns(Level) }
        def level
        end

        # When the event occurred.
        sig { abstract.returns(Time) }
        def timestamp
        end

        # Serializes the struct to a symbol-keyed hash.
        sig { abstract.params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
        def serialize(strict = true)
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
module LogStruct
  module Log
    module Interfaces
      # Sorbet interface for log structs that describe an HTTP request.
      # Every accessor is nilable; Shared::AddRequestFields only serializes
      # the fields that are present.
      module RequestFields
        extend T::Sig
        extend T::Helpers

        interface!

        # Request path (e.g. "/api/users").
        sig { abstract.returns(T.nilable(String)) }
        def path
        end

        # HTTP verb (e.g. "GET").
        sig { abstract.returns(T.nilable(String)) }
        def http_method
        end

        # Client IP address as reported by the request.
        sig { abstract.returns(T.nilable(String)) }
        def source_ip
        end

        # Client User-Agent header value.
        sig { abstract.returns(T.nilable(String)) }
        def user_agent
        end

        # Referer header value.
        sig { abstract.returns(T.nilable(String)) }
        def referer
        end

        # Rails request ID for correlating log lines.
        sig { abstract.returns(T.nilable(String)) }
        def request_id
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require_relative "interfaces/additional_data_field"
-
-
1
module LogStruct
  module Log
    module Shared
      # Mixin that merges a struct's optional additional_data hash into a
      # serialized log hash, normalizing all keys to symbols.
      module MergeAdditionalDataFields
        extend T::Sig
        extend T::Helpers

        requires_ancestor { T::Struct }
        requires_ancestor { Interfaces::AdditionalDataField }

        # Copies every additional_data entry into +hash+ with a symbol key.
        # No-op when additional_data is nil.
        sig { params(hash: T::Hash[Symbol, T.untyped]).void }
        def merge_additional_data_fields(hash)
          extra = additional_data
          return if extra.nil?

          extra.each { |key, value| hash[key.to_sym] = value }
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require_relative "../enums/log_field"
-
1
require_relative "interfaces/common_fields"
-
1
require_relative "merge_additional_data_fields"
-
-
1
module LogStruct
  module Log
    module Shared
      # Shared serialization for internal log structs: combines the common
      # fields (source/event/level/timestamp), the struct-specific fields
      # from #to_h, and any additional_data into one symbol-keyed hash.
      module SerializeCommon
        extend T::Sig
        extend T::Helpers

        requires_ancestor { Interfaces::CommonFields }

        # Serializes the full log struct to a symbol-keyed hash.
        # nil values are skipped, Time values become ISO8601 strings, and
        # backtraces are truncated to their first 5 lines.
        sig { params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
        def serialize(strict = true)
          # Start with shared fields (source, event, level, timestamp)
          out = serialize_common(strict)

          # Merge event/base fields from the struct-specific hash
          kernel_self = T.cast(self, Kernel)
          field_hash = T.cast(kernel_self.public_send(:to_h), T::Hash[LogStruct::LogField, T.untyped])
          field_hash.each do |log_field, value|
            next if value.nil?
            key = log_field.serialize

            # Limit backtrace to first 5 lines
            if key == :backtrace && value.is_a?(Array)
              value = value.first(5)
            end

            # Normalize Time values to ISO8601 strings for JSON output.
            out[key] = value.is_a?(::Time) ? value.iso8601 : value
          end

          # Merge any additional_data at top level if available
          if kernel_self.respond_to?(:merge_additional_data_fields)
            # merge_additional_data_fields expects symbol keys
            merge_target = T.cast(self, LogStruct::Log::Shared::MergeAdditionalDataFields)
            merge_target.merge_additional_data_fields(out)
          end

          out
        end

        # Builds the hash of fields common to all log structs. Timestamps
        # are emitted with millisecond precision.
        sig { params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
        def serialize_common(strict = true)
          {
            LogField::Source.serialize => source.serialize.to_s,
            LogField::Event.serialize => event.serialize.to_s,
            LogField::Level.serialize => level.serialize.to_s,
            LogField::Timestamp.serialize => timestamp.iso8601(3)
          }
        end

        # Rails-style JSON hook: same data as #serialize but string-keyed.
        sig { params(options: T.untyped).returns(T::Hash[String, T.untyped]) }
        def as_json(options = nil)
          serialize.transform_keys(&:to_s)
        end
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require_relative "../enums/log_field"
-
1
require_relative "interfaces/public_common_fields"
-
-
1
module LogStruct
  module Log
    # Common serialization for public custom log structs with string/symbol source/event
    module SerializeCommonPublic
      extend T::Sig
      extend T::Helpers

      requires_ancestor { Interfaces::PublicCommonFields }
      requires_ancestor { Kernel }

      # Builds the shared field hash (source, event, level, timestamp) for a
      # public log struct. source/event may be enum-like objects responding
      # to #serialize, or plain strings/symbols (coerced with #to_s).
      #
      # Raises ArgumentError when the including struct does not define
      # #source and #event.
      sig { params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
      def serialize_common_public(strict = true)
        unless respond_to?(:source) && respond_to?(:event)
          raise ArgumentError, "Public log struct must define #source and #event"
        end

        src_val = public_send(:source)
        evt_val = public_send(:event)
        # Enum-like values go through #serialize; anything else falls back to #to_s.
        src = src_val.respond_to?(:serialize) ? src_val.public_send(:serialize).to_s : src_val.to_s
        evt = evt_val.respond_to?(:serialize) ? evt_val.public_send(:serialize).to_s : evt_val.to_s
        lvl = level.serialize.to_s
        ts = timestamp.iso8601(3)

        {
          LogField::Source.serialize => src,
          LogField::Event.serialize => evt,
          LogField::Level.serialize => lvl,
          LogField::Timestamp.serialize => ts
        }
      end

      # Rails-style JSON hook: same data as #serialize but string-keyed.
      sig { params(options: T.untyped).returns(T::Hash[String, T.untyped]) }
      def as_json(options = nil)
        serialize.transform_keys(&:to_s)
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
# Note: We use T::Struct for our Log classes so Sorbet is a hard requirement,
-
# not an optional dependency.
-
1
require "sorbet-runtime"
-
1
require "log_struct/sorbet/serialize_symbol_keys"
-
-
# Don't extend T::Sig to all modules! We're just a library, not a private Rails application
-
# See: https://sorbet.org/docs/sigs
-
# class Module
-
# include T::Sig
-
# end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
module LogStruct
  module Sorbet
    # Mixin for T::Structs that normalizes serialization output to
    # deep-symbolized keys. deep_symbolize_keys is provided by
    # ActiveSupport (core_ext/hash).
    module SerializeSymbolKeys
      extend T::Sig
      extend T::Helpers

      requires_ancestor { T::Struct }

      # Wraps T::Struct#serialize so all keys (including nested ones)
      # come back as symbols. `super` forwards +strict+ implicitly.
      sig { params(strict: T::Boolean).returns(T::Hash[Symbol, T.untyped]) }
      def serialize(strict = true)
        super.deep_symbolize_keys
      end

      # Aliases #to_h to the symbol-keyed serialization.
      sig { returns(T::Hash[Symbol, T.untyped]) }
      def to_h
        serialize
      end
    end
  end
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
require "digest"
-
-
1
module LogStruct
  # StringScrubber is inspired by logstop by @ankane: https://github.com/ankane/logstop
  # Enhancements:
  # - Shows which type of data was filtered
  # - Includes an SHA256 hash with filtered emails for request tracing
  # - Uses configuration options from LogStruct.config
  module StringScrubber
    class << self
      extend T::Sig

      # Also supports URL-encoded URLs like https%3A%2F%2Fuser%3Asecret%40example.com
      # cspell:ignore Fuser Asecret
      URL_PASSWORD_REGEX = /((?:\/\/|%2F%2F)[^\s\/]+(?::|%3A))[^\s\/]+(@|%40)/
      URL_PASSWORD_REPLACEMENT = '\1[PASSWORD]\2'

      EMAIL_REGEX = /\b[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,}\b/i

      # Card numbers starting 3-6: bare 16-digit form and 4x4 delimited form.
      CREDIT_CARD_REGEX_SHORT = /\b[3456]\d{15}\b/
      CREDIT_CARD_REGEX_DELIMITERS = /\b[3456]\d{3}[\s-]\d{4}[\s-]\d{4}[\s-]\d{4}\b/
      CREDIT_CARD_REPLACEMENT = "[CREDIT_CARD]"

      PHONE_REGEX = /\b\d{3}[\s-]\d{3}[\s-]\d{4}\b/
      PHONE_REPLACEMENT = "[PHONE]"

      SSN_REGEX = /\b\d{3}[\s-]\d{2}[\s-]\d{4}\b/
      SSN_REPLACEMENT = "[SSN]"

      IP_REGEX = /\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b/
      IP_REPLACEMENT = "[IP]"

      MAC_REGEX = /\b[0-9a-f]{2}(:[0-9a-f]{2}){5}\b/i
      MAC_REPLACEMENT = "[MAC]"

      # Scrub sensitive information from a string.
      # Each category of filtering is toggled individually via
      # LogStruct.config.filters; a custom scrubbing handler (if
      # configured) runs last. The input is dup'd, so the caller's
      # string is never mutated.
      sig { params(string: String).returns(String) }
      def scrub(string)
        return string if string.empty?

        string = string.to_s.dup
        config = LogStruct.config.filters

        # Passwords in URLs
        string.gsub!(URL_PASSWORD_REGEX, URL_PASSWORD_REPLACEMENT) if config.url_passwords

        # Emails — replaced with a hash so a given address can still be
        # traced across log lines without exposing it.
        if config.email_addresses
          string.gsub!(EMAIL_REGEX) do |email|
            email_hash = HashUtils.hash_value(email)
            "[EMAIL:#{email_hash}]"
          end
        end

        # Credit card numbers
        if config.credit_card_numbers
          string.gsub!(CREDIT_CARD_REGEX_SHORT, CREDIT_CARD_REPLACEMENT)
          string.gsub!(CREDIT_CARD_REGEX_DELIMITERS, CREDIT_CARD_REPLACEMENT)
        end

        # Phone numbers
        string.gsub!(PHONE_REGEX, PHONE_REPLACEMENT) if config.phone_numbers

        # SSNs
        string.gsub!(SSN_REGEX, SSN_REPLACEMENT) if config.ssns

        # IPs
        string.gsub!(IP_REGEX, IP_REPLACEMENT) if config.ip_addresses

        # MAC addresses
        string.gsub!(MAC_REGEX, MAC_REPLACEMENT) if config.mac_addresses

        # Custom scrubber runs after all built-in filters.
        custom_scrubber = LogStruct.config.string_scrubbing_handler
        string = custom_scrubber.call(string) if !custom_scrubber.nil?

        string
      end
    end
  end
end
-
# Add your own tasks in files placed in lib/tasks ending in .rake,
-
# for example lib/tasks/capistrano.rake, and they will automatically be available to Rake.
-
-
1
require_relative "config/application"
-
-
1
Rails.application.load_tasks
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
# Base controller for the logging test application; all test controllers
# inherit from this.
class ApplicationController < ActionController::Base
end
-
# typed: true
-
# frozen_string_literal: true
-
-
1
# Integration-test controller that exercises LogStruct's logging behaviors
# end-to-end: plain logging, structured log structs, error handling, tagged
# logging, and model/job-triggered logging. The actions are fixtures for the
# test suite — keep their behavior stable.
class LoggingController < ApplicationController
  # Basic logging
  def test_basic
    # Test standard Rails logging - this is the primary usage pattern
    Rails.logger.info("Info level message")
    Rails.logger.warn("Warning level message")
    Rails.logger.debug("Debug level message with context")

    # For structured data, use LogStruct's Log::Plain
    plain_log = LogStruct::Log::Plain.new(
      message: "Structured log message",
      source: LogStruct::Source::App
    )
    Rails.logger.info(plain_log)

    # Test email scrubbing in plain string
    Rails.logger.info("User email is test@example.com and password is secret123")

    render json: {status: "ok", message: "Basic logging completed"}
  end

  # Error logging
  def test_error
    # Since the tests run in the test environment and Rails' test behavior may catch exceptions
    # differently, let's log the error but also raise it to ensure it's properly captured
    Rails.logger.info("About to raise test error")
    begin
      raise "Test error for integration testing"
    rescue => e
      # Log the error first
      error_log = LogStruct::Log::Error.new(
        source: LogStruct::Source::App,
        error_class: e.class,
        message: e.message
      )
      Rails.logger.error(error_log)

      # Then re-raise it for the test to catch
      raise
    end
  end

  # Custom log structures
  def test_custom
    # Create and log a custom log structure
    5.times do |i|
      custom_log = LogStruct::Log::Plain.new(
        message: "Custom log message #{i}",
        source: LogStruct::Source::App,
        additional_data: {
          iteration: i,
          timestamp: Time.now.to_f,
          random: rand(100)
        }
      )
      Rails.logger.info(custom_log)
    end

    render json: {status: "ok", message: "Custom logging completed"}
  end

  # Request logging test - DO NOT MODIFY THIS METHOD
  # This method INTENTIONALLY reproduces the SystemStackError issue
  # which must be fixed in the LogStruct codebase itself.
  def test_request
    # This is exactly the code that was causing the infinite recursion issue
    # We need to fix the library - not modify this test!
    request_log = LogStruct::Log::Request.new(
      http_method: "GET",
      path: "/api/users",
      status: 200,
      duration_ms: 15.5,
      source_ip: "127.0.0.1"
    )
    Rails.logger.info(request_log)

    render json: {status: "ok", message: "Request logging completed"}
  end

  # Model-related logging
  def test_model
    # Create a test user to trigger ActiveRecord logging
    user = User.create!(name: "Test User", email: "user@example.com")
    # Simple string logging
    Rails.logger.info("Created user #{user.id}")

    # Get the existing user
    found_user = User.find(user.id)
    Rails.logger.info("Found user: #{found_user.name}")

    render json: {status: "ok", message: "Model logging completed", user_id: user.id}
  end

  # Job-related logging
  def test_job
    # Enqueue a job to test ActiveJob integration
    job = TestJob.perform_later("test_argument")
    Rails.logger.info("Job enqueued with ID: #{job.job_id}")

    # LogStruct will automatically enhance job enqueued/performed logs
    render json: {status: "ok", message: "Job enqueued for testing", job_id: job.job_id}
  end

  # Context and tagging
  def test_context
    # TODO: Fix types for the tagged method
    # Test Rails' built-in tagged logging
    T.unsafe(Rails.logger).tagged("REQUEST_ID_123", "USER_456") do
      Rails.logger.info("Message with tags")

      # Nested tags
      T.unsafe(Rails.logger).tagged("NESTED") do
        Rails.logger.warn("Message with nested tags")
      end
    end

    # Message without tags
    Rails.logger.info("Message without tags")

    render json: {status: "ok", message: "Context logging completed"}
  end

  # Verifies the formatter's error paths don't recurse infinitely.
  def test_error_logging
    # Also test error handling in formatter by logging to trigger fallback handlers
    begin
      # Raise an error
      raise "Test error for recursion safety"
    rescue => e
      # Log the error, which would trigger the formatter code
      Rails.logger.error("Error occurred: #{e.message}")

      # Also try structured error logging
      error_log = LogStruct::Log::Error.new(
        source: LogStruct::Source::App,
        message: e.message,
        error_class: e.class
      )
      Rails.logger.error(error_log)
    end

    # If we got here without a SystemStackError, the infinite recursion was prevented
    render json: {status: "ok", message: "Stack-safe error handling test completed"}
  end
end
-
# typed: true
-
# frozen_string_literal: true
-
-
1
# Base job class for the test application.
class ApplicationJob < ActiveJob::Base
end
-
# typed: true
-
# frozen_string_literal: true
-
-
1
# Fixture job for exercising LogStruct's ActiveJob integration: it logs at
# start/finish and deliberately raises (and rescues) an error to test both
# plain and structured error logging inside a job.
class TestJob < ApplicationJob
  queue_as :default

  # @param arg [Object] arbitrary argument echoed into the log output
  def perform(arg)
    # Log job processing - standard Rails approach
    logger.info("Processing job #{job_id} with argument: #{arg}")

    # Simulate some work
    sleep 0.1

    # Test error handling in a job
    begin
      raise StandardError, "Test job error"
    rescue => e
      # Standard Rails logging
      logger.error("Job error: #{e.message}")

      # Example of enhanced structured logging
      exception_log = LogStruct::Log::Error.new(
        source: LogStruct::Source::Job,
        error_class: e.class,
        message: e.message,
        additional_data: {job_class: self.class.name, job_id: job_id}
      )
      logger.error(exception_log)
    end

    # Log job completion
    logger.info("Job #{job_id} completed successfully")
  end
end
-
1
# Base mailer for the test application with a default sender and layout.
class ApplicationMailer < ActionMailer::Base
  default from: "from@example.com"
  layout "mailer"
end
-
# typed: true
-
# frozen_string_literal: true
-
-
1
# Fixture mailer for the actionmailer_id_mapping tests: the instance
# variables set here (@account, @user, @organization) are what the ID
# mapping extracts into the delivery log's additional data.
class TestMailer < ApplicationMailer
  # Sets @account and @user so both can be mapped to IDs in logs.
  def test_email_with_ids(account, user)
    @account = account
    @user = user
    mail(to: "test@example.com", subject: "Test Email")
  end

  # Sets only @organization for the custom-field-name mapping test.
  def test_email_with_organization(organization)
    @organization = organization
    mail(to: "test@example.com", subject: "Test Email")
  end
end
-
# typed: true
-
# frozen_string_literal: true
-
-
1
# Abstract base class for all models in the test application.
class ApplicationRecord < ActiveRecord::Base
  # primary_abstract_class (Rails 7+) already marks this class abstract,
  # so the previous explicit `self.abstract_class = true` was redundant
  # and has been removed.
  primary_abstract_class
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
# Model used to exercise Active Storage logging via a single attached file.
class Document < ApplicationRecord
  extend T::Sig

  has_one_attached :file

  # Creates a Document and attaches a plain-text file built from the given
  # content, triggering an Active Storage upload (and its log events).
  #
  # @param filename [String] name for the attached file
  # @param content [String] file body, attached as text/plain
  # @return [Document] the persisted document with its attachment
  sig { params(filename: String, content: String).returns(Document) }
  def self.create_with_file(filename:, content:)
    document = T.let(create!, Document)
    document.file.attach(
      io: StringIO.new(content),
      filename: filename,
      content_type: "text/plain"
    )
    document
  end
end
-
# typed: true
-
# frozen_string_literal: true
-
-
1
# Model used to exercise ActiveRecord-driven logging; the create/update
# callbacks emit log lines the integration tests assert against. The email
# logged in log_creation also feeds the email-scrubbing tests.
class User < ApplicationRecord
  validates :name, presence: true
  validates :email, presence: true, format: {with: URI::MailTo::EMAIL_REGEXP}

  # Add callbacks to test logging
  after_create :log_creation
  after_update :log_update

  private

  # Logs the new record's id and email (email will be scrubbed by LogStruct).
  def log_creation
    Rails.logger.info("User created with ID: #{id} and email: #{attributes["email"]}")
  end

  # Logs which attributes changed in the last save.
  def log_update
    # Standard Rails logging with context
    changed_attrs = previous_changes.keys.join(", ")
    Rails.logger.info("User #{id} updated. Changed attributes: #{changed_attrs}")
  end
end
-
# typed: true
-
-
1
require_relative "boot"
-
-
1
require "rails/all"
-
-
# Require the gems listed in Gemfile, including any gems
-
# you've limited to :test, :development, or :production.
-
1
Bundler.require(*Rails.groups)
-
-
1
# Rails application used as the integration-test harness for LogStruct.
module LogstructTestApp
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 8.1

    # Configuration for the application, engines, and railties goes here.
    #
    # These settings can be overridden in specific environments using the files
    # in config/environments, which are processed later.
    #
    # config.time_zone = "Central Time (US & Canada)"
    # config.eager_load_paths << Rails.root.join("extras")

    # Only use API mode
    config.api_only = true

    # Use test adapter for ActiveJob in all environments for testing
    config.active_job.queue_adapter = :test

    # Force all environments to log to STDOUT so development behaves like test/production
    # This mirrors how many platforms and 12-factor apps expect logs to be emitted.
    config.log_level = :debug
    stdout_logger = ActiveSupport::Logger.new($stdout)
    stdout_logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(stdout_logger)
  end
end
-
# Load the Rails application.
-
1
require_relative "application"
-
-
# Initialize the Rails application.
-
1
Rails.application.initialize!
-
# The test environment is used exclusively to run your application's
-
# test suite. You never need to work with it otherwise. Remember that
-
# your test database is "scratch space" for the test suite and is wiped
-
# and recreated between test runs. Don't rely on the data there!
-
-
1
# Test-environment configuration for the LogStruct test app.
Rails.application.configure do
  # Host authorization for tests - allow .localhost subdomains, IPs, and www.example.com
  config.hosts = [
    ".localhost",
    "www.example.com",
    IPAddr.new("0.0.0.0/0"), # IPv4
    IPAddr.new("::/0"), # IPv6
  ]

  # Settings specified here will take precedence over those in config/application.rb.

  # While tests run files are not watched, reloading is not necessary.
  config.enable_reloading = false

  # Eager loading loads your entire application. When running a single test locally,
  # this is usually not necessary, and can slow down your test suite. However, it's
  # recommended that you enable it in continuous integration systems to ensure eager
  # loading is working properly before deploying your code.
  config.eager_load = ENV["CI"].present?

  # Configure public file server for tests with cache-control for performance.
  config.public_file_server.headers = { "cache-control" => "public, max-age=3600" }

  # Show full error reports.
  config.consider_all_requests_local = true
  config.cache_store = :null_store

  # Render exception templates for rescuable exceptions and raise for other exceptions.
  config.action_dispatch.show_exceptions = :rescuable

  # Disable request forgery protection in test environment.
  config.action_controller.allow_forgery_protection = false

  # Store uploaded files on the local file system in a temporary directory.
  config.active_storage.service = :test

  # Tell Action Mailer not to deliver emails to the real world.
  # The :test delivery method accumulates sent emails in the
  # ActionMailer::Base.deliveries array.
  config.action_mailer.delivery_method = :test

  # Set host to be used by links generated in mailer templates.
  config.action_mailer.default_url_options = { host: "example.com" }

  # Print deprecation notices to the stderr.
  config.active_support.deprecation = :stderr

  # Raises error for missing translations.
  # config.i18n.raise_on_missing_translations = true

  # Annotate rendered view with file names.
  # config.action_view.annotate_rendered_view_with_filenames = true

  # Raise error when a before_action's only/except options reference missing actions.
  config.action_controller.raise_on_missing_callback_actions = true
end
-
# Be sure to restart your server when you modify this file.
-
-
# Avoid CORS issues when API is called from the frontend app.
-
# Handle Cross-Origin Resource Sharing (CORS) in order to accept cross-origin Ajax requests.
-
-
# Read more: https://github.com/cyu/rack-cors
-
-
# Rails.application.config.middleware.insert_before 0, Rack::Cors do
-
# allow do
-
# origins "example.com"
-
#
-
# resource "*",
-
# headers: :any,
-
# methods: [:get, :post, :put, :patch, :delete, :options, :head]
-
# end
-
# end
-
# Be sure to restart your server when you modify this file.
-
-
# Configure parameters to be partially matched (e.g. passw matches password) and filtered from the log file.
-
# Use this to limit dissemination of sensitive information.
-
# See the ActiveSupport::ParameterFilter documentation for supported notations and behaviors.
-
1
# Partial-match parameter names whose values Rails redacts from logs.
Rails.application.config.filter_parameters += [
  :passw, :email, :secret, :token, :_key, :crypt, :salt, :certificate, :otp, :ssn, :cvv, :cvc
]
-
# Be sure to restart your server when you modify this file.
-
-
# Add new inflection rules using the following format. Inflections
-
# are locale specific, and you may define rules for as many different
-
# locales as you wish. All of these examples are active by default:
-
# ActiveSupport::Inflector.inflections(:en) do |inflect|
-
# inflect.plural /^(ox)$/i, "\\1en"
-
# inflect.singular /^(ox)en/i, "\\1"
-
# inflect.irregular "person", "people"
-
# inflect.uncountable %w( fish sheep )
-
# end
-
-
# These inflection rules are supported but not enabled by default:
-
# ActiveSupport::Inflector.inflections(:en) do |inflect|
-
# inflect.acronym "RESTful"
-
# end
-
# typed: strict
-
-
1
require "log_struct"
-
-
# Configure LogStruct
-
1
# Configure LogStruct for the test application: enable it in every
# environment, turn on all framework integrations and scrubbing filters,
# and set explicit error-handling modes so tests exercise each path.
LogStruct.configure do |config|
  # Specify which environments to enable in
  config.enabled_environments = [:development, :test, :production]

  # Specify which environments are considered local/development
  config.local_environments = [:development, :test]

  # Configure integrations (third-party ones only when the gem is loaded)
  config.integrations.enable_lograge = true
  config.integrations.enable_actionmailer = true
  config.integrations.enable_activejob = true
  config.integrations.enable_rack_error_handler = true
  config.integrations.enable_sidekiq = !!defined?(Sidekiq)
  config.integrations.enable_shrine = !!defined?(Shrine)
  config.integrations.enable_carrierwave = !!defined?(CarrierWave)
  config.integrations.enable_activestorage = true

  # Configure string scrubbing filters
  config.filters.email_addresses = true
  config.filters.url_passwords = true
  config.filters.credit_card_numbers = true
  config.filters.phone_numbers = true
  config.filters.ssns = true
  config.filters.ip_addresses = true
  config.filters.mac_addresses = true

  # Configure error handling modes
  config.error_handling_modes.logstruct_errors = LogStruct::ErrorHandlingMode::Log
  config.error_handling_modes.security_errors = LogStruct::ErrorHandlingMode::Report
  config.error_handling_modes.standard_errors = LogStruct::ErrorHandlingMode::LogProduction
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
# Routes for the logging integration tests; each maps to a LoggingController
# fixture action, plus a trivial healthcheck endpoint.
Rails.application.routes.draw do
  # Testing routes
  get "/logging/basic", to: "logging#test_basic"
  get "/logging/error", to: "logging#test_error"
  get "/logging/model", to: "logging#test_model"
  get "/logging/job", to: "logging#test_job"
  get "/logging/context", to: "logging#test_context"
  get "/logging/custom", to: "logging#test_custom"
  get "/logging/request", to: "logging#test_request"
  get "/logging/error_logging", to: "logging#test_error_logging"

  # Healthcheck route
  get "/health", to: proc { [200, {}, ["OK"]] }
end
-
# typed: strict
-
# frozen_string_literal: true
-
-
1
# Rake tasks used by the rake-task logging tests.
namespace :logging do
  desc "Test log output for rake task logging tests"
  task test_output: :environment do
    Rails.logger.info "Test log message from rake task"
    # Exercise tagged logging from a rake-task context as well.
    Rails.logger.tagged("custom_tag") do
      Rails.logger.info "Tagged test log message"
    end
  end
end
-
# typed: true
-
# frozen_string_literal: true
-
-
1
require "test_helper"
-
-
1
# Verifies that LogStruct's actionmailer_id_mapping configuration extracts
# mailer instance variables (e.g. @account, @user) as ID fields in the
# delivery log's additional data.
class ActionMailerIdMappingTest < ActiveSupport::TestCase
  setup do
    # Remember the original mapping so teardown can restore it.
    @original_mapping = LogStruct.config.integrations.actionmailer_id_mapping

    # Use StringIO to capture log output
    @log_output = StringIO.new
    @original_logger = Rails.logger

    # Create a new logger with our StringIO and LogStruct's formatter
    logger = Logger.new(@log_output)
    logger.formatter = LogStruct::Formatter.new
    Rails.logger = logger
  end

  teardown do
    LogStruct.config.integrations.actionmailer_id_mapping = @original_mapping
    Rails.logger = @original_logger
  end

  # Helper method to parse log entries: scans the captured output for JSON
  # lines and returns the mailer logs matching the given event type.
  def find_log_entries(event_type)
    @log_output.rewind

    logs = []
    @log_output.each_line do |line|
      if line =~ /(\{.+\})/
        json = JSON.parse($1)
        logs << json if json["src"] == "mailer" && json["evt"] == event_type
      end
    rescue JSON::ParserError
      # Skip lines that don't contain valid JSON
    end

    logs
  end

  test "actionmailer_id_mapping extracts configured instance variables as IDs in additional_data" do
    # Clear the log buffer before the test
    @log_output.truncate(0)
    @log_output.rewind

    # Configure default ID mapping
    LogStruct.config.integrations.actionmailer_id_mapping = {
      account: :account_id,
      user: :user_id
    }

    # Create test objects — only an #id reader is needed.
    account = Struct.new(:id).new(123)
    user = Struct.new(:id).new(456)

    # Deliver email
    TestMailer.test_email_with_ids(account, user).deliver_now

    # Find delivery logs in the captured output
    delivery_logs = find_log_entries("delivered")

    assert_not_empty delivery_logs, "Expected delivery logs to be generated"

    delivered_log = delivery_logs.first

    # Check that account_id and user_id are in the log
    assert_equal 123, delivered_log["account_id"]
    assert_equal 456, delivered_log["user_id"]
  end

  test "actionmailer_id_mapping uses custom field names" do
    # Clear the log buffer before the test
    @log_output.truncate(0)
    @log_output.rewind

    # Configure custom ID mapping
    LogStruct.config.integrations.actionmailer_id_mapping = {
      organization: :org_id
    }

    # Create test object
    organization = Struct.new(:id).new(789)

    # Deliver email
    TestMailer.test_email_with_organization(organization).deliver_now

    # Find delivery logs in the captured output
    delivery_logs = find_log_entries("delivered")

    assert_not_empty delivery_logs, "Expected delivery logs to be generated"

    delivered_log = delivery_logs.first

    # Check that org_id is in the log
    assert_equal 789, delivered_log["org_id"]
    # Should not have account_id or user_id
    assert_nil delivered_log["account_id"]
    assert_nil delivered_log["user_id"]
  end
end
-
# typed: true
-
# frozen_string_literal: true
-
-
1
require "test_helper"
-
-
1
class ActiveStorageTest < ActiveSupport::TestCase
-
1
  # Swap Rails.logger for a StringIO-backed logger with LogStruct's
  # formatter so each test can inspect the emitted JSON log lines.
  setup do
    # Use StringIO to capture log output
    @log_output = StringIO.new
    @original_logger = Rails.logger

    # Create a new logger with our StringIO and LogStruct's formatter
    logger = Logger.new(@log_output)
    logger.formatter = LogStruct::Formatter.new
    Rails.logger = logger
  end
-
-
1
  teardown do
    # Restore the original logger so later tests see real Rails logging.
    Rails.logger = @original_logger
  end
-
-
# Helper method to parse log entries
-
1
  # Helper method to parse log entries: scans the captured output for JSON
  # lines and returns the Active Storage logs matching the given event type.
  #
  # NOTE(review): this duplicates ActionMailerIdMappingTest#find_log_entries
  # except for the "src" value — consider extracting a shared test helper.
  def find_log_entries(event_type)
    # Reset the StringIO position to the beginning
    @log_output.rewind

    # Parse the log contents looking for JSON data
    logs = []
    @log_output.each_line do |line|
      # Log lines might have timestamps or other text before the JSON
      if line =~ /(\{.+\})/
        json = JSON.parse($1)
        # Only include active storage logs with the specified event
        logs << json if json["src"] == "storage" && json["evt"] == event_type
      end
    rescue JSON::ParserError
      # Skip lines that don't contain valid JSON
    end

    logs
  end
-
-
1
test "logs are created when uploading a file" do
-
# Clear the log buffer before the test
-
1
@log_output.truncate(0)
-
1
@log_output.rewind
-
-
# Create a document with an attached file, which should trigger upload
-
1
Document.create_with_file(
-
filename: "test_file.txt",
-
content: "This is test content for Active Storage"
-
)
-
-
# Give some time for the async events to process
-
1
sleep(0.2)
-
-
# Find upload logs in the captured output
-
1
upload_logs = find_log_entries("upload")
-
-
1
assert_not_empty upload_logs, "Expected upload logs to be generated"
-
-
1
upload_log = upload_logs.first
-
-
1
assert_equal "storage", upload_log["src"]
-
1
assert_equal "upload", upload_log["evt"]
-
1
assert_equal "Disk", upload_log["storage"]
-
1
assert_not_nil upload_log["file_id"]
-
1
assert_not_nil upload_log["checksum"]
-
1
assert_not_nil upload_log["duration_ms"]
-
end
-
-
1
test "logs are created when downloading a file" do
-
# Create a document with a file for testing
-
1
document = Document.create_with_file(
-
filename: "download_test.txt",
-
content: "This is content to download"
-
)
-
-
# Clear the log buffer before the test
-
1
@log_output.truncate(0)
-
1
@log_output.rewind
-
-
# Download the file
-
1
document.file.download
-
-
# Give some time for the async events to process
-
1
sleep(0.2)
-
-
# Find download logs in the captured output
-
1
download_logs = find_log_entries("download")
-
-
1
assert_not_empty download_logs, "Expected download logs to be generated"
-
-
1
download_log = download_logs.first
-
-
1
assert_equal "storage", download_log["src"]
-
1
assert_equal "download", download_log["evt"]
-
1
assert_equal "Disk", download_log["storage"]
-
1
assert_not_nil download_log["file_id"]
-
1
assert_not_nil download_log["duration_ms"]
-
end
-
-
1
test "logs are created when checking if a file exists" do
-
# Create a document with a file for testing
-
1
document = Document.create_with_file(
-
filename: "exist_test.txt",
-
content: "This is content to check existence"
-
)
-
-
# Clear the log buffer before the test
-
1
@log_output.truncate(0)
-
1
@log_output.rewind
-
-
# Check if file exists - we need to hit the storage service directly to trigger the exist event
-
# In ActiveStorage, we need to directly check through the storage service
-
1
storage = ActiveStorage::Blob.service
-
1
storage.exist?(document.file.key)
-
-
# Give some time for the async events to process
-
1
sleep(0.2)
-
-
# Find existence check logs in the captured output
-
1
exist_logs = find_log_entries("exist")
-
-
1
assert_not_empty exist_logs, "Expected existence check logs to be generated"
-
-
1
exist_log = exist_logs.first
-
-
1
assert_equal "storage", exist_log["src"]
-
1
assert_equal "exist", exist_log["evt"]
-
1
assert_equal "Disk", exist_log["storage"]
-
1
assert_not_nil exist_log["file_id"]
-
end
-
-
1
test "logs are created when deleting a file" do
-
# Create a document with a file for testing
-
1
document = Document.create_with_file(
-
filename: "delete_test.txt",
-
content: "This is content to delete"
-
)
-
-
# Clear the log buffer before the test
-
1
@log_output.truncate(0)
-
1
@log_output.rewind
-
-
# Delete the file
-
1
document.file.purge
-
-
# Give some time for the async events to process
-
1
sleep(0.2)
-
-
# Find delete logs in the captured output
-
1
delete_logs = find_log_entries("delete")
-
-
1
assert_not_empty delete_logs, "Expected delete logs to be generated"
-
-
1
delete_log = delete_logs.first
-
-
1
assert_equal "storage", delete_log["src"]
-
1
assert_equal "delete", delete_log["evt"]
-
1
assert_equal "Disk", delete_log["storage"]
-
1
assert_not_nil delete_log["file_id"]
-
end
-
-
1
test "logs contain expected metadata fields" do
-
# Clear the log buffer before the test
-
1
@log_output.truncate(0)
-
1
@log_output.rewind
-
-
# Create a document with specific metadata
-
1
document = Document.create!
-
-
# Clear the buffer again to make sure we only capture the attach operation
-
1
@log_output.truncate(0)
-
1
@log_output.rewind
-
-
# Now attach the file with our known metadata
-
1
document.file.attach(
-
io: StringIO.new("Test content with specific metadata"),
-
filename: "metadata_test.txt",
-
content_type: "text/plain"
-
)
-
-
# Give some time for the async events to process
-
1
sleep(0.2)
-
-
# Find upload logs in the captured output
-
1
upload_logs = find_log_entries("upload")
-
-
1
assert_not_empty upload_logs, "Expected upload logs to be generated"
-
-
1
upload_log = upload_logs.first
-
-
# Verify upload log contains the expected fields
-
-
# The checksum should be present
-
1
assert_not_nil upload_log["checksum"]
-
-
# Check file size if available - from the blob service
-
1
if upload_log["size"]
-
assert_kind_of Integer, upload_log["size"]
-
end
-
-
# Check for duration which should always be present
-
1
assert_not_nil upload_log["duration_ms"]
-
end
-
end
-
# typed: true
-
-
1
require "test_helper"
-
1
require "open3"
-
-
1
class BootLogsIntegrationTest < ActiveSupport::TestCase
-
1
def test_rails_runner_emits_dotenv_structured_logs_and_ends_with_true
-
env = {
-
1
"LOGSTRUCT_ENABLED" => "true",
-
"RAILS_ENV" => "test",
-
"RAILS_LOG_TO_STDOUT" => "1"
-
}
-
1
cmd = ["bundle", "exec", "rails", "runner", "puts LogStruct.enabled?"]
-
-
1
stdout_str, stderr_str, status = Open3.capture3(env, *cmd)
-
-
1
assert_predicate status, :success?, "rails runner failed: #{stderr_str}"
-
1
output = stdout_str.to_s
-
-
1
refute_empty output, "Expected some output from rails runner"
-
-
1
lines = output.split("\n").map(&:strip).reject(&:empty?)
-
1
lines.reject! do |line|
-
3
line.start_with?("Coverage report generated", "Line Coverage:", "Branch Coverage:")
-
end
-
# Ensure the last non-empty line is 'true'
-
1
last_line = lines.last
-
-
1
assert_equal "true", last_line, "Expected final line to be 'true'"
-
-
1
before = lines[0...-1] || []
-
-
1
refute_empty before, "Expected logs before the final result"
-
-
1
json_logs = before.filter_map { |l|
-
begin
-
2
JSON.parse(l)
-
rescue
-
nil
-
end
-
}
-
3
dotenv_logs = json_logs.select { |h| h["src"] == "dotenv" }
-
-
1
assert_equal 2, dotenv_logs.size, "Expected two dotenv logs"
-
3
assert dotenv_logs.any? { |h| h["evt"] == "load" }, "Expected a load event"
-
2
assert dotenv_logs.any? { |h| h["evt"] == "update" }, "Expected an update event"
-
end
-
-
1
def test_rails_runner_emits_original_dotenv_logs_when_disabled
-
env = {
-
1
"LOGSTRUCT_ENABLED" => "false",
-
"RAILS_ENV" => "development",
-
"RAILS_LOG_TO_STDOUT" => "1"
-
}
-
1
cmd = ["bundle", "exec", "rails", "runner", "puts LogStruct.enabled?"]
-
-
1
stdout_str, stderr_str, status = Open3.capture3(env, *cmd)
-
-
1
assert_predicate status, :success?, "rails runner failed: #{stderr_str}"
-
1
output = stdout_str.to_s
-
-
1
refute_empty output, "Expected some output from rails runner"
-
-
1
lines = output.split("\n").map(&:strip).reject(&:empty?)
-
1
lines.reject! do |line|
-
3
line.start_with?("Coverage report generated", "Line Coverage:", "Branch Coverage:")
-
end
-
1
last_line = lines.last
-
-
1
assert_equal "false", last_line, "Expected final line to be 'false'"
-
-
1
before = lines[0...-1] || []
-
-
1
refute_empty before, "Expected logs before the final result"
-
-
# Expect original dotenv log lines (not JSON)
-
3
dotenv_lines = before.select { |l| l.start_with?("[dotenv]") }
-
-
1
assert_equal 2, dotenv_lines.size, "Expected two original dotenv lines"
-
2
assert dotenv_lines.any? { |l| l.include?("Set ") }, "Expected a 'Set ...' line"
-
3
assert dotenv_lines.any? { |l| l.include?("Loaded ") }, "Expected a 'Loaded ...' line"
-
end
-
end
-
# typed: true
-
-
1
require "test_helper"
-
-
1
class DotenvIntegrationTest < ActiveSupport::TestCase
-
1
def setup
-
1
@io = StringIO.new
-
1
::SemanticLogger.clear_appenders!
-
1
::SemanticLogger.add_appender(io: @io, formatter: LogStruct::SemanticLogger::Formatter.new, async: false)
-
end
-
-
1
def test_emits_structured_dotenv_logs_and_suppresses_unstructured_messages
-
# Simulate a dotenv update event after boot
-
1
diff = Struct.new(:env).new({"BOOT_FLAG" => "1", "REGION" => "ap-southeast-2"})
-
1
ActiveSupport::Notifications.instrument("update.dotenv", diff: diff) {}
-
-
1
::SemanticLogger.flush
-
-
1
@io.rewind
-
1
lines = @io.read.to_s.split("\n").map(&:strip).reject(&:empty?)
-
-
1
refute_empty lines, "Expected logs to be captured during test"
-
-
1
json_logs = lines.filter_map { |l|
-
begin
-
1
JSON.parse(l)
-
rescue
-
nil
-
end
-
}
-
2
dotenv_updates = json_logs.select { |h| h["src"] == "dotenv" && h["evt"] == "update" }
-
-
1
refute_empty dotenv_updates, "Expected a structured dotenv update log"
-
-
# Vars should include at least BOOT_FLAG
-
2
assert dotenv_updates.any? { |h| Array(h["vars"]).include?("BOOT_FLAG") }, "Expected BOOT_FLAG in vars"
-
-
# Ensure no plain unstructured "Set ..." messages slipped through
-
2
no_unstructured = json_logs.none? { |h| h["msg"].is_a?(String) && h["msg"].start_with?("Set ") }
-
-
1
assert no_unstructured, "Found unstructured 'Set ...' message in logs"
-
end
-
end
-
# typed: true
-
# frozen_string_literal: true
-
-
1
require "test_helper"
-
-
1
class HostAuthorizationTest < ActionDispatch::IntegrationTest
-
1
def setup
-
# Capture JSON output via a dedicated SemanticLogger appender
-
3
@io = StringIO.new
-
3
::SemanticLogger.clear_appenders!
-
# Use synchronous appender to avoid timing issues in tests
-
3
::SemanticLogger.add_appender(io: @io, formatter: LogStruct::SemanticLogger::Formatter.new, async: false)
-
end
-
-
1
def test_blocked_host_is_logged_with_logstruct
-
# Make a request with a blocked host
-
1
host! "blocked-host.example.com"
-
1
get "/health"
-
-
# Should return 403 Forbidden
-
1
assert_response :forbidden
-
-
# Ensure all logs are flushed from buffers
-
1
::SemanticLogger.flush
-
-
# Read all logged lines
-
1
@io.rewind
-
1
lines = @io.read.to_s.split("\n").map(&:strip).reject(&:empty?)
-
-
# Parse JSON logs
-
1
parsed_logs = lines.filter_map { |l|
-
begin
-
1
JSON.parse(l)
-
rescue
-
nil
-
end
-
}
-
-
# Find blocked host logs
-
2
blocked_host_logs = parsed_logs.select { |log| log["evt"] == "blocked_host" }
-
-
1
assert_equal 1, blocked_host_logs.size, "Expected exactly one blocked host log entry"
-
-
1
log_entry = blocked_host_logs.first
-
-
# Verify the log entry has the correct structure
-
1
assert_equal "security", log_entry["src"]
-
1
assert_equal "blocked_host", log_entry["evt"]
-
1
assert_equal "blocked-host.example.com", log_entry["blocked_host"]
-
1
assert_equal "/health", log_entry["path"]
-
1
assert_equal "GET", log_entry["method"]
-
end
-
-
1
def test_allowed_host_is_not_blocked
-
# Make a request with an allowed host (.localhost is allowed by default)
-
1
host! "www.localhost"
-
1
get "/health"
-
-
# Should return 200 OK
-
1
assert_response :success
-
-
# Ensure all logs are flushed from buffers
-
1
::SemanticLogger.flush
-
-
# Read all logged lines
-
1
@io.rewind
-
1
lines = @io.read.to_s.split("\n").map(&:strip).reject(&:empty?)
-
-
# Parse JSON logs
-
1
parsed_logs = lines.filter_map { |l|
-
begin
-
JSON.parse(l)
-
rescue
-
nil
-
end
-
}
-
-
# Find blocked host logs
-
1
blocked_host_logs = parsed_logs.select { |log| log["evt"] == "blocked_host" }
-
-
1
assert_equal 0, blocked_host_logs.size, "Should not log blocked host for allowed hosts"
-
end
-
-
1
def test_blocked_host_log_can_be_serialized
-
1
host! "malicious.example.com"
-
1
get "/health"
-
-
1
assert_response :forbidden
-
-
# Ensure all logs are flushed from buffers
-
1
::SemanticLogger.flush
-
-
# Read all logged lines
-
1
@io.rewind
-
1
lines = @io.read.to_s.split("\n").map(&:strip).reject(&:empty?)
-
-
# Parse JSON logs
-
1
parsed_logs = lines.filter_map { |l|
-
begin
-
1
JSON.parse(l)
-
rescue
-
nil
-
end
-
}
-
-
# Find blocked host logs
-
2
blocked_host_logs = parsed_logs.select { |log| log["evt"] == "blocked_host" }
-
-
1
assert_equal 1, blocked_host_logs.size
-
-
1
log_entry = blocked_host_logs.first
-
-
# Verify it's a properly serialized hash
-
1
assert_kind_of Hash, log_entry
-
-
# Verify key fields are in serialized output
-
1
assert_equal "security", log_entry["src"]
-
1
assert_equal "blocked_host", log_entry["evt"]
-
1
assert_equal "malicious.example.com", log_entry["blocked_host"]
-
1
assert_equal "/health", log_entry["path"]
-
1
assert_equal "GET", log_entry["method"]
-
end
-
end
-
# typed: true
-
# frozen_string_literal: true
-
-
1
require "test_helper"
-
-
1
class LoggingIntegrationTest < ActionDispatch::IntegrationTest
-
# Basic test to ensure the Rails app is working
-
1
def test_healthcheck_works
-
1
get "/health"
-
-
1
assert_response :success
-
1
assert_equal "OK", response.body
-
end
-
-
# More detailed test to verify basic logging
-
1
def test_basic_logging_endpoint_works
-
1
get "/logging/basic"
-
-
1
assert_response :success
-
-
1
response_json = JSON.parse(response.body)
-
-
1
assert_equal "ok", response_json["status"]
-
1
assert_equal "Basic logging completed", response_json["message"]
-
end
-
-
# Test error logging
-
1
def test_error_logging_endpoint_works
-
# The error will be raised and we should see it
-
1
error_raised = false
-
-
begin
-
1
get "/logging/error"
-
rescue RuntimeError => e
-
1
error_raised = true
-
-
1
assert_equal "Test error for integration testing", e.message
-
end
-
-
1
assert error_raised, "Expected an error to be raised"
-
end
-
-
# Test custom log structures
-
1
def test_custom_log_class_work
-
1
get "/logging/custom"
-
-
1
assert_response :success
-
-
1
response_json = JSON.parse(response.body)
-
-
1
assert_equal "ok", response_json["status"]
-
1
assert_equal "Custom logging completed", response_json["message"]
-
end
-
-
# Test request logging
-
1
def test_request_logging_works
-
1
get "/logging/request"
-
-
1
assert_response :success
-
-
1
response_json = JSON.parse(response.body)
-
-
1
assert_equal "ok", response_json["status"]
-
1
assert_equal "Request logging completed", response_json["message"]
-
end
-
-
# Test that error handling is stack-safe
-
1
def test_error_logging
-
# This test intentionally creates a situation that would cause
-
# an infinite loop if error handling is not implemented correctly
-
1
get "/logging/error_logging"
-
-
1
assert_response :success
-
-
1
response_json = JSON.parse(response.body)
-
-
1
assert_equal "ok", response_json["status"]
-
1
assert_equal "Stack-safe error handling test completed", response_json["message"]
-
end
-
end
-
# typed: true
-
-
1
require "test_helper"
-
-
1
class LogrageFormatterIntegrationTest < ActionDispatch::IntegrationTest
-
1
def setup
-
# Capture JSON output via a dedicated SemanticLogger appender
-
1
@io = StringIO.new
-
1
::SemanticLogger.clear_appenders!
-
# Use synchronous appender to avoid timing issues in tests
-
1
::SemanticLogger.add_appender(io: @io, formatter: LogStruct::SemanticLogger::Formatter.new, async: false)
-
end
-
-
1
def test_request_through_stack_emits_json_request_log
-
1
get "/logging/basic", params: {format: :json}
-
-
1
assert_response :success
-
-
# Ensure all logs are flushed from buffers
-
1
::SemanticLogger.flush
-
-
# Read all logged lines
-
1
@io.rewind
-
1
lines = @io.read.to_s.split("\n").map(&:strip).reject(&:empty?)
-
-
1
refute_empty lines, "Expected some JSON logs to be emitted"
-
-
# Find the request log entry
-
1
request_log = lines.filter_map { |l|
-
begin
-
6
JSON.parse(l)
-
rescue
-
nil
-
end
-
6
}.find { |h| h["evt"] == "request" }
-
-
1
refute_nil request_log, "Expected a request log entry"
-
-
# Validate normalized types
-
1
assert_equal "GET", request_log["method"]
-
1
assert_equal "json", request_log["format"]
-
1
assert_kind_of Hash, request_log["params"]
-
end
-
end
-
# typed: true
-
-
1
require "test_helper"
-
1
require "open3"
-
1
require "timeout"
-
-
1
class PumaIntegrationTest < ActiveSupport::TestCase
-
# Test that running `puma` directly (without `rails server`) auto-enables LogStruct
-
# via Puma::Server detection - no LOGSTRUCT_ENABLED env var needed
-
1
def test_puma_direct_auto_enables_logstruct
-
1
port = 32124
-
env = {
-
1
"RAILS_ENV" => "production",
-
"RAILS_LOG_TO_STDOUT" => "1",
-
"SECRET_KEY_BASE" => "test_secret_key_base_for_production_mode_1234567890"
-
}
-
-
# Run puma directly, NOT rails server
-
1
cmd = ["bundle", "exec", "puma", "-p", port.to_s, "-e", "production"]
-
-
1
Open3.popen3(env, *cmd) do |_stdin, stdout, stderr, wait_thr|
-
begin
-
1
lines = []
-
1
Timeout.timeout(15) do
-
11
while (line = stdout.gets)
-
9
lines << line.strip
-
# Puma outputs "Listening on" when ready
-
9
break if line.include?("Listening on")
-
end
-
end
-
-
# Send TERM to trigger graceful shutdown
-
begin
-
Process.kill("TERM", wait_thr.pid)
-
rescue Errno::ESRCH
-
# Process already exited
-
end
-
-
# Collect shutdown output
-
Timeout.timeout(10) do
-
while (line = stdout.gets)
-
lines << line.strip
-
end
-
end
-
rescue Timeout::Error
-
# Fall through and ensure process is terminated
-
ensure
-
begin
-
1
Process.kill("TERM", wait_thr.pid)
-
rescue Errno::ESRCH
-
# already dead
-
end
-
end
-
-
1
output = lines.join("\n")
-
1
stderr_output = stderr.read
-
-
# Find JSON log lines - LogStruct should be enabled via Puma::Server detection
-
1
json_lines = lines.filter_map do |l|
-
9
JSON.parse(l) if l.strip.start_with?("{")
-
rescue JSON::ParserError
-
nil
-
end
-
-
1
assert_predicate json_lines,
-
:any?,
-
"Expected JSON logs from direct puma invocation (Puma::Server detection should enable LogStruct).\n" \
-
"STDOUT: #{output}\nSTDERR: #{stderr_output}"
-
-
# Verify we got puma lifecycle logs
-
2
puma_logs = json_lines.select { |h| h["src"] == "puma" }
-
-
1
assert_predicate puma_logs,
-
:any?,
-
"Expected puma lifecycle logs. JSON logs: #{json_lines.inspect}"
-
end
-
end
-
-
1
def test_rails_server_emits_structured_puma_logs_and_on_exit
-
1
port = 32123
-
env = {
-
1
"LOGSTRUCT_ENABLED" => "true",
-
"RAILS_ENV" => "test",
-
"RAILS_LOG_TO_STDOUT" => "1"
-
}
-
-
1
cmd = ["bundle", "exec", "rails", "server", "-p", port.to_s]
-
-
1
Open3.popen3(env, *cmd) do |_stdin, stdout, stderr, wait_thr| # cspell:disable-line
-
begin
-
1
lines = []
-
1
Timeout.timeout(10) do
-
8
while (line = stdout.gets)
-
7
lines << line.strip
-
7
break if line.include?("Use Ctrl-C to stop")
-
end
-
end
-
-
# Send TERM to trigger graceful shutdown
-
begin
-
1
Process.kill("TERM", wait_thr.pid)
-
rescue Errno::ESRCH
-
# Process already exited
-
end
-
-
# Collect shutdown output
-
1
Timeout.timeout(10) do
-
4
while (line = stdout.gets)
-
2
lines << line.strip
-
end
-
end
-
rescue Timeout::Error
-
# Fall through and ensure process is terminated
-
ensure
-
begin
-
1
Process.kill("TERM", wait_thr.pid)
-
rescue Errno::ESRCH
-
# already dead
-
end
-
end
-
-
1
output = lines.join("\n")
-
1
lines.filter_map { |l|
-
begin
-
9
JSON.parse(l)
-
rescue
-
5
nil
-
end
-
}
-
# Consider only logs after the first JSON line
-
1
first_json_index = lines.find_index { |l|
-
4
l.strip.start_with?("{") && begin
-
1
JSON.parse(l)
-
rescue
-
nil
-
end
-
}
-
-
1
assert first_json_index, "Did not find any JSON log lines. Output: #{output}\nSTDERR: #{stderr.read}"
-
1
after_lines = lines[first_json_index..]
-
1
after_json = after_lines.filter_map do |l|
-
6
JSON.parse(l)
-
rescue JSON::ParserError
-
2
nil
-
end
-
5
puma_logs = after_json.select { |h| h["src"] == "puma" }
-
-
# Expect exactly 2 structured logs: start, shutdown
-
1
assert_equal 2, puma_logs.length, "Expected exactly 2 Puma logs. Output: #{output}\nSTDERR: #{stderr.read}"
-
-
3
events = puma_logs.map { |h| h["evt"] }
-
-
1
assert_equal ["start", "shutdown"], events, "Expected Puma events in order: start, shutdown"
-
-
1
start = puma_logs[0]
-
-
1
assert_equal "puma", start["src"]
-
1
assert_equal "info", start["lvl"]
-
1
assert_equal "single", start["mode"]
-
1
assert_equal "test", start["environment"]
-
1
assert_kind_of Integer, start["pid"]
-
1
assert_kind_of Array, start["listening_addresses"]
-
2
assert start["listening_addresses"].any? { |a| a.include?(":#{port}") }, "Expected listening address to include :#{port}"
-
-
1
shutdown = puma_logs[1]
-
-
1
assert_equal "puma", shutdown["src"]
-
1
assert_equal "info", shutdown["lvl"]
-
1
assert_kind_of Integer, shutdown["pid"]
-
end
-
end
-
end
-
# typed: true
-
# frozen_string_literal: true
-
-
1
require "test_helper"
-
1
require "open3"
-
1
require "timeout"
-
-
# Test that rake tasks in production have working logs when LogStruct auto-disables.
-
1
class RakeTaskLoggingTest < ActiveSupport::TestCase
-
1
def test_rake_task_in_production_has_clean_logs
-
# Simulate production rake task:
-
# - RAILS_ENV=production (LogStruct would enable for servers)
-
# - NOT a server process (rake task)
-
# - LogStruct should auto-disable and fall back to clean Rails logging
-
1
env = {
-
"RAILS_ENV" => "production"
-
# Don't set LOGSTRUCT_ENABLED - let auto-detection handle it
-
# Rake tasks should auto-disable because no server is detected
-
}
-
-
# Run a rake task that logs something
-
1
cmd = ["bundle", "exec", "rake", "logging:test_output"]
-
-
1
stdout_output = nil
-
1
stderr_output = nil
-
-
1
Open3.popen3(env, *cmd, chdir: Rails.root.to_s) do |_stdin, stdout, stderr, wait_thr|
-
begin
-
1
Timeout.timeout(30) do
-
1
wait_thr.value
-
end
-
rescue Timeout::Error
-
begin
-
Process.kill("TERM", wait_thr.pid)
-
rescue
-
nil
-
end
-
-
flunk "Rake task timed out"
-
end
-
-
1
stdout_output = stdout.read
-
1
stderr_output = stderr.read
-
end
-
-
1
combined_output = "#{stdout_output}\n#{stderr_output}"
-
-
# Should NOT have hybrid format like: {message: "...", tags: [...]}
-
# This pattern indicates the TaggedLogging monkey patch is wrapping
-
# messages in hashes but they're not going through JSON formatter
-
1
hybrid_pattern = /\{message:\s*["'].*["'],\s*tags:/
-
-
1
refute_match hybrid_pattern,
-
combined_output,
-
"Found hybrid log format - LogStruct is half-enabled!\n" \
-
"This means TaggedLogging monkey patch is active but SemanticLogger is not.\n" \
-
"Output:\n#{combined_output}"
-
-
# Should have the exact log messages (not silently dropped)
-
1
assert_includes combined_output,
-
"Test log message from rake task",
-
"Expected to see 'Test log message from rake task' but logs appear to be dropped.\n" \
-
"Output:\n#{combined_output}"
-
-
# Should have the [custom_tag] prefix in clean Rails format
-
1
assert_match(/\[custom_tag\].*Tagged test log message/,
-
combined_output,
-
"Expected to see '[custom_tag] Tagged test log message' in clean Rails format.\n" \
-
"Output:\n#{combined_output}")
-
end
-
end
-
# typed: true
-
-
1
require "test_helper"
-
1
require "open3"
-
1
require "timeout"
-
1
require "fileutils"
-
-
1
class TestLoggingIntegrationTest < ActiveSupport::TestCase
-
1
def test_test_logs_go_to_file_not_stdout
-
# Clean up log file before test
-
1
log_file = Rails.root.join("log/test.log")
-
1
FileUtils.rm_f(log_file)
-
1
FileUtils.touch(log_file)
-
-
env = {
-
1
"LOGSTRUCT_ENABLED" => "true",
-
"RAILS_ENV" => "test"
-
}
-
-
# Run a simple test that will generate logs
-
1
cmd = ["bundle", "exec", "rails", "test", "test/models/user_test.rb"]
-
-
1
Open3.popen3(env, *cmd, chdir: Rails.root.to_s) do |_stdin, stdout, stderr, wait_thr|
-
begin
-
1
Timeout.timeout(30) do
-
1
wait_thr.value # Wait for process to complete
-
end
-
rescue Timeout::Error
-
begin
-
Process.kill("TERM", wait_thr.pid)
-
rescue
-
nil
-
end
-
-
flunk "Test process timed out"
-
end
-
-
1
stdout_output = stdout.read
-
1
stderr.read
-
-
# Check that stdout doesn't contain JSON logs
-
1
json_lines_in_stdout = stdout_output.lines.select { |line|
-
7
line.strip.start_with?("{") && begin
-
JSON.parse(line)
-
rescue
-
nil
-
end
-
}
-
-
1
assert_equal 0,
-
json_lines_in_stdout.length,
-
"Expected no JSON logs in stdout, but found #{json_lines_in_stdout.length} lines. First few:\n#{json_lines_in_stdout.first(3).join}"
-
-
# Check that log/test.log contains JSON logs
-
1
assert_path_exists log_file, "Expected log/test.log to exist"
-
1
log_contents = File.read(log_file)
-
-
1
json_lines_in_file = log_contents.lines.select { |line|
-
11
line.strip.start_with?("{") && begin
-
11
JSON.parse(line)
-
rescue
-
nil
-
end
-
}
-
-
1
assert_operator json_lines_in_file.length, :>, 0, "Expected JSON logs in log/test.log, but found none. File size: #{log_contents.bytesize} bytes"
-
-
# Verify at least one structured log exists
-
12
parsed_logs = json_lines_in_file.map { |line| JSON.parse(line) }
-
-
2
assert parsed_logs.any? { |log| log["src"] && log["evt"] && log["lvl"] },
-
"Expected at least one properly structured log in log/test.log"
-
end
-
ensure
-
# Clean up
-
1
FileUtils.rm_f(log_file) if log_file
-
end
-
end
-
# typed: true
-
-
1
require "test_helper"
-
-
1
class UserTest < ActiveSupport::TestCase
-
1
test "simple test that generates logs" do # rubocop:disable Minitest/NoAssertions
-
# This test just needs to run and generate some logs
-
1
Rails.logger.info("Test log message")
-
end
-
end
-
# typed: true
-
-
1
require "simplecov" unless defined?(SimpleCov)
-
1
require "simplecov-json"
-
1
require "sorbet-runtime"
-
1
require "debug"
-
-
1
unless SimpleCov.running
-
SimpleCov.formatters = [
-
SimpleCov::Formatter::HTMLFormatter,
-
SimpleCov::Formatter::JSONFormatter
-
]
-
-
SimpleCov.start do
-
T.bind(self, T.all(SimpleCov::Configuration, Kernel))
-
-
gem_path = File.expand_path("../../../../", __FILE__)
-
SimpleCov.root(gem_path)
-
-
add_filter "rails_test_app"
-
-
coverage_dir "coverage_rails"
-
-
enable_coverage :branch
-
primary_coverage :branch
-
end
-
-
SimpleCov.at_exit do
-
SimpleCov.result
-
end
-
end
-
-
# Require logstruct after starting SimpleCov
-
1
require "logstruct"
-
-
1
ENV["RAILS_ENV"] ||= "test"
-
1
require_relative "../config/environment"
-
1
require "rails/test_help"
-
1
require "minitest/reporters"
-
-
# Configure colorful test output
-
1
Minitest::Reporters.use! Minitest::Reporters::SpecReporter.new
-
-
# Configure the test database
-
1
class ActiveSupport::TestCase
-
# Setup all fixtures in test/fixtures/*.yml for all tests in alphabetical order.
-
# fixtures :all
-
-
# Add more helper methods to be used by all tests here...
-
-
# Helper method to run jobs synchronously
-
1
def perform_enqueued_jobs
-
jobs = ActiveJob::Base.queue_adapter.enqueued_jobs
-
jobs.each do |job|
-
ActiveJob::Base.execute job
-
end
-
end
-
end
-
-
# Ensure LogStruct is enabled and emits JSON in tests across Rails versions
-
begin
-
1
LogStruct.configure do |config|
-
1
config.enabled = true
-
# Prefer production-style JSON in development/test
-
1
config.prefer_json_in_development = true
-
end
-
rescue NameError
-
# LogStruct not loaded; ignore
-
end