+ # NOTE(review): this hunk is the interior of an event-push method — the
+ # enclosing `def` and the closing `end` of the outer `if` lie outside this
+ # diff. `ws` appears to be per-connection websocket state (filters,
+ # last_log_id, send); `notify_id` is presumably the id delivered by the
+ # database notification that triggered this call — TODO confirm at the caller.
+ # Must have at least one filter set up to receive events
+ if ws.filters.length > 0
+ # Start with log rows readable by user, sorted in ascending order
+ logs = Log.readable_by(ws.user).order("id asc")
+
+ cond_id = nil
+ cond_out = []
+ param_out = []
+
+ if !ws.last_log_id.nil?
+ # Client is only interested in log rows that are newer than the
+ # last log row seen by the client.
+ cond_id = "logs.id > ?"
+ param_out << ws.last_log_id
+ elsif !notify_id.nil?
+ # No last log id, so look at rows starting with notify id
+ cond_id = "logs.id >= ?"
+ param_out << notify_id
+ else
+ # No log id to start from, nothing to do, return
+ return
+ end
+
+ # Now build filters provided by client
+ ws.filters.each do |filter|
+ ft = record_filters filter.filters, Log
+ if ft[:cond_out].any?
+ # Join the clauses within a single subscription filter with AND
+ # so it is consistent with regular queries
+ cond_out << "(#{ft[:cond_out].join ') AND ('})"
+ param_out += ft[:param_out]
+ end
+ end
+
+ # Add filters to query
+ # All user-supplied values travel through `?` placeholders in param_out;
+ # only clause text produced by record_filters is interpolated here.
+ if cond_out.any?
+ # Join subscriptions with OR
+ logs = logs.where(cond_id + " AND ((#{cond_out.join ') OR ('}))", *param_out)
+ else
+ logs = logs.where(cond_id, *param_out)
+ end
+
+ # Execute query and actually send the matching log rows
+ # Each row is serialized and pushed over the socket; last_log_id advances
+ # as rows go out, so a crash mid-loop does not re-send delivered rows.
+ count = 0
+ limit = 100
+
+ logs.limit(limit).each do |l|
+ ws.send(l.as_api_response.to_json)
+ ws.last_log_id = l.id
+ count += 1
+ end
+
+ if count == limit
+ # Number of rows returned was capped by limit(), we need to schedule
+ # another query to get more logs (will start from last_log_id
+ # reported by current query)
+ # NOTE(review): scheduled on the EventMachine reactor rather than
+ # recursing inline, which yields the reactor thread between batches.
+ EventMachine::schedule do
+ push_events ws, nil
+ end
+ elsif !notify_id.nil? and (ws.last_log_id.nil? or notify_id > ws.last_log_id)
+ # Number of rows returned was less than cap, but the notify id is
+ # higher than the last id visible to the client, so update last_log_id
+ # NOTE(review): rows between last_log_id and notify_id that matched no
+ # filter are skipped permanently by this advance — presumably intended,
+ # since they would never be sent anyway; verify against filter semantics.
+ ws.last_log_id = notify_id
+ end
+ elsif !notify_id.nil?
+ # No filters set up, so just record the sequence number
+ ws.last_log_id = notify_id