Add a Sidekiq helper and start a method that reads data from a raw JSON file to ship to Elasticsearch
This commit is contained in:
parent
9b0e11bcd9
commit
f8d790e6df
5 changed files with 74 additions and 1 deletions
52
config/sidekiq.service
Normal file
52
config/sidekiq.service
Normal file
|
@ -0,0 +1,52 @@
|
||||||
|
#
# systemd unit file for CentOS 7, Ubuntu 15.04
#
# Customize this file based on your bundler location, app directory, etc.
# Put this in /usr/lib/systemd/system (CentOS) or /lib/systemd/system (Ubuntu).
# Run:
#   - systemctl enable sidekiq
#   - systemctl {start,stop,restart} sidekiq
#
# This file corresponds to a single Sidekiq process. Add multiple copies
# to run multiple processes (sidekiq-1, sidekiq-2, etc).
#
# See Inspeqtor's Systemd wiki page for more detail about Systemd:
# https://github.com/mperham/inspeqtor/wiki/Systemd
#
[Unit]
Description=sidekiq
# start us only once the network and logging subsystems are available,
# consider adding redis-server.service if Redis is local and systemd-managed.
After=syslog.target network.target

# See these pages for lots of options:
# http://0pointer.de/public/systemd-man/systemd.service.html
# http://0pointer.de/public/systemd-man/systemd.exec.html
[Service]
Type=simple
WorkingDirectory=/opt/myapp/current
# If you use rbenv:
# ExecStart=/bin/bash -lc '/home/deploy/.rbenv/shims/bundle exec sidekiq -e production'
# If you use the system's ruby:
ExecStart=/usr/local/bin/bundle exec sidekiq -e production
User=deploy
Group=deploy
UMask=0002

# Greatly reduce Ruby memory fragmentation and heap usage
# https://www.mikeperham.com/2018/04/25/taming-rails-memory-bloat/
Environment=MALLOC_ARENA_MAX=2

# if we crash, restart
RestartSec=1
Restart=on-failure

# output goes to /var/log/syslog
StandardOutput=syslog
StandardError=syslog

# This will default to "bundler" if we don't specify it
SyslogIdentifier=sidekiq

[Install]
WantedBy=multi-user.target
|
|
@ -44,5 +44,4 @@ class ElasticSearchHelper
|
||||||
self.json_to_es(paste_json)
|
self.json_to_es(paste_json)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
end
|
end
|
||||||
|
|
12
helpers/sidekiq_helper.rb
Normal file
12
helpers/sidekiq_helper.rb
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
require 'sidekiq'
require 'sidekiq/api'

# Load up the Redis config shared with the Sidekiq server/client.
# FIX: a plain `require` with a relative path is resolved against
# $LOAD_PATH (not this file's directory), so it raises LoadError on
# Ruby >= 1.9.2. `require_relative` resolves against this file instead.
require_relative '../config/initializers/sidekiq'

# Helper around the Sidekiq API (sidekiq/api) for inspecting and managing
# queues/jobs. Currently an empty shell — methods to be added.
# (The original no-op `def initialize; end` was dead code: it is exactly
# what Object#initialize already provides, so it has been removed.)
class SidekiqHelper
end
|
|
@ -169,6 +169,11 @@ class Pastebinner
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
# Read a previously saved raw paste dump from disk and return it as a
# JSON string, run through the same hash_paste transformation used for
# live data.
#
# raw_paste_json_file - String path of the file holding raw paste JSON.
#
# Returns the String produced by hash_paste(...).to_json.
# NOTE(review): hash_paste is defined elsewhere in this class — its exact
# output shape isn't visible here; presumably a Hash/Array of pastes.
def json_paste_from_file(raw_paste_json_file)
  file_contents = File.read(raw_paste_json_file)
  hash_paste(file_contents).to_json
end
|
||||||
|
|
||||||
# keep this method private so we are not letting anyone run any method in our program
|
# keep this method private so we are not letting anyone run any method in our program
|
||||||
private
|
private
|
||||||
|
|
||||||
|
|
|
@ -1,9 +1,14 @@
|
||||||
# Sidekiq job: scrape public pastes from Pastebin and bulk-index them
# into Elasticsearch.
#
# NOTE(review): es_object and pb_object are live helper instances passed
# as job arguments. Sidekiq serializes job args to JSON, so complex
# objects won't round-trip through Redis — consider passing simple
# identifiers/config and constructing the helpers inside perform.
class PasteToEs
  include Sidekiq::Worker

  # Don't retry: a retry would repeat the scrape and risk hitting
  # Pastebin's rate limit, so just let any failure drop.
  sidekiq_options retry: false

  # es_object - Elasticsearch helper exposing #json_to_es_bulk.
  # pb_object - Pastebinner-like object used to scrape and serialize pastes.
  # paste_max - limit passed to scrape_public_pastes (presumably the max
  #             number of pastes per run — confirm against Pastebinner).
  def perform(es_object, pb_object, paste_max)
    # Use Sidekiq's per-job logger instead of allocating a fresh
    # Logger.new(STDOUT) on every perform call.
    logger.info("PasteToEs started")

    # get public pastes and their keys
    pastes = pb_object.scrape_public_pastes(paste_max)
    keys = pb_object.get_unique_paste_keys(pastes)

    # build it into json and send it to elasticsearch
    json_data = pb_object.json_paste(keys)
    es_object.json_to_es_bulk(json_data)
  end
end
||||||
|
|
Loading…
Add table
Reference in a new issue