Skip to content

Commit

Permalink
Fuck it, we're shipping it, boys
Browse files Browse the repository at this point in the history
  • Loading branch information
austinpray committed Nov 29, 2017
1 parent 735431d commit 3227261
Show file tree
Hide file tree
Showing 23 changed files with 581 additions and 0 deletions.
1 change: 1 addition & 0 deletions .env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
SLACK_API_TOKEN=xoxb-BOGUS
6 changes: 6 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,9 @@
# project specific

/.env

# python stuff below

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
Expand Down
15 changes: 15 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
FROM python:3.6-stretch

ENV workdir /kizuna

WORKDIR ${workdir}

# graphviz is required by pygraphviz to lay out and render the @-graph
# images. Remove the apt package lists in the same RUN layer so they do
# not bloat the final image.
RUN apt-get update \
    && apt-get install -y graphviz \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements.txt alone first so the pip layer is cached across
# source-only changes; --no-cache-dir keeps pip's download cache out of
# the image.
COPY requirements.txt ${workdir}/requirements.txt
RUN pip install --no-cache-dir -r requirements.txt

COPY . ${workdir}

# -u: unbuffered stdout so logs appear immediately in `docker logs`.
CMD ["python", "-u", "./bot.py"]
72 changes: 72 additions & 0 deletions alembic.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = db

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
#truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; this defaults
# to db/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat db/versions

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8


# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
54 changes: 54 additions & 0 deletions bot.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
import os
import signal
import sys
import time

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from slackclient import SlackClient

from kizuna.Kizuna import Kizuna
from kizuna.PingCommand import PingCommand
from kizuna.AtGraphCommand import AtGraphCommand
from kizuna.AtGraphDataCollector import AtGraphDataCollector
from kizuna.strings import HAI_DOMO


def signal_handler(signum, frame):
    """Exit the process cleanly when SIGINT/SIGTERM is received.

    The first parameter was renamed from ``signal`` to ``signum`` so it
    no longer shadows the ``signal`` module imported at the top of the
    file. The signal module invokes handlers positionally, so the rename
    is safe for callers.

    :param signum: the signal number delivered by the OS
    :param frame: current stack frame (unused)
    :raises SystemExit: always, with exit code 0
    """
    print("\nprogram exiting gracefully")
    sys.exit(0)


READ_WEBSOCKET_DELAY = 0.01  # seconds between RTM polls

if __name__ == "__main__":
    # Exit gracefully on Ctrl-C or a `docker stop` (SIGTERM).
    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)
    sc = SlackClient(os.environ.get('SLACK_API_TOKEN'))
    db_engine = create_engine(os.environ.get('DATABASE_URL'))
    Session = sessionmaker(bind=db_engine)
    if sc.rtm_connect():
        # auth.test returns our own user id so commands can recognize
        # messages addressed to the bot.
        auth = sc.api_call('auth.test')
        bot_id = auth['user_id']

        k = Kizuna(bot_id, sc)

        pc = PingCommand()
        k.register_command(pc)

        at_graph_command = AtGraphCommand(Session)
        k.register_command(at_graph_command)

        at_graph_data_collector = AtGraphDataCollector(Session, sc)
        k.register_command(at_graph_data_collector)

        print("{} BOT_ID {}".format(HAI_DOMO, bot_id))
        while True:
            read = sc.rtm_read()
            if read:
                for output in read:
                    # Some RTM events (e.g. message-send acknowledgements)
                    # carry no 'type' key; .get() avoids a KeyError that
                    # would crash the whole read loop.
                    if output.get('type') == 'message':
                        k.handle_message(output)
            time.sleep(READ_WEBSOCKET_DELAY)
    else:
        print("Can't connect to slack.")
1 change: 1 addition & 0 deletions db/README
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Generic single-database configuration.
68 changes: 68 additions & 0 deletions db/env.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
from __future__ import with_statement
from alembic import context
from sqlalchemy import create_engine
from logging.config import fileConfig
import os

# The Alembic Config object provides access to the values within the
# .ini file in use (alembic.ini at the repo root, per script_location).
config = context.config

# Wire up Python logging from the [loggers]/[handlers]/[formatters]
# sections of the ini file.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# None disables autogenerate diffing; migrations here are written by hand.
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Emit migration SQL without connecting to the database.

    Offline mode configures the Alembic context with only a URL (taken
    from the DATABASE_URL environment variable) instead of an Engine,
    so no DBAPI needs to be installed. Calls to context.execute() are
    written to the script output rather than executed.
    """
    context.configure(
        url=os.environ.get('DATABASE_URL'),
        target_metadata=target_metadata,
        literal_binds=True,
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations against a live database connection.

    Builds an Engine from the DATABASE_URL environment variable, opens
    a connection, and associates it with the Alembic context for the
    duration of the migration run.
    """
    engine = create_engine(os.environ.get('DATABASE_URL'))

    with engine.connect() as connection:
        context.configure(connection=connection,
                          target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()

# Alembic sets offline mode when invoked with --sql (e.g.
# `alembic upgrade head --sql`); otherwise run against a live connection.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
24 changes: 24 additions & 0 deletions db/script.py.mako
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
${upgrades if upgrades else "pass"}


def downgrade():
${downgrades if downgrades else "pass"}
30 changes: 30 additions & 0 deletions db/versions/276b2fbf75a6_creates_at_graph_edges_table.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
"""creates at_graph_edges table
Revision ID: 276b2fbf75a6
Revises: 380c385b82a6
Create Date: 2017-11-29 06:52:16.116826
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '276b2fbf75a6'
down_revision = '380c385b82a6'
branch_labels = None
depends_on = None


def upgrade():
    # Directed-edge table for the @-mention graph: one row per
    # (head_user -> tail_user) pair, both referencing users.id.
    # `weight` is presumably the accumulated mention count — confirm
    # against AtGraphDataCollector, which writes these rows.
    # NOTE(review): the FK columns are nullable and the pair is not
    # unique-constrained; verify whether that is intentional.
    op.create_table(
        'at_graph_edges',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('head_user_id', sa.Integer, sa.ForeignKey('users.id')),
        sa.Column('tail_user_id', sa.Integer, sa.ForeignKey('users.id')),
        sa.Column('weight', sa.Integer, nullable=False)
    )


def downgrade():
    # Exact inverse of upgrade(): drop the edges table.
    op.drop_table('at_graph_edges')
29 changes: 29 additions & 0 deletions db/versions/380c385b82a6_create_user_table.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
"""create user table
Revision ID: 380c385b82a6
Revises:
Create Date: 2017-11-29 06:35:00.723089
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '380c385b82a6'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # Base users table: `slack_id` is the Slack workspace user id
    # (unique, indexed for lookup by incoming events); `name` is the
    # display name used as the graph node label.
    op.create_table(
        'users',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.String, nullable=False),
        sa.Column('slack_id', sa.String, index=True, unique=True),
    )


def downgrade():
    # Exact inverse of upgrade(): drop the users table.
    op.drop_table('users')
19 changes: 19 additions & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
version: '3'
services:
  # Local Postgres for development; published on host port 4444 so you
  # can reach it with psql without entering the container.
  db:
    image: postgres
    ports:
      - "4444:5432"
    volumes:
      - "/var/lib/postgresql/data"
    environment:
      POSTGRES_DB: kizuna
      POSTGRES_PASSWORD: kizuna
      POSTGRES_USER: kizuna
  # The bot container; reads SLACK_API_TOKEN (see .env.example) from
  # .env and bind-mounts the source tree for live editing.
  bot:
    build: .
    env_file: .env
    volumes:
      - .:/kizuna
    depends_on:
      - db
5 changes: 5 additions & 0 deletions kizuna/AtGraph.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
import re

def extract_ats(text):
    """Return the set of unique ids mentioned as ``<@...>`` in *text*.

    re.DOTALL lets a mention token span newlines; the set comprehension
    collapses duplicate mentions.
    """
    mention_pattern = re.compile(r"<@(.*?)>", re.DOTALL)
    return {match.group(1) for match in mention_pattern.finditer(text)}

35 changes: 35 additions & 0 deletions kizuna/AtGraphCommand.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
from kizuna.AtGraphEdge import AtGraphEdge
from kizuna.User import User
from kizuna.Command import Command
import pygraphviz as pgv


class AtGraphCommand(Command):
    """Bot command that renders the @-mention graph as a PNG.

    Loads every AtGraphEdge, lays out a directed graph with graphviz's
    `dot`, and uploads the rendered image to the channel the request
    came from.
    """

    def __init__(self, db_session) -> None:
        """:param db_session: SQLAlchemy session factory, called per request."""
        help_text = "{bot} at graph - show the graph of @'s\n"
        self.db_session = db_session

        super().__init__('at-graph', "(?:show )?at graph(?: show)?$", help_text, True)

    def respond(self, slack_client, message, matches):
        # NOTE(review): the session is never closed or returned to the
        # factory; consider session.close() — verify factory semantics.
        session = self.db_session()

        edges = session.query(AtGraphEdge).all()

        # No edges recorded yet: stay silent rather than upload an
        # empty graph. (An empty list is already falsy; the original
        # `len(edges) < 1` re-check was redundant.)
        if not edges:
            return

        graph = pgv.AGraph(directed=True)
        for edge in edges:
            graph.add_edge(edge.head_user.name,
                           edge.tail_user.name,
                           label=edge.weight,
                           weight=edge.weight)

        image_path = '/tmp/graph.png'
        graph.layout(prog='dot')
        graph.draw(image_path)

        # Context manager closes the file handle even if the API call
        # raises — the original leaked the handle from the bare open().
        with open(image_path, 'rb') as image_file:
            slack_client.api_call('files.upload',
                                  as_user=True,
                                  channels=message['channel'],
                                  filename='graph.png',
                                  file=image_file)

        return None
Loading

0 comments on commit 3227261

Please sign in to comment.