# Defaults, may be overwritten by command line arguments.
SERVER = "irc.freenode.net"
CHANNEL = "#plomlombot-test"
USERNAME = "plomlombot"
DBDIR = os.path.expanduser("~/plomlombot_db")


def write_to_file(path, mode, text):
    # Write text to the file at path, opened with the given mode.
    with open(path, mode) as f:
        f.write(text)


class ExceptionForRestart(Exception):
    pass


class Line:

    def __init__(self, line):
        self.line = line
        self.tokens = line.split(" ")
        if self.tokens[0][0] == ":":
            for rune in self.tokens[0][1:]:
                if rune in {"!", "@"}:
        if len(self.tokens) > 2:
            for rune in self.tokens[2]:
                if rune in {"!", "@"}:
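
# Illustration (not part of the original file, values are made up): a raw IRC
# line such as
#     ":alice!alice@example.org PRIVMSG #plomlombot-test :hello"
# splits on spaces into [":alice!alice@example.org", "PRIVMSG",
# "#plomlombot-test", ":hello"]; the elided parsing above presumably derives
# line.sender ("alice") from the prefix up to the first "!" or "@" and
# line.receiver ("#plomlombot-test") from the third token, which is how those
# attributes are used by Log.log and Session further down.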


class Log:

    def __init__(self, chandir, nickname, username, channel, rmlogs):
        self.nickname = nickname
        self.username = username
        self.channel = channel
        self.chandir = chandir
        self.rmlogcycle = rmlogs
        self.rawlogdir = chandir + "raw_logs/"
        self.logdir = chandir + "logs/"
        if not os.path.exists(self.logdir):
            os.makedirs(self.logdir)
        if not os.path.exists(self.rawlogdir):
            os.makedirs(self.rawlogdir)

    def log(self, line, sent=False):
        if sent:
            line = Line("< " + line)
            line.sender = self.nickname
            identity = self.username + "@localhost"
        now = datetime.datetime.utcnow()
        form = "%Y-%m-%d %H:%M:%S UTC"
        write_to_file(self.rawlogdir + now.strftime("%Y-%m-%d") + ".txt", "a",
                      now.strftime(form) + separator + line.line + "\n")
        to_log = irclog.format_logline(line, self.channel, identity)
        write_to_file(self.logdir + now.strftime("%Y-%m-%d") + ".txt", "a",
                      now.strftime(form) + " " + to_log + "\n")
        if self.rmlogcycle > 0:
            for f in os.listdir(self.logdir):
                f = os.path.join(self.logdir, f)
                if os.path.isfile(f) and \
                        os.stat(f).st_mtime < time.time() - self.rmlogcycle:
                    os.remove(f)

    def separator_line(self):
        now = datetime.datetime.utcnow()
        write_to_file(self.logdir + now.strftime("%Y-%m-%d") + ".txt", "a",
                      "-----------------------\n")


class IO:

    def __init__(self, server, port, timeout):
        self.log = None
        self.timeout = timeout
        self.socket = socket.socket()
        try:
            self.socket.connect((server, port))
        except OSError:
            raise ExceptionForRestart
        self.socket.setblocking(0)
        self.line_buffer = []
        self.rune_buffer = ""
        self.last_pong = time.time()
        line = self.recv_line(send_ping=False)
        if not line or len(line) < 1:
            raise ExceptionForRestart
        self.servername = line.split(" ")[0][1:]

    def _pingtest(self, send_ping=True):
        if self.last_pong + self.timeout < time.time():
            print("SERVER NOT ANSWERING")
            raise ExceptionForRestart
        if send_ping:
            self.send_line("PING " + self.servername)

    def send_line(self, msg):
        msg = msg.replace("\r", " ")
        msg = msg.replace("\n", " ")
        if len(msg.encode("utf-8")) > 510:
            print("NOT SENT LINE TO SERVER (too long): " + msg)
            return
        print("LINE TO SERVER: "
              + str(datetime.datetime.now()) + ": " + msg)
        if self.log:
            self.log.log(msg, True)
        msg = msg + "\r\n"
        msg_len = len(msg)
        total_sent_len = 0
        while total_sent_len < msg_len:
            sent_len = self.socket.send(bytes(msg[total_sent_len:], "UTF-8"))
            if sent_len == 0:
                print("SOCKET CONNECTION BROKEN")
                raise ExceptionForRestart
            total_sent_len += sent_len

    def _recv_line_wrapped(self, send_ping=True):
        if len(self.line_buffer) > 0:
            return self.line_buffer.pop(0)
        ready = select.select([self.socket], [], [], int(self.timeout / 2))
        if not ready[0]:
            self._pingtest(send_ping)
            return None
        self.last_pong = time.time()
        received_bytes = self.socket.recv(1024)
        try:
            received_runes = received_bytes.decode("UTF-8")
        except UnicodeDecodeError:
            received_runes = received_bytes.decode("latin1")
        if len(received_runes) == 0:
            print("SOCKET CONNECTION BROKEN")
            raise ExceptionForRestart
        self.rune_buffer += received_runes
        lines_split = str.split(self.rune_buffer, "\r\n")
        self.line_buffer += lines_split[:-1]
        self.rune_buffer = lines_split[-1]
        if len(self.line_buffer) > 0:
            return self.line_buffer.pop(0)

    def recv_line(self, send_ping=True):
        line = self._recv_line_wrapped(send_ping)
        if line:
            print("LINE FROM SERVER " + str(datetime.datetime.now()) + ": " +
                  line)
        return line


def handle_command(command, argument, notice, target, session):

        if not os.access(session.quotesfile, os.F_OK):
            write_to_file(session.quotesfile, "w",
                          "QUOTES FOR " + target + ":\n")
        write_to_file(session.quotesfile, "a", argument + "\n")
        quotesfile = open(session.quotesfile, "r")
        lines = quotesfile.readlines()
        quotesfile.close()
        notice("added quote #" + str(len(lines) - 1))
202 notice("syntax: !quote [int] OR !quote search QUERY "
203 "OR !quote offset-search [int] QUERY")
204 notice("QUERY may be a boolean grouping of quoted or unquoted " +
205 "search terms, examples:")
206 notice("!quote search foo")
207 notice("!quote search foo AND (bar OR NOT baz)")
208 notice("!quote search \"foo\\\"bar\" AND ('NOT\"' AND \"'foo'\"" +
210 notice("The offset-search int argument defines how many matches "
211 "to skip (useful if results are above maximum number to "
        tokens = argument.split(" ")
        if (len(tokens) == 1 and not tokens[0].isdigit()) or \
            (len(tokens) > 1 and
                (tokens[0] not in {"search", "offset-search"} or
                 (tokens[0] == "offset-search" and
                  ((not len(tokens) > 2) or (not tokens[1].isdigit()))))):
        if not os.access(session.quotesfile, os.F_OK):
            notice("no quotes available")
            return
        quotesfile = open(session.quotesfile, "r")
        lines = quotesfile.readlines()
        if len(tokens) == 1 and tokens[0].isdigit():
            i = int(tokens[0])
            if i == 0 or i > len(lines):
                notice("there's no quote of that index")
                return
        elif len(tokens) > 1:
            to_skip = 0
            if tokens[0] == "search":
                query = str.join(" ", tokens[1:])
            elif tokens[0] == "offset-search":
                to_skip = int(tokens[1])
                query = str.join(" ", tokens[2:])
            try:
                results = plomsearch.search(query, lines)
            except plomsearch.LogicParserError as err:
                notice("failed query parsing: " + str(err))
                return
            if len(results) == 0:
                notice("no quotes matching query")
                return
            if to_skip >= len(results):
                notice("skipped all quotes matching query")
                return
            notice("found %s matches, showing max. 3, skipping %s"
                   % (len(results), to_skip))
            for i in range(len(results)):
                if i >= to_skip and i < to_skip + 3:
                    result = results[i]
                    notice("quote #" + str(result[0] + 1) + ": "
                           + result[1][:-1])
        else:
            i = random.randrange(len(lines))
            notice("quote #" + str(i + 1) + ": " + lines[i][:-1])
271 notice("syntax: !markov [integer from 1 to infinite]")
274 usable_selections = []
275 for i in range(select_length, 0, -1):
276 for selection in selections:
280 if snippet[-j] != selection[-(j+1)]:
284 usable_selections += [selection]
285 if [] != usable_selections:
287 if [] == usable_selections:
288 usable_selections = selections
289 selection = choice(usable_selections)
290 return selection[select_length]
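
        # Illustration (not part of the original file, tokens are made up):
        # each entry of selections holds select_length context tokens plus one
        # continuation token, e.g. with select_length == 2 the corpus phrase
        # "... the quick fox ..." yields ["the", "quick", "fox"]. For a
        # snippet ending in ["the", "quick"] the longest matching suffix wins
        # and markov() returns "fox"; if nothing matches, a continuation is
        # drawn from all selections at random.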

        tokens = argument.split(" ")
        if (len(tokens) > 1 or (len(tokens) == 1 and not tokens[0].isdigit())):
        from random import choice, shuffle
        notice("bad value, using default: " + str(select_length))
        if not os.access(session.markovfile, os.F_OK):
            notice("not enough text to markov for selection length")
            return

        # Lowercase incoming lines, ensure they end in a sentence end mark.
        file = open(session.markovfile, "r")
        lines = file.readlines()
        sentence_end_markers = ".!?)("
        for line in lines:
            line = line.lower().replace("\n", "")
            if line[-1] not in sentence_end_markers:
                line += "."
            tokens += line.split()
        if len(tokens) - 1 <= select_length:
            notice("not enough text to markov")
            return

        # Replace URLs with escape string for now, so that the Markov selector
        # won't see them as different strings. Stash replaced URLs in urls.
        url_starts = ["http://", "https://", "<http://", "<https://"]
        for i in range(len(tokens)):
            for url_start in url_starts:
                if tokens[i][:len(url_start)] == url_start:
                    length = len(tokens[i])
                    if url_start[0] == "<":
                        length = tokens[i].index(">") + 1
                    urls += [tokens[i][:length]]
                    tokens[i] = url_escape + tokens[i][length:]

        # For each snippet of select_length, use markov() to find continuation
        # token from selections. Replace present users' names with malkovich.
        # Start snippets with the beginning of a sentence, if possible.
        for i in range(len(tokens) - select_length):
            for j in range(select_length + 1):
                token_list += [tokens[i + j]]
            selections += [token_list]
        for i in range(select_length):
        for i in range(len(selections)):
            if selections[i][0][-1] in sentence_end_markers:
                for j in range(select_length):
                    snippet[j] = selections[i][j + 1]
        malkovich = "malkovich"
        new_end = markov(snippet)
        for name in session.users_in_chan:
            if new_end[:len(name)] == name.lower():
                new_end = malkovich + new_end[len(name):]
        if len(msg) + len(new_end) > 200:
        for i in range(select_length - 1):
            snippet[i] = snippet[i + 1]
        snippet[select_length - 1] = new_end

        # Replace occurrences of url escape string with random choice from urls.
        index = msg.find(url_escape)
        msg = msg.replace(url_escape, choice(urls), 1)

        # More meaningful ways to randomly end sentences.
        notice(msg + malkovich + ".")

        def try_open(mode):
            try:
                twtfile = open(session.twtfile, mode)
            except (PermissionError, FileNotFoundError) as err:
                notice("can't access or create twt file: " + str(err))
                return None
            return twtfile

        from datetime import datetime
        if not os.access(session.twtfile, os.F_OK):
            twtfile = try_open("w")
        twtfile = try_open("a")
        twtfile.write(datetime.utcnow().isoformat() + "\t" + argument + "\n")
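
        # Illustration (not part of the original file, values are made up):
        # each twt is appended as one tab-separated line of the form
        #     2024-01-01T12:00:00.000000<TAB>hello world
        # i.e. the UTC timestamp from datetime.utcnow().isoformat(), a tab,
        # and the message text.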
411 if "addquote" == command:
413 elif "quote" == command:
415 elif "markov" == command:
417 elif "twt" == command:


def handle_url(url, notice, show_url=False):

    def mobile_twitter_hack(url):
        re1 = 'https?://(mobile.twitter.com/)[^/]+(/status/)'
        re2 = r'https?://mobile.twitter.com/([^/]+)/status/([^\?/]+)'
        m = re.search(re1, url)
        if m and m.group(1) == 'mobile.twitter.com/' \
                and m.group(2) == '/status/':
            m = re.search(re2, url)
            url = 'https://twitter.com/' + m.group(1) + '/status/' + m.group(2)
            handle_url(url, notice, True)
            return True
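
    # Illustration (not part of the original file, values are made up): the
    # hack above rewrites e.g.
    #     https://mobile.twitter.com/someuser/status/12345
    # to
    #     https://twitter.com/someuser/status/12345
    # and re-runs handle_url() on the rewritten URL with show_url=True, so the
    # page title gets announced together with the rewritten URL.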

    class TimeOut(Exception):
        pass

    def timeout_handler(ignore1, ignore2):
        raise TimeOut("timeout")

    signal.signal(signal.SIGALRM, timeout_handler)
    try:
        r = requests.get(url, headers={'User-Agent': 'plomlombot'},
                         stream=True)
        r.raw.decode_content = True
        text = r.raw.read(10000000 + 1)
        if len(text) > 10000000:
            raise ValueError('Too large a response')
    except (requests.exceptions.TooManyRedirects,
            requests.exceptions.ConnectionError,
            requests.exceptions.InvalidURL,
            requests.exceptions.InvalidSchema) as error:
        notice("trouble following url: " + str(error))
        return
    if mobile_twitter_hack(url):
        return True
    title = bs4.BeautifulSoup(text, "html5lib").title
    if title and title.string:
        prefix = "page title: "
        if show_url:
            prefix = "page title for <" + url + ">: "
        notice(prefix + title.string.strip())
    else:
        notice("page has no title tag")


class Session:

    def __init__(self, io, username, nickname, sasl, channel, twtfile, dbdir,
                 rmlogs, markov_input, no_show_page_titles):
        self.io = io
        self.nickname = nickname
        self.users_in_chan = []
        self.twtfile = twtfile
        hash_channel = hashlib.md5(channel.encode("utf-8")).hexdigest()
        chandir = dbdir + "/" + hash_channel + "/"
        self.markov_input = markov_input
        self.markovfile = chandir + "markovfeed"
        self.quotesfile = chandir + "quotes"
        self.log = Log(chandir, self.nickname, username, channel, rmlogs)
        self.io.send_line("CAP REQ :sasl")
        self.io.send_line("NICK " + self.nickname)
        self.io.send_line("USER " + username + " 0 * : ")
        self.io.send_line("AUTHENTICATE PLAIN")
        auth = username + '\0' + username + '\0' + sasl
        auth_encoded = base64.b64encode(auth.encode()).decode().rstrip()
        self.io.send_line("AUTHENTICATE " + auth_encoded)
        self.io.send_line("CAP END")
        self.io.send_line("JOIN " + channel)
        self.io.log = self.log
        self.log.separator_line()
        self.show_page_titles = not no_show_page_titles
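
    # Illustration (not part of the original file, credentials are made up):
    # SASL PLAIN sends "authorization id \0 authentication id \0 password"
    # base64-encoded, with the username doubling as both ids here. E.g. for
    # username "user" and SASL password "pass":
    #     base64.b64encode(b"user\0user\0pass")  ->  b"dXNlcgB1c2VyAHBhc3M="
    # so the bot would send "AUTHENTICATE dXNlcgB1c2VyAHBhc3M=".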

        def handle_privmsg(line):

            def notice(msg):
                line = "NOTICE " + target + " :" + msg
                self.io.send_line(line)

            target = line.sender
            if line.receiver != self.nickname:
                target = line.receiver
            msg = str.join(" ", line.tokens[3:])[1:]
            if self.show_page_titles:
                matches = re.findall(r"(https?://[^\s>]+)", msg)
                for i in range(len(matches)):
                    if handle_url(matches[i], notice):
                        notice("maximum number of urls to parse per "
                               "message reached")
524 if "!" == msg[0] and len(msg) > 1:
525 tokens = msg[1:].split()
526 argument = str.join(" ", tokens[1:])
527 handle_command(tokens[0], argument, notice, target, self)
529 if self.markov_input:
530 write_to_file(self.markovfile, "a", msg + "\n")

        line = self.io.recv_line()
        line = Line(line)
        if len(line.tokens) > 1:
            if line.tokens[0] == "PING":
                self.io.send_line("PONG " + line.tokens[1])
            elif line.tokens[1] == "PRIVMSG":
                handle_privmsg(line)
            elif line.tokens[1] == "353":
                names = line.tokens[5:]
                names[0] = names[0][1:]
                for i in range(len(names)):
                    names[i] = names[i].replace("@", "").replace("+", "")
                self.users_in_chan += names
            elif line.tokens[1] == "JOIN" and line.sender != self.nickname:
                self.users_in_chan += [line.sender]
            elif line.tokens[1] == "PART":
                self.users_in_chan.remove(line.sender)
            elif line.tokens[1] == "NICK":
                self.users_in_chan.remove(line.sender)
                self.users_in_chan += [line.receiver]


def parse_command_line_arguments():
    parser = argparse.ArgumentParser()
    parser.add_argument("-s", "--server", action="store", dest="server",
                        default=SERVER,
                        help="server or server net to connect to (default: "
                        + SERVER + ")")
    parser.add_argument("-p", "--port", action="store", dest="port", type=int,
                        default=PORT, help="port to connect to (default: "
                        + str(PORT) + ")")
    parser.add_argument("-c", "--channel", action="store", dest="channel",
                        default=CHANNEL, help="channel to join")
    parser.add_argument("-w", "--wait", action="store", dest="timeout",
                        type=int, default=TIMEOUT,
                        help="timeout in seconds after which to attempt "
                        "reconnect (default: " + str(TIMEOUT) + ")")
    parser.add_argument("-u", "--username", action="store", dest="username",
                        default=USERNAME, help="username to use (default: "
                        + USERNAME + ")")
    parser.add_argument("-n", "--nickname", action="store", dest="nickname",
                        default=NICKNAME, help="nickname to use (default: "
                        + NICKNAME + ")")
    parser.add_argument("-a", "--authenticate", action="store", dest="sasl",
                        default=None, help="SASL password (default: none)")
    parser.add_argument("-t", "--twtxtfile", action="store", dest="twtfile",
                        default=TWTFILE, help="twtxt file to use (default: "
                        + TWTFILE + ")")
    parser.add_argument("-d", "--dbdir", action="store", dest="dbdir",
                        default=DBDIR, help="directory to store DB files in")
    parser.add_argument("-r", "--rmlogs", action="store", dest="rmlogs",
                        type=int, default=0,
                        help="maximum age in seconds for logfiles in logs/ "
                        "(0 means: never delete, and is default)")
    parser.add_argument("-m", "--markov_store", action="store_true",
                        dest="markov_store",
                        help="log channel discussions for !markov input")
    parser.add_argument("--no-show-page-titles", action="store_true",
                        dest="no_show_page_titles",
                        help="do not show page titles")
    opts, unknown = parser.parse_known_args()
    return opts
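
# Illustration (not part of the original file; script name and values are
# assumptions): a typical invocation overriding a few of the defaults could
# look like
#     python3 plomlombot.py -s irc.example.net -p 6667 -c "#somechannel" \
#         -n somebot -m
# i.e. connect to irc.example.net:6667, join #somechannel as nick "somebot",
# and log the channel as !markov input.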

opts = parse_command_line_arguments()
while True:
    try:
        io = IO(opts.server, opts.port, opts.timeout)
        hash_server = hashlib.md5(opts.server.encode("utf-8")).hexdigest()
        dbdir = opts.dbdir + "/" + hash_server
        session = Session(io, opts.username, opts.nickname, opts.sasl,
                          opts.channel, opts.twtfile, dbdir, opts.rmlogs,
                          opts.markov_store, opts.no_show_page_titles)
    except ExceptionForRestart: