Compare commits


16 Commits
0.4.0 ... 0.4.2

Author SHA1 Message Date
FoxxMD
8e2fee6d50 Fix heartbeat interval 2021-07-06 16:52:45 -04:00
FoxxMD
ed8be6dda2 Refactor app/manager building for in-situ updates
* Separate manager instantiation from configuration flow so config can be reloaded
* Move wiki page parsing into manager for better encapsulation
* Check the wiki revision date on heartbeat, and during activity checks when the last check is older than one minute
* Catch config parsing issues and retry on next heartbeat
2021-07-06 16:28:18 -04:00
FoxxMD
00e38b5560 Use correct media property for anchor parsing 2021-07-06 12:59:46 -04:00
FoxxMD
9cac11f436 Implement author parsing for audio/podcast related media
Parse Spotify and anchor.fm media sources
2021-07-06 11:33:00 -04:00
FoxxMD
f591c3a05a Implement more powerful content parsing options
* Can get wiki pages from other subreddits
* Can fetch from an external url
2021-07-06 10:27:30 -04:00
FoxxMD
39fad91c7f Fix missing author criteria 2021-07-05 17:20:35 -04:00
FoxxMD
529b8fc03e Further improvements for subreddit name parsing
* Allow whitespace on either side of the regex value since it is automatically trimmed when extracting the capture group
* Implement sub name parsing everywhere subreddits can be specified and update documentation to remove prefix restrictions
2021-07-05 16:06:15 -04:00
FoxxMD
54eef5620d Update interfaces and documentation for new filters and item states 2021-07-05 15:39:08 -04:00
FoxxMD
99537fbebb Fix missing filtering behavior on repeat and add remove check
* Add missing include/exclude behavior for counting repeat submissions
* Add parameter to enable user to specify if removed activities should be included
2021-07-05 15:38:49 -04:00
FoxxMD
4c3f9ee082 Fix remove check on remove action 2021-07-05 15:37:37 -04:00
FoxxMD
5b028b6a45 Fix some item state checks and implement subreddit filtering on window
* Fix how removed check is performed since there are different behaviors for submission/comment
* Add filtered and deleted states for item check
* Add subreddit filters (include/exclude) on window criteria
2021-07-05 15:37:19 -04:00
FoxxMD
859bcf9213 Implement subreddit name parser to allow more lax input
Use regex to extract subreddit name regardless of prefix
2021-07-05 15:34:59 -04:00
FoxxMD
e790f7c260 Fix issue when no activities are retrieved for attribution rule 2021-06-25 15:20:31 -04:00
FoxxMD
20358294ce Fix domain ident aliases when not aggregating parent domain 2021-06-25 10:29:19 -04:00
FoxxMD
e0f18dc0a2 Add typescript dep 2021-06-25 10:28:39 -04:00
FoxxMD
9a788a8323 Add file logging for uncaught rejection/exceptions 2021-06-23 16:05:58 -04:00
19 changed files with 860 additions and 205 deletions

package-lock.json (generated)
View File

@@ -18,12 +18,14 @@
"json5": "^2.2.0",
"memory-cache": "^0.2.0",
"mustache": "^4.2.0",
"node-fetch": "^2.6.1",
"object-hash": "^2.2.0",
"p-event": "^4.2.0",
"pako": "^0.2.6",
"safe-stable-stringify": "^1.1.1",
"snoostorm": "^1.5.2",
"snoowrap": "^1.23.0",
"typescript": "^4.3.4",
"winston": "FoxxMD/winston#fbab8de969ecee578981c77846156c7f43b5f01e",
"winston-daily-rotate-file": "^4.5.5",
"zlib": "^1.0.5"
@@ -36,6 +38,7 @@
"@types/minimist": "^1.2.1",
"@types/mustache": "^4.1.1",
"@types/node": "^15.6.1",
"@types/node-fetch": "^2.5.10",
"@types/object-hash": "^2.1.0",
"@types/pako": "^1.0.1",
"ts-auto-guard": "*",
@@ -118,6 +121,19 @@
"typescript": "~4.1.3"
}
},
"node_modules/@ts-morph/common/node_modules/typescript": {
"version": "4.1.6",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.1.6.tgz",
"integrity": "sha512-pxnwLxeb/Z5SP80JDRzVjh58KsM6jZHRAOtTpS7sXLS4ogXNKC9ANxHHZqLLeVHZN35jCtI4JdmLLbLiC1kBow==",
"dev": true,
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
},
"engines": {
"node": ">=4.2.0"
}
},
"node_modules/@tsconfig/node14": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.1.tgz",
@@ -190,6 +206,30 @@
"integrity": "sha512-zjQ69G564OCIWIOHSXyQEEDpdpGl+G348RAKY0XXy9Z5kU9Vzv1GMNnkar/ZJ8dzXB3COzD9Mo9NtRZ4xfgUww==",
"dev": true
},
"node_modules/@types/node-fetch": {
"version": "2.5.10",
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.5.10.tgz",
"integrity": "sha512-IpkX0AasN44hgEad0gEF/V6EgR5n69VEqPEgnmoM8GsIGro3PowbWs4tR6IhxUTyPLpOn+fiGG6nrQhcmoCuIQ==",
"dev": true,
"dependencies": {
"@types/node": "*",
"form-data": "^3.0.0"
}
},
"node_modules/@types/node-fetch/node_modules/form-data": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz",
"integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==",
"dev": true,
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"mime-types": "^2.1.12"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/@types/object-hash": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@types/object-hash/-/object-hash-2.1.0.tgz",
@@ -1255,6 +1295,14 @@
"mustache": "bin/mustache"
}
},
"node_modules/node-fetch": {
"version": "2.6.1",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz",
"integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==",
"engines": {
"node": "4.x || >=6.0.0"
}
},
"node_modules/oauth-sign": {
"version": "0.9.0",
"resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz",
@@ -1827,19 +1875,6 @@
"node": ">=10.0.0"
}
},
"node_modules/ts-json-schema-generator/node_modules/typescript": {
"version": "4.3.2",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.3.2.tgz",
"integrity": "sha512-zZ4hShnmnoVnAHpVHWpTcxdv7dWP60S2FsydQLV8V5PbS3FifjWFFRiHSWpDJahly88PRyV5teTSLoq4eG7mKw==",
"dev": true,
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
},
"engines": {
"node": ">=4.2.0"
}
},
"node_modules/ts-morph": {
"version": "9.1.0",
"resolved": "https://registry.npmjs.org/ts-morph/-/ts-morph-9.1.0.tgz",
@@ -1906,10 +1941,9 @@
"integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q="
},
"node_modules/typescript": {
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.1.5.tgz",
"integrity": "sha512-6OSu9PTIzmn9TCDiovULTnET6BgXtDYL4Gg4szY+cGsc3JP1dQL8qvE8kShTRx1NIw4Q9IBHlwODjkjWEtMUyA==",
"dev": true,
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.3.4.tgz",
"integrity": "sha512-uauPG7XZn9F/mo+7MrsRjyvbxFpzemRjKEZXS4AK83oP2KKOJPvb+9cO/gmnv8arWZvhnjVOXz7B49m1l0e9Ew==",
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
@@ -2297,6 +2331,14 @@
"mkdirp": "^1.0.4",
"multimatch": "^5.0.0",
"typescript": "~4.1.3"
},
"dependencies": {
"typescript": {
"version": "4.1.6",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.1.6.tgz",
"integrity": "sha512-pxnwLxeb/Z5SP80JDRzVjh58KsM6jZHRAOtTpS7sXLS4ogXNKC9ANxHHZqLLeVHZN35jCtI4JdmLLbLiC1kBow==",
"dev": true
}
}
},
"@tsconfig/node14": {
@@ -2371,6 +2413,29 @@
"integrity": "sha512-zjQ69G564OCIWIOHSXyQEEDpdpGl+G348RAKY0XXy9Z5kU9Vzv1GMNnkar/ZJ8dzXB3COzD9Mo9NtRZ4xfgUww==",
"dev": true
},
"@types/node-fetch": {
"version": "2.5.10",
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.5.10.tgz",
"integrity": "sha512-IpkX0AasN44hgEad0gEF/V6EgR5n69VEqPEgnmoM8GsIGro3PowbWs4tR6IhxUTyPLpOn+fiGG6nrQhcmoCuIQ==",
"dev": true,
"requires": {
"@types/node": "*",
"form-data": "^3.0.0"
},
"dependencies": {
"form-data": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz",
"integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==",
"dev": true,
"requires": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"mime-types": "^2.1.12"
}
}
}
},
"@types/object-hash": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@types/object-hash/-/object-hash-2.1.0.tgz",
@@ -3241,6 +3306,11 @@
"resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz",
"integrity": "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ=="
},
"node-fetch": {
"version": "2.6.1",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz",
"integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw=="
},
"oauth-sign": {
"version": "0.9.0",
"resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz",
@@ -3651,14 +3721,6 @@
"glob": "^7.1.7",
"json-stable-stringify": "^1.0.1",
"typescript": "~4.3.2"
},
"dependencies": {
"typescript": {
"version": "4.3.2",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.3.2.tgz",
"integrity": "sha512-zZ4hShnmnoVnAHpVHWpTcxdv7dWP60S2FsydQLV8V5PbS3FifjWFFRiHSWpDJahly88PRyV5teTSLoq4eG7mKw==",
"dev": true
}
}
},
"ts-morph": {
@@ -3712,10 +3774,9 @@
"integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q="
},
"typescript": {
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.1.5.tgz",
"integrity": "sha512-6OSu9PTIzmn9TCDiovULTnET6BgXtDYL4Gg4szY+cGsc3JP1dQL8qvE8kShTRx1NIw4Q9IBHlwODjkjWEtMUyA==",
"dev": true
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.3.4.tgz",
"integrity": "sha512-uauPG7XZn9F/mo+7MrsRjyvbxFpzemRjKEZXS4AK83oP2KKOJPvb+9cO/gmnv8arWZvhnjVOXz7B49m1l0e9Ew=="
},
"typescript-json-schema": {
"version": "0.50.1",

View File

@@ -34,12 +34,14 @@
"json5": "^2.2.0",
"memory-cache": "^0.2.0",
"mustache": "^4.2.0",
"node-fetch": "^2.6.1",
"object-hash": "^2.2.0",
"p-event": "^4.2.0",
"pako": "^0.2.6",
"safe-stable-stringify": "^1.1.1",
"snoostorm": "^1.5.2",
"snoowrap": "^1.23.0",
"typescript": "^4.3.4",
"winston": "FoxxMD/winston#fbab8de969ecee578981c77846156c7f43b5f01e",
"winston-daily-rotate-file": "^4.5.5",
"zlib": "^1.0.5"
@@ -52,6 +54,7 @@
"@types/minimist": "^1.2.1",
"@types/mustache": "^4.1.1",
"@types/node": "^15.6.1",
"@types/node-fetch": "^2.5.10",
"@types/object-hash": "^2.1.0",
"@types/pako": "^1.0.1",
"ts-auto-guard": "*",

View File

@@ -2,6 +2,7 @@ import {ActionJson, ActionConfig} from "./index";
import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../Rule";
import {activityIsRemoved} from "../Utils/SnoowrapUtils";
export class RemoveAction extends Action {
getKind() {
@@ -11,7 +12,7 @@ export class RemoveAction extends Action {
async process(item: Comment | Submission, ruleResults: RuleResult[]): Promise<void> {
// issue with snoowrap typings, doesn't think prop exists on Submission
// @ts-ignore
if (item.removed === true) {
if (activityIsRemoved(item)) {
this.logger.warn('Item is already removed');
return;
}
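
The `activityIsRemoved` helper imported from `SnoowrapUtils` is not shown in this compare. Based on commit 5b028b6a45 ("Fix how removed check is performed since there are different behaviors for submission/comment"), a minimal sketch of what it might look like; the property access and the `[removed]` body check are assumptions, not confirmed by this diff:

```typescript
import { Comment, Submission } from "snoowrap";

// Hypothetical sketch only -- the real helper lives in Utils/SnoowrapUtils and may differ.
export const activityIsRemoved = (item: Submission | Comment): boolean => {
    if ('title' in item) {
        // Submission: snoowrap typings do not declare `removed`, hence the cast
        return (item as any).removed === true;
    }
    // Comment: moderator removal is often only visible through the body placeholder
    return (item as any).removed === true || (item as Comment).body === '[removed]';
};
```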

View File

@@ -1,13 +1,14 @@
import Snoowrap from "snoowrap";
import Snoowrap, { Subreddit } from "snoowrap";
import {Manager} from "./Subreddit/Manager";
import winston, {Logger} from "winston";
import {argParseInt, labelledFormat, parseBool, parseFromJsonOrYamlToObject, sleep} from "./util";
import {argParseInt, labelledFormat, parseBool, parseFromJsonOrYamlToObject, parseSubredditName, sleep} from "./util";
import snoowrap from "snoowrap";
import pEvent from "p-event";
import EventEmitter from "events";
import CacheManager from './Subreddit/SubredditResources';
import dayjs, {Dayjs} from "dayjs";
import LoggedError from "./Utils/LoggedError";
import ConfigParseError from "./Utils/ConfigParseError";
const {transports} = winston;
@@ -64,6 +65,7 @@ export class App {
const myTransports = [
consoleTransport,
];
let errorTransports = [];
if (logDir !== false) {
let logPath = logDir;
@@ -80,6 +82,7 @@ export class App {
});
// @ts-ignore
myTransports.push(rotateTransport);
errorTransports.push(rotateTransport);
}
const loggerOptions = {
@@ -95,7 +98,9 @@ export class App {
debug: 5,
trace: 5,
silly: 6
}
},
exceptionHandlers: errorTransports,
rejectionHandlers: errorTransports,
};
winston.loggers.add('default', loggerOptions);
@@ -114,7 +119,7 @@ export class App {
subredditsArg = subreddits.split(',');
}
}
this.subreddits = subredditsArg;
this.subreddits = subredditsArg.map(parseSubredditName);
const creds = {
userAgent: `web:contextBot:${version}`,
@@ -145,12 +150,12 @@ export class App {
}
this.logger.info(`/u/${name} is a moderator of these subreddits: ${availSubs.map(x => x.display_name_prefixed).join(', ')}`);
let subsToRun = [];
const subsToUse = subreddits.length > 0 ? subreddits : this.subreddits;
let subsToRun: Subreddit[] = [];
const subsToUse = subreddits.length > 0 ? subreddits.map(parseSubredditName) : this.subreddits;
if (subsToUse.length > 0) {
this.logger.info(`User-defined subreddit constraints detected (CLI argument or environmental variable), will try to run on: ${subsToUse.join(', ')}`);
for (const sub of subsToUse) {
const asub = availSubs.find(x => x.display_name.toLowerCase() === sub.trim().toLowerCase())
const asub = availSubs.find(x => x.display_name.toLowerCase() === sub.toLowerCase())
if (asub === undefined) {
this.logger.warn(`Will not run on ${sub} because is not modded by, or does not have appropriate permissions to mod with, for this client.`);
} else {
@@ -169,9 +174,11 @@ export class App {
// get configs for subs we want to run on and build/validate them
for (const sub of subsToRun) {
let content = undefined;
let wiki;
try {
const wiki = sub.getWikiPage(this.wikiLocation);
content = await wiki.content_md;
// @ts-ignore
wiki = await sub.getWikiPage(this.wikiLocation).fetch();
content = wiki.content_md;
} catch (err) {
this.logger.error(`[${sub.display_name_prefixed}] Could not read wiki configuration. Please ensure the page https://reddit.com${sub.url}wiki/${this.wikiLocation} exists and is readable -- error: ${err.message}`);
continue;
@@ -192,7 +199,10 @@ export class App {
}
try {
subSchedule.push(new Manager(sub, this.client, this.logger, configObj, {dryRun: this.dryRun}));
const manager = new Manager(sub, this.client, this.logger, configObj, {dryRun: this.dryRun});
manager.lastWikiCheck = dayjs();
manager.lastWikiRevision = dayjs.unix(wiki.revision_date);
subSchedule.push(manager);
} catch (err) {
if(!(err instanceof LoggedError)) {
this.logger.error(`[${sub.display_name_prefixed}] Config was not valid`, err);
@@ -213,6 +223,17 @@ export class App {
} else {
this.logger.info(heartbeat);
}
for(const s of this.subManagers) {
try {
await s.parseConfiguration();
if(!s.running) {
s.handle();
}
} catch (err) {
s.stop();
this.logger.info('Will retry parsing config on next heartbeat...');
}
}
}
} finally {
this.heartBeating = false;
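
`parseSubredditName` (imported from `./util` above) is used to normalize user-supplied subreddit names, but its body is outside this compare. A minimal sketch based on commits 859bcf9213 and 529b8fc03e (regex extraction regardless of prefix, whitespace tolerated because only the capture group is kept); the exact regex is an assumption:

```typescript
// Hypothetical sketch of the util helper; the real implementation in src/util may differ.
const SUBREDDIT_NAME_REGEX = /^\s*(?:\/?r\/)?([\w-]+)\s*$/i;

export const parseSubredditName = (val: string): string => {
    const match = val.match(SUBREDDIT_NAME_REGEX);
    if (match === null) {
        throw new Error(`Could not parse subreddit name from '${val}'`);
    }
    return match[1]; // capture group excludes prefix and surrounding whitespace
};

// parseSubredditName(' /r/mealtimevideos ') => 'mealtimevideos'
// parseSubredditName('askscience')          => 'askscience'
```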

View File

@@ -108,6 +108,10 @@ export class Author implements AuthorCriteria {
isMod?: boolean;
userNotes?: UserNoteCriteria[];
age?: string;
commentKarma?: string;
linkKarma?: string;
totalKarma?: string;
verified?: boolean;
constructor(options: AuthorCriteria) {
this.name = options.name;
@@ -116,6 +120,9 @@ export class Author implements AuthorCriteria {
this.isMod = options.isMod;
this.userNotes = options.userNotes;
this.age = options.age;
this.commentKarma = options.commentKarma;
this.linkKarma = options.linkKarma;
this.totalKarma = options.totalKarma;
}
}
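
The new author criteria fields mirror the existing `age` field. A hedged illustration of how they might be used, assuming the `Author` class above is in scope and that karma/age values accept the same comparison-string format seen elsewhere in this changeset (e.g. `'>= 1'`); the exact accepted syntax is not shown in this diff:

```typescript
// Illustrative values only; field semantics follow the AuthorCriteria interface above.
const criteria: AuthorCriteria = {
    age: '> 30 days',
    commentKarma: '< 50',
    totalKarma: '< 100',
    verified: false,
};
const author = new Author(criteria);
```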

View File

@@ -93,7 +93,7 @@ export interface ActivityWindowCriteria {
*
* **If `any` then it will retrieve Activities until one of the criteria is met, whichever occurs first**
*
* EX `{count: 100, duration: {days: 90}}`:
* EX `{"count": 100, duration: "90 days"}`:
* * If 90 days of activities = 40 activities => returns 40 activities
* * If 100 activities is only 20 days => 100 activities
*
@@ -101,7 +101,7 @@ export interface ActivityWindowCriteria {
*
* Effectively, whichever criteria produces the most Activities...
*
* EX `{count: 100, duration: {days: 90}}`:
* EX `{"count": 100, duration: "90 days"}`:
* * If at 90 days of activities => 40 activities, continue retrieving results until 100 => results in >90 days of activities
* * If at 100 activities => 20 days of activities, continue retrieving results until 90 days => results in >100 activities
*
@@ -109,6 +109,28 @@ export interface ActivityWindowCriteria {
* @default any
* */
satisfyOn?: 'any' | 'all';
/**
* Filter which subreddits (case-insensitive) Activities are retrieved from.
*
* **Note:** Filtering occurs **before** `duration/count` checks are performed.
* */
subreddits?: {
/**
* Include only results from these subreddits
*
* @examples [["mealtimevideos","askscience"]]
* */
include?: string[],
/**
* Exclude any results from these subreddits
*
* **Note:** `exclude` is ignored if `include` is present
*
* @examples [["mealtimevideos","askscience"]]
* */
exclude?: string[],
}
}
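
Putting the documented fields together, a window specification could look like the following sketch. It only uses properties documented above (`count`, `duration`, `satisfyOn`, `subreddits.include`); the type accepted for `duration` is assumed to include the `"90 days"` string form shown in the examples:

```typescript
import { ActivityWindowCriteria } from "./Common/interfaces"; // adjust relative path as needed

const windowCriteria: ActivityWindowCriteria = {
    count: 100,
    duration: '90 days',
    satisfyOn: 'any', // stop as soon as either criteria is met (the documented default)
    subreddits: {
        // filtering happens before the duration/count checks are performed
        include: ['mealtimevideos', 'askscience'],
    },
};
```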
/**
@@ -200,12 +222,23 @@ export interface RichContent {
/**
* The Content to submit for this Action. Content is interpreted as reddit-flavored Markdown.
*
* If value starts with `wiki:` then the proceeding value will be used to get a wiki page
* If the value starts with `wiki:` then the value that follows will be used to get a wiki page from the current subreddit
*
* * EX `wiki:botconfig/mybot` tries to get `https://reddit.com/r/currentSubreddit/wiki/botconfig/mybot`
*
* If the value starts with `wiki:` and ends with `|someValue` then `someValue` will be used as the base subreddit for the wiki page
*
* * EX `wiki:replytemplates/test|ContextModBot` tries to get `https://reddit.com/r/ContextModBot/wiki/replytemplates/test`
*
* If the value starts with `url:` then the value is fetched as an external URL and the response is expected to be raw text
*
* * EX `url:https://pastebin.com/raw/38qfL7mL` tries to get the text response of `https://pastebin.com/raw/38qfL7mL`
*
* If none of the above is used, the value is treated as the raw content
*
* * EX `wiki:botconfig/mybot` tries to get `https://reddit.com/mySubredditExample/wiki/botconfig/mybot`
* * EX `this is **bold** markdown text` => "this is **bold** markdown text"
*
* Content is rendered using [mustache](https://github.com/janl/mustache.js/#templates) to enable [Action Templating](https://github.com/FoxxMD/reddit-context-bot#action-templating).
* All Content is rendered using [mustache](https://github.com/janl/mustache.js/#templates) to enable [Action Templating](https://github.com/FoxxMD/reddit-context-bot#action-templating).
*
* The following properties are always available in the template (view individual Rules to see rule-specific template data):
* ```
@@ -227,9 +260,9 @@ export interface RequiredRichContent extends RichContent {
}
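
For reference, the four documented content forms from commit f591c3a05a, using the exact example values from the doc comment above:

```typescript
const fromCurrentSubWiki = 'wiki:botconfig/mybot';                   // r/<currentSubreddit>/wiki/botconfig/mybot
const fromOtherSubWiki   = 'wiki:replytemplates/test|ContextModBot'; // r/ContextModBot/wiki/replytemplates/test
const fromExternalUrl    = 'url:https://pastebin.com/raw/38qfL7mL';  // raw text fetched from the URL
const rawMarkdown        = 'this is **bold** markdown text';         // used verbatim
```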
/**
* A list of subreddits (case-insensitive) to look for. Do not include "r/" prefix.
* A list of subreddits (case-insensitive) to look for.
*
* EX to match against /r/mealtimevideos and /r/askscience use ["mealtimevideos","askscience"]
* EX ["mealtimevideos","askscience"]
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* */
@@ -237,9 +270,9 @@ export type SubredditList = string[];
export interface SubredditCriteria {
/**
* A list of subreddits (case-insensitive) to look for. Do not include "r/" prefix.
* A list of Subreddits (by name, case-insensitive) to look for.
*
* EX to match against /r/mealtimevideos and /r/askscience use ["mealtimevideos","askscience"]
* EX ["mealtimevideos","askscience"]
* @examples [["mealtimevideos","askscience"]]
* @minItems 1
* */
@@ -506,6 +539,8 @@ export interface ChecksActivityState {
export interface ActivityState {
removed?: boolean
filtered?: boolean
deleted?: boolean
locked?: boolean
spam?: boolean
stickied?: boolean
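
With `filtered` and `deleted` added, an item-state criteria object can now distinguish filtered-but-not-removed items. A minimal sketch using only fields declared on `ActivityState` above:

```typescript
// Example criteria: the item is filtered but has not been removed or deleted.
const filteredOnly: ActivityState = {
    filtered: true,
    removed: false,
    deleted: false,
};
```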
@@ -544,5 +579,6 @@ export interface DomainInfo {
display: string,
domain: string,
aliases: string[],
provider?: string
provider?: string,
mediaType?: string
}

View File

@@ -4,7 +4,14 @@ import {Rule, RuleOptions, RuleResult} from "./index";
import Submission from "snoowrap/dist/objects/Submission";
import {getAttributionIdentifier} from "../Utils/SnoowrapUtils";
import dayjs from "dayjs";
import {comparisonTextOp, FAIL, formatNumber, parseGenericValueOrPercentComparison, PASS} from "../util";
import {
comparisonTextOp,
FAIL,
formatNumber,
parseGenericValueOrPercentComparison,
parseSubredditName,
PASS
} from "../util";
import { Comment } from "snoowrap/dist/objects";
import SimpleError from "../Utils/SimpleError";
@@ -70,21 +77,20 @@ export interface AttributionCriteria {
domainsCombined?: boolean,
/**
* Only include Activities from this list of Subreddits.
* Only include Activities from this list of Subreddits (by name, case-insensitive)
*
* A list of subreddits (case-insensitive) to look for. Do not include "r/" prefix.
*
* EX to match against /r/mealtimevideos and /r/askscience use ["mealtimevideos","askscience"]
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* */
include?: string[],
/**
* Do not include Activities from this list of Subreddits. Will be ignored if `include` is present.
* Do not include Activities from this list of Subreddits (by name, case-insensitive)
*
* A list of subreddits (case-insensitive) to look for. Do not include "r/" prefix.
* Will be ignored if `include` is present.
*
* EX to match against /r/mealtimevideos and /r/askscience use ["mealtimevideos","askscience"]
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* */
@@ -176,8 +182,8 @@ export class AttributionRule extends Rule {
exclude: excludeRaw = [],
} = criteria;
const include = includeRaw.map(x => x.toLowerCase());
const exclude = excludeRaw.map(x => x.toLowerCase());
const include = includeRaw.map(x => parseSubredditName(x).toLowerCase());
const exclude = excludeRaw.map(x => parseSubredditName(x).toLowerCase());
const {operator, value, isPercent, extra = ''} = parseGenericValueOrPercentComparison(threshold);
@@ -194,6 +200,11 @@ export class AttributionRule extends Rule {
let activityTotal = 0;
let firstActivity, lastActivity;
if(activities.length === 0) {
this.logger.debug(`No activities retrieved for criteria`);
continue;
}
activityTotal = activities.length;
firstActivity = activities[0];
lastActivity = activities[activities.length - 1];

View File

@@ -8,7 +8,7 @@ import {
comparisonTextOp,
FAIL,
formatNumber,
parseGenericValueOrPercentComparison,
parseGenericValueOrPercentComparison, parseSubredditName,
PASS,
percentFromString
} from "../util";
@@ -85,8 +85,8 @@ export class HistoryRule extends Rule {
if (this.criteria.length === 0) {
throw new Error('Must provide at least one HistoryCriteria');
}
this.include = include.map(x => x.toLowerCase());
this.exclude = exclude.map(x => x.toLowerCase());
this.include = include.map(x => parseSubredditName(x).toLowerCase());
this.exclude = exclude.map(x => parseSubredditName(x).toLowerCase());
}
getKind(): string {
@@ -299,21 +299,17 @@ interface HistoryConfig {
condition?: 'AND' | 'OR'
/**
* Only include Submissions from this list of Subreddits.
* Only include Submissions from this list of Subreddits (by name, case-insensitive)
*
* A list of subreddits (case-insensitive) to look for. Do not include "r/" prefix.
*
* EX to match against /r/mealtimevideos and /r/askscience use ["mealtimevideos","askscience"]
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* */
include?: string[],
/**
* Do not include Submissions from this list of Subreddits.
* Do not include Submissions from this list of Subreddits (by name, case-insensitive)
*
* A list of subreddits (case-insensitive) to look for. Do not include "r/" prefix.
*
* EX to match against /r/mealtimevideos and /r/askscience use ["mealtimevideos","askscience"]
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* */

View File

@@ -4,7 +4,7 @@ import Submission from "snoowrap/dist/objects/Submission";
import {
activityWindowText,
comparisonTextOp, FAIL, formatNumber,
parseGenericValueOrPercentComparison,
parseGenericValueOrPercentComparison, parseSubredditName,
parseUsableLinkIdentifier,
PASS
} from "../util";
@@ -97,7 +97,7 @@ export class RecentActivityRule extends Rule {
let currCount = 0;
const presentSubs = [];
const {threshold = '>= 1', subreddits = []} = triggerSet;
for (const sub of subreddits) {
for (const sub of subreddits.map(x => parseSubredditName(x))) {
const isub = sub.toLowerCase();
const {[isub]: tSub = []} = groupedActivity;
if (tSub.length > 0) {

View File

@@ -4,7 +4,7 @@ import {Comment} from "snoowrap";
import {
activityWindowText,
comparisonTextOp, FAIL,
parseGenericValueComparison,
parseGenericValueComparison, parseSubredditName,
parseUsableLinkIdentifier as linkParser, PASS
} from "../../util";
import {ActivityWindow, ActivityWindowType, ReferenceSubmission} from "../../Common/interfaces";
@@ -45,6 +45,7 @@ export class RepeatActivityRule extends SubmissionRule {
lookAt: 'submissions' | 'all';
include: string[];
exclude: string[];
keepRemoved: boolean;
constructor(options: RepeatActivityOptions) {
super(options);
@@ -55,14 +56,16 @@ export class RepeatActivityRule extends SubmissionRule {
useSubmissionAsReference = true,
lookAt = 'all',
include = [],
exclude = []
exclude = [],
keepRemoved = false,
} = options;
this.keepRemoved = keepRemoved;
this.threshold = threshold;
this.window = window;
this.gapAllowance = gapAllowance;
this.useSubmissionAsReference = useSubmissionAsReference;
this.include = include;
this.exclude = exclude;
this.include = include.map(x => parseSubredditName(x).toLowerCase());
this.exclude = exclude.map(x => parseSubredditName(x).toLowerCase());
this.lookAt = lookAt;
}
@@ -88,13 +91,20 @@ export class RepeatActivityRule extends SubmissionRule {
return Promise.resolve([false, this.getResult(false)]);
}
let filterFunc = (x: any) => true;
if(this.include.length > 0) {
filterFunc = (x: Submission|Comment) => this.include.includes(x.subreddit.display_name.toLowerCase());
} else if(this.exclude.length > 0) {
filterFunc = (x: Submission|Comment) => !this.exclude.includes(x.subreddit.display_name.toLowerCase());
}
let activities: (Submission | Comment)[] = [];
switch (this.lookAt) {
case 'submissions':
activities = await this.resources.getAuthorSubmissions(item.author, {window: this.window});
activities = await this.resources.getAuthorSubmissions(item.author, {window: this.window, keepRemoved: this.keepRemoved});
break;
default:
activities = await this.resources.getAuthorActivities(item.author, {window: this.window});
activities = await this.resources.getAuthorActivities(item.author, {window: this.window, keepRemoved: this.keepRemoved});
break;
}
@@ -102,16 +112,18 @@ export class RepeatActivityRule extends SubmissionRule {
const {openSets = [], allSets = []} = acc;
let identifier = getActivityIdentifier(activity);
const validSub = filterFunc(activity);
let updatedAllSets = [...allSets];
let updatedOpenSets = [];
let updatedOpenSets: RepeatActivityData[] = [];
let currIdentifierInOpen = false;
const bufferedActivities = this.gapAllowance === undefined || this.gapAllowance === 0 ? [] : activities.slice(Math.max(0, index - this.gapAllowance), Math.max(0, index));
for (const o of openSets) {
if (o.identifier === identifier) {
if (o.identifier === identifier && validSub) {
updatedOpenSets.push({...o, sets: [...o.sets, activity]});
currIdentifierInOpen = true;
} else if (bufferedActivities.some(x => getActivityIdentifier(x) === identifier)) {
} else if (bufferedActivities.some(x => getActivityIdentifier(x) === identifier) && validSub) {
updatedOpenSets.push(o);
} else {
updatedAllSets.push(o);
@@ -233,21 +245,17 @@ interface RepeatActivityConfig extends ActivityWindow, ReferenceSubmission {
* */
gapAllowance?: number,
/**
* Only include Submissions from this list of Subreddits.
* Only include Submissions from this list of Subreddits (by name, case-insensitive)
*
* A list of subreddits (case-insensitive) to look for. Do not include "r/" prefix.
*
* EX to match against /r/mealtimevideos and /r/askscience use ["mealtimevideos","askscience"]
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* */
include?: string[],
/**
* Do not include Submissions from this list of Subreddits.
* Do not include Submissions from this list of Subreddits (by name, case-insensitive)
*
* A list of subreddits (case-insensitive) to look for. Do not include "r/" prefix.
*
* EX to match against /r/mealtimevideos and /r/askscience use ["mealtimevideos","askscience"]
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* */
@@ -262,6 +270,16 @@ interface RepeatActivityConfig extends ActivityWindow, ReferenceSubmission {
* @default all
* */
lookAt?: 'submissions' | 'all',
/**
* Count submissions/comments that have previously been removed.
*
* By default all `Submissions/Comments` that are in a `removed` state will be filtered from `window` (only applies to subreddits you mod).
*
* Setting this to `true` can be useful if you also want to detect removed repeat posts by a user, for example when automoderator removes multiple consecutive submissions for not following the title format correctly.
*
* @default false
* */
keepRemoved?: boolean
}
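
As described in the `keepRemoved` documentation above, a repeat-activity rule can now opt in to counting removed activities. A hedged configuration sketch; the `threshold`/`window` values are illustrative and assume the comparison/duration formats used elsewhere in this changeset:

```typescript
// Illustrative only; properties follow RepeatActivityConfig above.
const repeatConfig: RepeatActivityConfig = {
    threshold: '> 3',
    window: '90 days',
    lookAt: 'submissions',
    gapAllowance: 1,
    include: ['mealtimevideos'], // bare name or r/-prefixed are both accepted now
    keepRemoved: true,           // keep removed activities in the window instead of filtering them out
};
```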
export interface RepeatActivityOptions extends RepeatActivityConfig, RuleOptions {

View File

@@ -135,9 +135,15 @@
"approved": {
"type": "boolean"
},
"deleted": {
"type": "boolean"
},
"distinguished": {
"type": "boolean"
},
"filtered": {
"type": "boolean"
},
"locked": {
"type": "boolean"
},
@@ -168,9 +174,15 @@
"approved": {
"type": "boolean"
},
"deleted": {
"type": "boolean"
},
"distinguished": {
"type": "boolean"
},
"filtered": {
"type": "boolean"
},
"is_self": {
"type": "boolean"
},

View File

@@ -41,7 +41,7 @@
},
"satisfyOn": {
"default": "any",
"description": "Define the condition under which both criteria are considered met\n\n**If `any` then it will retrieve Activities until one of the criteria is met, whichever occurs first**\n\nEX `{count: 100, duration: {days: 90}}`:\n* If 90 days of activities = 40 activities => returns 40 activities\n* If 100 activities is only 20 days => 100 activities\n\n**If `all` then both criteria must be met.**\n\nEffectively, whichever criteria produces the most Activities...\n\nEX `{count: 100, duration: {days: 90}}`:\n* If at 90 days of activities => 40 activities, continue retrieving results until 100 => results in >90 days of activities\n* If at 100 activities => 20 days of activities, continue retrieving results until 90 days => results in >100 activities",
"description": "Define the condition under which both criteria are considered met\n\n**If `any` then it will retrieve Activities until one of the criteria is met, whichever occurs first**\n\nEX `{\"count\": 100, duration: \"90 days\"}`:\n* If 90 days of activities = 40 activities => returns 40 activities\n* If 100 activities is only 20 days => 100 activities\n\n**If `all` then both criteria must be met.**\n\nEffectively, whichever criteria produces the most Activities...\n\nEX `{\"count\": 100, duration: \"90 days\"}`:\n* If at 90 days of activities => 40 activities, continue retrieving results until 100 => results in >90 days of activities\n* If at 100 activities => 20 days of activities, continue retrieving results until 90 days => results in >100 activities",
"enum": [
"all",
"any"
@@ -50,6 +50,38 @@
"any"
],
"type": "string"
},
"subreddits": {
"description": "Filter which subreddits (case-insensitive) Activities are retrieved from.\n\n**Note:** Filtering occurs **before** `duration/count` checks are performed.",
"properties": {
"exclude": {
"description": "Exclude any results from these subreddits\n\n**Note:** `exclude` is ignored if `include` is present",
"examples": [
[
"mealtimevideos",
"askscience"
]
],
"items": {
"type": "string"
},
"type": "array"
},
"include": {
"description": "Include only results from these subreddits",
"examples": [
[
"mealtimevideos",
"askscience"
]
],
"items": {
"type": "string"
},
"type": "array"
}
},
"type": "object"
}
},
"type": "object"
@@ -170,7 +202,7 @@
"type": "boolean"
},
"exclude": {
"description": "Do not include Activities from this list of Subreddits. Will be ignored if `include` is present.\n\nA list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "Do not include Activities from this list of Subreddits (by name, case-insensitive)\n\nWill be ignored if `include` is present.\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"examples": [
"mealtimevideos",
"askscience"
@@ -182,7 +214,7 @@
"type": "array"
},
"include": {
"description": "Only include Activities from this list of Subreddits.\n\nA list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "Only include Activities from this list of Subreddits (by name, case-insensitive)\n\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"examples": [
"mealtimevideos",
"askscience"
@@ -661,7 +693,7 @@
]
},
"content": {
"description": "The Content to submit for this Action. Content is interpreted as reddit-flavored Markdown.\n\nIf value starts with `wiki:` then the proceeding value will be used to get a wiki page\n\n * EX `wiki:botconfig/mybot` tries to get `https://reddit.com/mySubredditExample/wiki/botconfig/mybot`\n * EX `this is **bold** markdown text` => \"this is **bold** markdown text\"\n\nContent is rendered using [mustache](https://github.com/janl/mustache.js/#templates) to enable [Action Templating](https://github.com/FoxxMD/reddit-context-bot#action-templating).\n\nThe following properties are always available in the template (view individual Rules to see rule-specific template data):\n```\nitem.kind => The type of Activity that was checked (comment/submission)\nitem.author => The name of the Author of the Activity EX FoxxMD\nitem.permalink => A permalink URL to the Activity EX https://reddit.com/r/yourSub/comments/o1h0i0/title_name/1v3b7x\nitem.url => If the Activity is Link Sumbission then the external URL\nitem.title => If the Activity is a Submission then the title of that Submission\nrules => An object containing RuleResults of all the rules run for this check. See Action Templating for more details on naming\n```",
"description": "The Content to submit for this Action. Content is interpreted as reddit-flavored Markdown.\n\nIf value starts with `wiki:` then the proceeding value will be used to get a wiki page from the current subreddit\n\n * EX `wiki:botconfig/mybot` tries to get `https://reddit.com/r/currentSubreddit/wiki/botconfig/mybot`\n\nIf the value starts with `wiki:` and ends with `|someValue` then `someValue` will be used as the base subreddit for the wiki page\n\n* EX `wiki:replytemplates/test|ContextModBot` tries to get `https://reddit.com/r/ContextModBot/wiki/replytemplates/test`\n\nIf the value starts with `url:` then the value is fetched as an external url and expects raw text returned\n\n* EX `url:https://pastebin.com/raw/38qfL7mL` tries to get the text response of `https://pastebin.com/raw/38qfL7mL`\n\nIf none of the above is used the value is treated as the raw context\n\n * EX `this is **bold** markdown text` => \"this is **bold** markdown text\"\n\nAll Content is rendered using [mustache](https://github.com/janl/mustache.js/#templates) to enable [Action Templating](https://github.com/FoxxMD/reddit-context-bot#action-templating).\n\nThe following properties are always available in the template (view individual Rules to see rule-specific template data):\n```\nitem.kind => The type of Activity that was checked (comment/submission)\nitem.author => The name of the Author of the Activity EX FoxxMD\nitem.permalink => A permalink URL to the Activity EX https://reddit.com/r/yourSub/comments/o1h0i0/title_name/1v3b7x\nitem.url => If the Activity is Link Sumbission then the external URL\nitem.title => If the Activity is a Submission then the title of that Submission\nrules => An object containing RuleResults of all the rules run for this check. See Action Templating for more details on naming\n```",
"type": "string"
},
"distinguish": {
@@ -904,9 +936,15 @@
"approved": {
"type": "boolean"
},
"deleted": {
"type": "boolean"
},
"distinguished": {
"type": "boolean"
},
"filtered": {
"type": "boolean"
},
"locked": {
"type": "boolean"
},
@@ -1144,7 +1182,7 @@
"type": "array"
},
"exclude": {
"description": "Do not include Submissions from this list of Subreddits.\n\nA list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"examples": [
"mealtimevideos",
"askscience"
@@ -1156,7 +1194,7 @@
"type": "array"
},
"include": {
"description": "Only include Submissions from this list of Subreddits.\n\nA list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"examples": [
"mealtimevideos",
"askscience"
@@ -1518,7 +1556,7 @@
]
},
"exclude": {
"description": "Do not include Submissions from this list of Subreddits.\n\nA list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"examples": [
"mealtimevideos",
"askscience"
@@ -1534,7 +1572,7 @@
"type": "number"
},
"include": {
"description": "Only include Submissions from this list of Subreddits.\n\nA list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"examples": [
"mealtimevideos",
"askscience"
@@ -1562,6 +1600,11 @@
],
"description": "A list of criteria to test the state of the `Activity` against before running the Rule.\n\nIf any set of criteria passes the Rule will be run. If the criteria fails then the Rule is skipped."
},
"keepRemoved": {
"default": false,
"description": "Count submissions/comments that have previously been removed.\n\nBy default all `Submissions/Commments` that are in a `removed` state will be filtered from `window` (only applies to subreddits you mod).\n\nSetting to `true` could be useful if you also want to also detected removed repeat posts by a user like for example if automoderator removes multiple, consecutive submissions for not following title format correctly.",
"type": "boolean"
},
"kind": {
"description": "The kind of rule to run",
"enum": [
@@ -1645,7 +1688,7 @@
]
},
"content": {
"description": "The Content to submit for this Action. Content is interpreted as reddit-flavored Markdown.\n\nIf value starts with `wiki:` then the proceeding value will be used to get a wiki page\n\n * EX `wiki:botconfig/mybot` tries to get `https://reddit.com/mySubredditExample/wiki/botconfig/mybot`\n * EX `this is **bold** markdown text` => \"this is **bold** markdown text\"\n\nContent is rendered using [mustache](https://github.com/janl/mustache.js/#templates) to enable [Action Templating](https://github.com/FoxxMD/reddit-context-bot#action-templating).\n\nThe following properties are always available in the template (view individual Rules to see rule-specific template data):\n```\nitem.kind => The type of Activity that was checked (comment/submission)\nitem.author => The name of the Author of the Activity EX FoxxMD\nitem.permalink => A permalink URL to the Activity EX https://reddit.com/r/yourSub/comments/o1h0i0/title_name/1v3b7x\nitem.url => If the Activity is Link Sumbission then the external URL\nitem.title => If the Activity is a Submission then the title of that Submission\nrules => An object containing RuleResults of all the rules run for this check. See Action Templating for more details on naming\n```",
"description": "The Content to submit for this Action. Content is interpreted as reddit-flavored Markdown.\n\nIf value starts with `wiki:` then the proceeding value will be used to get a wiki page from the current subreddit\n\n * EX `wiki:botconfig/mybot` tries to get `https://reddit.com/r/currentSubreddit/wiki/botconfig/mybot`\n\nIf the value starts with `wiki:` and ends with `|someValue` then `someValue` will be used as the base subreddit for the wiki page\n\n* EX `wiki:replytemplates/test|ContextModBot` tries to get `https://reddit.com/r/ContextModBot/wiki/replytemplates/test`\n\nIf the value starts with `url:` then the value is fetched as an external url and expects raw text returned\n\n* EX `url:https://pastebin.com/raw/38qfL7mL` tries to get the text response of `https://pastebin.com/raw/38qfL7mL`\n\nIf none of the above is used the value is treated as the raw context\n\n * EX `this is **bold** markdown text` => \"this is **bold** markdown text\"\n\nAll Content is rendered using [mustache](https://github.com/janl/mustache.js/#templates) to enable [Action Templating](https://github.com/FoxxMD/reddit-context-bot#action-templating).\n\nThe following properties are always available in the template (view individual Rules to see rule-specific template data):\n```\nitem.kind => The type of Activity that was checked (comment/submission)\nitem.author => The name of the Author of the Activity EX FoxxMD\nitem.permalink => A permalink URL to the Activity EX https://reddit.com/r/yourSub/comments/o1h0i0/title_name/1v3b7x\nitem.url => If the Activity is Link Sumbission then the external URL\nitem.title => If the Activity is a Submission then the title of that Submission\nrules => An object containing RuleResults of all the rules run for this check. See Action Templating for more details on naming\n```",
"examples": [
"This is the content of a comment/report/usernote",
"this is **bold** markdown text",
@@ -1754,7 +1797,7 @@
"minProperties": 1,
"properties": {
"subreddits": {
"description": "A list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "A list of Subreddits (by name, case-insensitive) to look for.\n\nEX [\"mealtimevideos\",\"askscience\"]",
"examples": [
[
"mealtimevideos",
@@ -1948,9 +1991,15 @@
"approved": {
"type": "boolean"
},
"deleted": {
"type": "boolean"
},
"distinguished": {
"type": "boolean"
},
"filtered": {
"type": "boolean"
},
"is_self": {
"type": "boolean"
},
@@ -2039,7 +2088,7 @@
]
},
"content": {
"description": "The Content to submit for this Action. Content is interpreted as reddit-flavored Markdown.\n\nIf value starts with `wiki:` then the proceeding value will be used to get a wiki page\n\n * EX `wiki:botconfig/mybot` tries to get `https://reddit.com/mySubredditExample/wiki/botconfig/mybot`\n * EX `this is **bold** markdown text` => \"this is **bold** markdown text\"\n\nContent is rendered using [mustache](https://github.com/janl/mustache.js/#templates) to enable [Action Templating](https://github.com/FoxxMD/reddit-context-bot#action-templating).\n\nThe following properties are always available in the template (view individual Rules to see rule-specific template data):\n```\nitem.kind => The type of Activity that was checked (comment/submission)\nitem.author => The name of the Author of the Activity EX FoxxMD\nitem.permalink => A permalink URL to the Activity EX https://reddit.com/r/yourSub/comments/o1h0i0/title_name/1v3b7x\nitem.url => If the Activity is Link Sumbission then the external URL\nitem.title => If the Activity is a Submission then the title of that Submission\nrules => An object containing RuleResults of all the rules run for this check. See Action Templating for more details on naming\n```",
"description": "The Content to submit for this Action. Content is interpreted as reddit-flavored Markdown.\n\nIf value starts with `wiki:` then the proceeding value will be used to get a wiki page from the current subreddit\n\n * EX `wiki:botconfig/mybot` tries to get `https://reddit.com/r/currentSubreddit/wiki/botconfig/mybot`\n\nIf the value starts with `wiki:` and ends with `|someValue` then `someValue` will be used as the base subreddit for the wiki page\n\n* EX `wiki:replytemplates/test|ContextModBot` tries to get `https://reddit.com/r/ContextModBot/wiki/replytemplates/test`\n\nIf the value starts with `url:` then the value is fetched as an external url and expects raw text returned\n\n* EX `url:https://pastebin.com/raw/38qfL7mL` tries to get the text response of `https://pastebin.com/raw/38qfL7mL`\n\nIf none of the above is used the value is treated as the raw context\n\n * EX `this is **bold** markdown text` => \"this is **bold** markdown text\"\n\nAll Content is rendered using [mustache](https://github.com/janl/mustache.js/#templates) to enable [Action Templating](https://github.com/FoxxMD/reddit-context-bot#action-templating).\n\nThe following properties are always available in the template (view individual Rules to see rule-specific template data):\n```\nitem.kind => The type of Activity that was checked (comment/submission)\nitem.author => The name of the Author of the Activity EX FoxxMD\nitem.permalink => A permalink URL to the Activity EX https://reddit.com/r/yourSub/comments/o1h0i0/title_name/1v3b7x\nitem.url => If the Activity is Link Sumbission then the external URL\nitem.title => If the Activity is a Submission then the title of that Submission\nrules => An object containing RuleResults of all the rules run for this check. See Action Templating for more details on naming\n```",
"examples": [
"This is the content of a comment/report/usernote",
"this is **bold** markdown text",

View File

@@ -61,7 +61,7 @@
},
"satisfyOn": {
"default": "any",
"description": "Define the condition under which both criteria are considered met\n\n**If `any` then it will retrieve Activities until one of the criteria is met, whichever occurs first**\n\nEX `{count: 100, duration: {days: 90}}`:\n* If 90 days of activities = 40 activities => returns 40 activities\n* If 100 activities is only 20 days => 100 activities\n\n**If `all` then both criteria must be met.**\n\nEffectively, whichever criteria produces the most Activities...\n\nEX `{count: 100, duration: {days: 90}}`:\n* If at 90 days of activities => 40 activities, continue retrieving results until 100 => results in >90 days of activities\n* If at 100 activities => 20 days of activities, continue retrieving results until 90 days => results in >100 activities",
"description": "Define the condition under which both criteria are considered met\n\n**If `any` then it will retrieve Activities until one of the criteria is met, whichever occurs first**\n\nEX `{\"count\": 100, duration: \"90 days\"}`:\n* If 90 days of activities = 40 activities => returns 40 activities\n* If 100 activities is only 20 days => 100 activities\n\n**If `all` then both criteria must be met.**\n\nEffectively, whichever criteria produces the most Activities...\n\nEX `{\"count\": 100, duration: \"90 days\"}`:\n* If at 90 days of activities => 40 activities, continue retrieving results until 100 => results in >90 days of activities\n* If at 100 activities => 20 days of activities, continue retrieving results until 90 days => results in >100 activities",
"enum": [
"all",
"any"
@@ -70,6 +70,38 @@
"any"
],
"type": "string"
},
"subreddits": {
"description": "Filter which subreddits (case-insensitive) Activities are retrieved from.\n\n**Note:** Filtering occurs **before** `duration/count` checks are performed.",
"properties": {
"exclude": {
"description": "Exclude any results from these subreddits\n\n**Note:** `exclude` is ignored if `include` is present",
"examples": [
[
"mealtimevideos",
"askscience"
]
],
"items": {
"type": "string"
},
"type": "array"
},
"include": {
"description": "Include only results from these subreddits",
"examples": [
[
"mealtimevideos",
"askscience"
]
],
"items": {
"type": "string"
},
"type": "array"
}
},
"type": "object"
}
},
"type": "object"
@@ -121,7 +153,7 @@
"type": "boolean"
},
"exclude": {
"description": "Do not include Activities from this list of Subreddits. Will be ignored if `include` is present.\n\nA list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "Do not include Activities from this list of Subreddits (by name, case-insensitive)\n\nWill be ignored if `include` is present.\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"examples": [
"mealtimevideos",
"askscience"
@@ -133,7 +165,7 @@
"type": "array"
},
"include": {
"description": "Only include Activities from this list of Subreddits.\n\nA list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "Only include Activities from this list of Subreddits (by name, case-insensitive)\n\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"examples": [
"mealtimevideos",
"askscience"
@@ -484,9 +516,15 @@
"approved": {
"type": "boolean"
},
"deleted": {
"type": "boolean"
},
"distinguished": {
"type": "boolean"
},
"filtered": {
"type": "boolean"
},
"locked": {
"type": "boolean"
},
@@ -647,7 +685,7 @@
"type": "array"
},
"exclude": {
"description": "Do not include Submissions from this list of Subreddits.\n\nA list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"examples": [
"mealtimevideos",
"askscience"
@@ -659,7 +697,7 @@
"type": "array"
},
"include": {
"description": "Only include Submissions from this list of Subreddits.\n\nA list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"examples": [
"mealtimevideos",
"askscience"
@@ -841,7 +879,7 @@
]
},
"exclude": {
"description": "Do not include Submissions from this list of Subreddits.\n\nA list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"examples": [
"mealtimevideos",
"askscience"
@@ -857,7 +895,7 @@
"type": "number"
},
"include": {
"description": "Only include Submissions from this list of Subreddits.\n\nA list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"examples": [
"mealtimevideos",
"askscience"
@@ -885,6 +923,11 @@
],
"description": "A list of criteria to test the state of the `Activity` against before running the Rule.\n\nIf any set of criteria passes the Rule will be run. If the criteria fails then the Rule is skipped."
},
"keepRemoved": {
"default": false,
"description": "Count submissions/comments that have previously been removed.\n\nBy default all `Submissions/Commments` that are in a `removed` state will be filtered from `window` (only applies to subreddits you mod).\n\nSetting to `true` could be useful if you also want to also detected removed repeat posts by a user like for example if automoderator removes multiple, consecutive submissions for not following title format correctly.",
"type": "boolean"
},
"kind": {
"description": "The kind of rule to run",
"enum": [
@@ -951,7 +994,7 @@
"minProperties": 1,
"properties": {
"subreddits": {
"description": "A list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "A list of Subreddits (by name, case-insensitive) to look for.\n\nEX [\"mealtimevideos\",\"askscience\"]",
"examples": [
[
"mealtimevideos",
@@ -991,9 +1034,15 @@
"approved": {
"type": "boolean"
},
"deleted": {
"type": "boolean"
},
"distinguished": {
"type": "boolean"
},
"filtered": {
"type": "boolean"
},
"is_self": {
"type": "boolean"
},

View File

@@ -41,7 +41,7 @@
},
"satisfyOn": {
"default": "any",
"description": "Define the condition under which both criteria are considered met\n\n**If `any` then it will retrieve Activities until one of the criteria is met, whichever occurs first**\n\nEX `{count: 100, duration: {days: 90}}`:\n* If 90 days of activities = 40 activities => returns 40 activities\n* If 100 activities is only 20 days => 100 activities\n\n**If `all` then both criteria must be met.**\n\nEffectively, whichever criteria produces the most Activities...\n\nEX `{count: 100, duration: {days: 90}}`:\n* If at 90 days of activities => 40 activities, continue retrieving results until 100 => results in >90 days of activities\n* If at 100 activities => 20 days of activities, continue retrieving results until 90 days => results in >100 activities",
"description": "Define the condition under which both criteria are considered met\n\n**If `any` then it will retrieve Activities until one of the criteria is met, whichever occurs first**\n\nEX `{\"count\": 100, duration: \"90 days\"}`:\n* If 90 days of activities = 40 activities => returns 40 activities\n* If 100 activities is only 20 days => 100 activities\n\n**If `all` then both criteria must be met.**\n\nEffectively, whichever criteria produces the most Activities...\n\nEX `{\"count\": 100, duration: \"90 days\"}`:\n* If at 90 days of activities => 40 activities, continue retrieving results until 100 => results in >90 days of activities\n* If at 100 activities => 20 days of activities, continue retrieving results until 90 days => results in >100 activities",
"enum": [
"all",
"any"
@@ -50,6 +50,38 @@
"any"
],
"type": "string"
},
"subreddits": {
"description": "Filter which subreddits (case-insensitive) Activities are retrieved from.\n\n**Note:** Filtering occurs **before** `duration/count` checks are performed.",
"properties": {
"exclude": {
"description": "Exclude any results from these subreddits\n\n**Note:** `exclude` is ignored if `include` is present",
"examples": [
[
"mealtimevideos",
"askscience"
]
],
"items": {
"type": "string"
},
"type": "array"
},
"include": {
"description": "Include only results from these subreddits",
"examples": [
[
"mealtimevideos",
"askscience"
]
],
"items": {
"type": "string"
},
"type": "array"
}
},
"type": "object"
}
},
"type": "object"
@@ -101,7 +133,7 @@
"type": "boolean"
},
"exclude": {
"description": "Do not include Activities from this list of Subreddits. Will be ignored if `include` is present.\n\nA list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "Do not include Activities from this list of Subreddits (by name, case-insensitive)\n\nWill be ignored if `include` is present.\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"examples": [
"mealtimevideos",
"askscience"
@@ -113,7 +145,7 @@
"type": "array"
},
"include": {
"description": "Only include Activities from this list of Subreddits.\n\nA list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "Only include Activities from this list of Subreddits (by name, case-insensitive)\n\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"examples": [
"mealtimevideos",
"askscience"
@@ -464,9 +496,15 @@
"approved": {
"type": "boolean"
},
"deleted": {
"type": "boolean"
},
"distinguished": {
"type": "boolean"
},
"filtered": {
"type": "boolean"
},
"locked": {
"type": "boolean"
},
@@ -627,7 +665,7 @@
"type": "array"
},
"exclude": {
"description": "Do not include Submissions from this list of Subreddits.\n\nA list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"examples": [
"mealtimevideos",
"askscience"
@@ -639,7 +677,7 @@
"type": "array"
},
"include": {
"description": "Only include Submissions from this list of Subreddits.\n\nA list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"examples": [
"mealtimevideos",
"askscience"
@@ -821,7 +859,7 @@
]
},
"exclude": {
"description": "Do not include Submissions from this list of Subreddits.\n\nA list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"examples": [
"mealtimevideos",
"askscience"
@@ -837,7 +875,7 @@
"type": "number"
},
"include": {
"description": "Only include Submissions from this list of Subreddits.\n\nA list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"examples": [
"mealtimevideos",
"askscience"
@@ -865,6 +903,11 @@
],
"description": "A list of criteria to test the state of the `Activity` against before running the Rule.\n\nIf any set of criteria passes the Rule will be run. If the criteria fails then the Rule is skipped."
},
"keepRemoved": {
"default": false,
"description": "Count submissions/comments that have previously been removed.\n\nBy default all `Submissions/Commments` that are in a `removed` state will be filtered from `window` (only applies to subreddits you mod).\n\nSetting to `true` could be useful if you also want to also detected removed repeat posts by a user like for example if automoderator removes multiple, consecutive submissions for not following title format correctly.",
"type": "boolean"
},
"kind": {
"description": "The kind of rule to run",
"enum": [
@@ -931,7 +974,7 @@
"minProperties": 1,
"properties": {
"subreddits": {
"description": "A list of subreddits (case-insensitive) to look for. Do not include \"r/\" prefix.\n\nEX to match against /r/mealtimevideos and /r/askscience use [\"mealtimevideos\",\"askscience\"]",
"description": "A list of Subreddits (by name, case-insensitive) to look for.\n\nEX [\"mealtimevideos\",\"askscience\"]",
"examples": [
[
"mealtimevideos",
@@ -971,9 +1014,15 @@
"approved": {
"type": "boolean"
},
"deleted": {
"type": "boolean"
},
"distinguished": {
"type": "boolean"
},
"filtered": {
"type": "boolean"
},
"is_self": {
"type": "boolean"
},
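To tie the schema additions above together, here is a minimal sketch of a rule configuration that uses the new `keepRemoved` flag and the `window` subreddit filter. The `window`, `subreddits`, `satisfyOn`, and `keepRemoved` shapes follow the schema in this diff; the surrounding rule fields (`name`, the `kind` value) are illustrative only.

// Sketch only: field shapes follow the schema above, rule name/kind values are illustrative
const repeatRuleExample = {
    name: 'repeatWithFilters',
    kind: 'repeatActivity', // illustrative kind value
    // count previously removed submissions/comments too (default false)
    keepRemoved: true,
    window: {
        count: 100,
        duration: '90 days',
        // stop retrieving once either criteria is satisfied
        satisfyOn: 'any',
        subreddits: {
            // only keep activities from these subreddits; filtering happens
            // before the count/duration checks, and include wins over exclude
            include: ['mealtimevideos', 'askscience'],
        },
    },
};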

View File

@@ -4,7 +4,7 @@ import {SubmissionCheck} from "../Check/SubmissionCheck";
import {CommentCheck} from "../Check/CommentCheck";
import {
determineNewResults,
mergeArr,
mergeArr, parseFromJsonOrYamlToObject, sleep,
} from "../util";
import {CommentStream, SubmissionStream, Poll, ModQueueStream} from "snoostorm";
import pEvent from "p-event";
@@ -14,22 +14,34 @@ import {ManagerOptions, PollingOptionsStrong} from "../Common/interfaces";
import Submission from "snoowrap/dist/objects/Submission";
import {itemContentPeek} from "../Utils/SnoowrapUtils";
import LoggedError from "../Utils/LoggedError";
import ResourceManager, {SubredditResourceOptions, SubredditResources} from "./SubredditResources";
import ResourceManager, {
SubredditResourceOptions,
SubredditResources,
SubredditResourceSetOptions
} from "./SubredditResources";
import {UnmoderatedStream} from "./Streams";
import EventEmitter from "events";
import ConfigParseError from "../Utils/ConfigParseError";
import dayjs, { Dayjs as DayjsObj } from "dayjs";
export class Manager {
subreddit: Subreddit;
client: Snoowrap;
logger: Logger;
pollOptions: PollingOptionsStrong[];
submissionChecks: SubmissionCheck[];
commentChecks: CommentCheck[];
resources: SubredditResources;
pollOptions!: PollingOptionsStrong[];
submissionChecks!: SubmissionCheck[];
commentChecks!: CommentCheck[];
resources!: SubredditResources;
wikiLocation: string = 'botconfig/contextbot';
lastWikiRevision?: DayjsObj
lastWikiCheck: DayjsObj = dayjs();
wikiUpdateRunning: boolean = false;
streamListedOnce: string[] = [];
streams: Poll<Snoowrap.Submission | Snoowrap.Comment>[] = [];
dryRun?: boolean;
globalDryRun?: boolean;
emitter: EventEmitter = new EventEmitter();
displayLabel: string;
currentLabels?: string[];
@@ -52,35 +64,54 @@ export class Manager {
return getLabels()
}
}, mergeArr);
const configBuilder = new ConfigBuilder({logger: this.logger});
const validJson = configBuilder.validateJson(sourceData);
const {checks, ...configManagerOpts} = validJson;
const {polling = [{pollOn: 'unmoderated', limit: 25, interval: 20000}], caching, dryRun, footer, nickname} = configManagerOpts || {};
this.pollOptions = buildPollingOptions(polling);
this.subreddit = sub;
this.client = client;
this.dryRun = opts.dryRun || dryRun;
this.parseConfigurationFromObject(sourceData);
}
protected parseConfigurationFromObject(configObj: object) {
const configBuilder = new ConfigBuilder({logger: this.logger});
const validJson = configBuilder.validateJson(configObj);
const {checks, ...configManagerOpts} = validJson;
const {
polling = [{pollOn: 'unmoderated', limit: 25, interval: 20000}],
caching,
dryRun,
footer,
nickname
} = configManagerOpts || {};
this.pollOptions = buildPollingOptions(polling);
this.dryRun = this.globalDryRun || dryRun;
if(nickname !== undefined) {
this.displayLabel = nickname;
this.currentLabels = [this.displayLabel];
}
let resourceConfig: SubredditResourceOptions = {
logger: this.logger,
subreddit: sub,
if(footer !== undefined) {
this.resources.footer = footer;
}
let resourceConfig: SubredditResourceSetOptions = {
footer,
enabled: true
};
if(caching === false) {
resourceConfig.enabled = false;
} else {
resourceConfig = {...resourceConfig, ...caching};
}
if(this.resources === undefined) {
this.resources = ResourceManager.set(this.subreddit.display_name, {
...resourceConfig,
logger: this.logger,
subreddit: this.subreddit
});
}
this.resources.setOptions(resourceConfig);
this.resources = ResourceManager.set(sub.display_name, resourceConfig);
this.logger.info('Subreddit-specific options updated');
this.logger.info('Building Checks...');
const commentChecks: Array<CommentCheck> = [];
const subChecks: Array<SubmissionCheck> = [];
@@ -90,7 +121,7 @@ export class Manager {
...jCheck,
dryRun: this.dryRun || jCheck.dryRun,
logger: this.logger,
subredditName: sub.display_name
subredditName: this.subreddit.display_name
};
if (jCheck.kind === 'comment') {
commentChecks.push(new CommentCheck(checkConfig));
@@ -109,6 +140,54 @@ export class Manager {
}
}
async parseConfiguration(force: boolean = false) {
this.wikiUpdateRunning = true;
this.lastWikiCheck = dayjs();
let sourceData: string;
try {
// @ts-ignore
const wiki = await this.subreddit.getWikiPage(this.wikiLocation).fetch();
const revisionDate = dayjs.unix(wiki.revision_date);
if (!force && (this.lastWikiRevision !== undefined && this.lastWikiRevision.isSame(revisionDate))) {
// nothing to do, we already have this revision
this.wikiUpdateRunning = false;
this.logger.verbose('Config is up to date');
return;
}
if (this.lastWikiRevision !== undefined) {
this.logger.info(`Updating config due to stale wiki page (${dayjs.duration(dayjs().diff(revisionDate)).humanize()} old)`)
}
this.lastWikiRevision = revisionDate;
sourceData = await wiki.content_md;
} catch (err) {
const msg = `Could not read wiki configuration. Please ensure the page https://reddit.com${this.subreddit.url}wiki/${this.wikiLocation} exists and is readable -- error: ${err.message}`;
this.logger.error(msg);
this.wikiUpdateRunning = false;
throw new ConfigParseError(msg);
}
if (sourceData === '') {
this.logger.error(`Wiki page content was empty`);
this.wikiUpdateRunning = false;
throw new ConfigParseError('Wiki page content was empty');
}
const [configObj, jsonErr, yamlErr] = parseFromJsonOrYamlToObject(sourceData);
if (configObj === undefined) {
this.logger.error(`Could not parse wiki page contents as JSON or YAML:`);
this.logger.error(jsonErr);
this.logger.error(yamlErr);
this.wikiUpdateRunning = false;
throw new ConfigParseError('Could not parse wiki page contents as JSON or YAML')
}
this.wikiUpdateRunning = false;
this.parseConfigurationFromObject(configObj);
this.logger.info('Checks updated');
}
async runChecks(checkType: ('Comment' | 'Submission'), item: (Submission | Comment), checkNames: string[] = []): Promise<void> {
const checks = checkType === 'Comment' ? this.commentChecks : this.submissionChecks;
const itemId = await item.id;
@@ -117,6 +196,17 @@ export class Manager {
this.currentLabels = [this.displayLabel, itemIdentifier];
const [peek, _] = await itemContentPeek(item);
this.logger.info(`<EVENT> ${peek}`);
while(this.wikiUpdateRunning) {
// sleep for a few seconds while we get new config zzzz
this.logger.verbose('A wiki config update is running, delaying checks by 3 seconds');
await sleep(3000);
}
if(dayjs().diff(this.lastWikiCheck, 's') > 60) {
// config was last checked more than 60 seconds ago, try to update
await this.parseConfiguration();
}
const startingApiLimit = this.client.ratelimitRemaining;
if(item instanceof Submission) {
@@ -247,12 +337,21 @@ export class Manager {
this.running = true;
this.logger.info('Bot Running');
const emitter = new EventEmitter();
await pEvent(emitter, 'end');
await pEvent(this.emitter, 'end');
} catch (err) {
this.logger.error('Encountered unhandled error, manager is bailing out');
this.logger.error(err);
} finally {
this.stop();
}
}
stop() {
if(this.running) {
for(const s of this.streams) {
s.end();
}
this.emitter.emit('end');
this.running = false;
this.logger.info('Bot Stopped');
}

View File

@@ -1,4 +1,4 @@
import {RedditUser, Comment, Submission} from "snoowrap";
import Snoowrap, {RedditUser, Comment, Submission} from "snoowrap";
import cache from 'memory-cache';
import objectHash from 'object-hash';
import {
@@ -9,7 +9,8 @@ import {
} from "../Utils/SnoowrapUtils";
import Subreddit from 'snoowrap/dist/objects/Subreddit';
import winston, {Logger} from "winston";
import {mergeArr} from "../util";
import fetch from 'node-fetch';
import {mergeArr, parseExternalUrl, parseWikiContext} from "../util";
import LoggedError from "../Utils/LoggedError";
import {Footer, SubredditCacheConfig} from "../Common/interfaces";
import UserNotes from "./UserNotes";
@@ -17,7 +18,6 @@ import Mustache from "mustache";
import he from "he";
import {AuthorCriteria} from "../Author/Author";
export const WIKI_DESCRIM = 'wiki:';
export const DEFAULT_FOOTER = '\r\n*****\r\nThis action was performed by [a bot.]({{botLink}}) Mention a moderator or [send a modmail]({{modmailLink}}) if you have any ideas, questions, or concerns about this action.';
export interface SubredditResourceOptions extends SubredditCacheConfig, Footer {
@@ -26,24 +26,48 @@ export interface SubredditResourceOptions extends SubredditCacheConfig, Footer {
logger: Logger;
}
export class SubredditResources {
export interface SubredditResourceSetOptions extends SubredditCacheConfig, Footer {
enabled: boolean;
protected authorTTL: number;
protected useSubredditAuthorCache: boolean;
protected wikiTTL: number;
}
export class SubredditResources {
enabled!: boolean;
protected authorTTL!: number;
protected useSubredditAuthorCache!: boolean;
protected wikiTTL!: number;
name: string;
protected logger: Logger;
userNotes: UserNotes;
footer: false | string;
footer!: false | string;
subreddit: Subreddit
constructor(name: string, options: SubredditResourceOptions) {
const {
subreddit,
logger,
enabled = true,
userNotesTTL = 60000,
} = options || {};
this.subreddit = subreddit;
this.name = name;
if (logger === undefined) {
const alogger = winston.loggers.get('default')
this.logger = alogger.child({labels: [this.name, 'Resource Cache']}, mergeArr);
} else {
this.logger = logger.child({labels: ['Resource Cache']}, mergeArr);
}
this.userNotes = new UserNotes(enabled ? userNotesTTL : 0, this.subreddit, this.logger)
this.setOptions(options);
}
setOptions (options: SubredditResourceSetOptions) {
const {
enabled = true,
authorTTL,
subreddit,
userNotesTTL = 60000,
wikiTTL = 300000, // 5 minutes
logger,
footer = DEFAULT_FOOTER
} = options || {};
@@ -57,16 +81,7 @@ export class SubredditResources {
this.authorTTL = authorTTL;
}
this.wikiTTL = wikiTTL;
this.userNotes = new UserNotes(enabled ? userNotesTTL : 0, subreddit, logger);
this.name = name;
if (logger === undefined) {
const alogger = winston.loggers.get('default')
this.logger = alogger.child({labels: [this.name, 'Resource Cache']}, mergeArr);
} else {
this.logger = logger.child({labels: ['Resource Cache']}, mergeArr);
}
this.userNotes.notesTTL = enabled ? userNotesTTL : 0;
}
async getAuthorActivities(user: RedditUser, options: AuthorTypedActivitiesOptions): Promise<Array<Submission | Comment>> {
@@ -107,38 +122,69 @@ export class SubredditResources {
}) as unknown as Promise<Submission[]>;
}
async getContent(val: string, subreddit: Subreddit): Promise<string> {
const hasWiki = val.trim().substring(0, WIKI_DESCRIM.length) === WIKI_DESCRIM;
if (!hasWiki) {
async getContent(val: string, subredditArg?: Subreddit): Promise<string> {
const subreddit = subredditArg || this.subreddit;
let cacheKey;
const wikiContext = parseWikiContext(val);
if (wikiContext !== undefined) {
cacheKey = `${wikiContext.wiki}${wikiContext.subreddit !== undefined ? `|${wikiContext.subreddit}` : ''}`;
}
const extUrl = wikiContext === undefined ? parseExternalUrl(val) : undefined;
if (extUrl !== undefined) {
cacheKey = extUrl;
}
if (cacheKey === undefined) {
return val;
} else {
const useCache = this.enabled && this.wikiTTL > 0;
const wikiPath = val.trim().substring(WIKI_DESCRIM.length);
}
let hash = `${subreddit.display_name}-${wikiPath}`;
if (useCache) {
const cachedContent = cache.get(`${subreddit.display_name}-${wikiPath}`);
if (cachedContent !== null) {
this.logger.debug(`Cache Hit: ${wikiPath}`);
return cachedContent;
}
const useCache = this.enabled && this.wikiTTL > 0;
// try to get cached value first
let hash = `${subreddit.display_name}-${cacheKey}`;
if (useCache) {
const cachedContent = cache.get(hash);
if (cachedContent !== null) {
this.logger.debug(`Cache Hit: ${cacheKey}`);
return cachedContent;
}
}
let wikiContent: string;
// no cache hit, get from source
if (wikiContext !== undefined) {
let sub;
if (wikiContext.subreddit === undefined || wikiContext.subreddit.toLowerCase() === subreddit.display_name) {
sub = subreddit;
} else {
// @ts-ignore
const client = subreddit._r as Snoowrap;
sub = client.getSubreddit(wikiContext.subreddit);
}
try {
const wikiPage = subreddit.getWikiPage(wikiPath);
const wikiContent = await wikiPage.content_md;
if (useCache) {
cache.put(hash, wikiContent, this.wikiTTL);
}
return wikiContent;
const wikiPage = sub.getWikiPage(wikiContext.wiki);
wikiContent = await wikiPage.content_md;
} catch (err) {
const msg = `Could not read wiki page. Please ensure the page 'https://reddit.com${subreddit.display_name_prefixed}wiki/${wikiPath}' exists and is readable`;
const msg = `Could not read wiki page. Please ensure the page 'https://reddit.com/${sub.display_name_prefixed}/wiki/${wikiContext.wiki}' exists and is readable`;
this.logger.error(msg, err);
throw new LoggedError(msg);
}
} else {
try {
const response = await fetch(extUrl as string);
wikiContent = await response.text();
} catch (err) {
const msg = `Error occurred while trying to fetch the url ${extUrl}`;
this.logger.error(msg, err);
throw new LoggedError(msg);
}
}
if (useCache) {
cache.put(hash, wikiContent, this.wikiTTL);
}
return wikiContent;
}
async testAuthorCriteria(item: (Comment | Submission), authorOpts: AuthorCriteria, include = true) {
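The `parseWikiContext`/`parseExternalUrl` handling above means `getContent` now accepts three value formats. A short sketch, assuming `resources` is an existing `SubredditResources` instance (the page names and URL below are placeholders):

async function contentExamples(resources: SubredditResources) {
    // plain text (no wiki: or url: prefix) is returned unchanged
    const plain = await resources.getContent('Your post was removed.');
    // wiki page from the resource's own subreddit
    const own = await resources.getContent('wiki:removalreasons/spam');
    // wiki page from another subreddit via the "|subreddit" suffix
    const other = await resources.getContent('wiki:removalreasons/spam|SomeOtherSubreddit');
    // external content fetched with node-fetch and cached like wiki content
    const external = await resources.getContent('url:https://example.com/removal-message.md');
    return [plain, own, other, external];
}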

View File

@@ -0,0 +1,7 @@
import LoggedError from "./LoggedError";
class ConfigParseError extends LoggedError {
}
export default ConfigParseError

View File

@@ -1,5 +1,6 @@
import Snoowrap, {Comment, RedditUser} from "snoowrap";
import Snoowrap, {RedditUser} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import Comment from "snoowrap/dist/objects/Comment";
import {Duration, DurationUnitsObjectType} from "dayjs/plugin/duration";
import dayjs, {Dayjs} from "dayjs";
import Mustache from "mustache";
@@ -15,7 +16,7 @@ import {
compareDurationValue, comparisonTextOp,
isActivityWindowCriteria,
normalizeName, parseDuration,
parseDurationComparison, parseGenericValueComparison, parseGenericValueOrPercentComparison,
parseDurationComparison, parseGenericValueComparison, parseGenericValueOrPercentComparison, parseSubredditName,
truncateStringToLength
} from "../util";
import UserNotes from "../Subreddit/UserNotes";
@@ -23,6 +24,7 @@ import {Logger} from "winston";
import InvalidRegexError from "./InvalidRegexError";
import SimpleError from "./SimpleError";
import {AuthorCriteria} from "../Author/Author";
import { URL } from "url";
export const BOT_LINK = 'https://www.reddit.com/r/ContextModBot/comments/o1dugk/introduction_to_contextmodbot_and_rcb';
@@ -33,13 +35,16 @@ export interface AuthorTypedActivitiesOptions extends AuthorActivitiesOptions {
export interface AuthorActivitiesOptions {
window: ActivityWindowType | Duration
chunkSize?: number,
// TODO maybe move this into window
keepRemoved?: boolean,
}
export async function getAuthorActivities(user: RedditUser, options: AuthorTypedActivitiesOptions): Promise<Array<Submission | Comment>> {
const {
chunkSize: cs = 100,
window: optWindow
window: optWindow,
keepRemoved = true,
} = options;
let satisfiedCount: number | undefined,
@@ -50,8 +55,27 @@ export async function getAuthorActivities(user: RedditUser, options: AuthorTyped
let durVal: DurationVal | undefined;
let duration: Duration | undefined;
let includes: string[] = [];
let excludes: string[] = [];
if(isActivityWindowCriteria(optWindow)) {
const { satisfyOn = 'any', count, duration } = optWindow;
const {
satisfyOn = 'any',
count,
duration,
subreddits: {
include = [],
exclude = [],
} = {},
} = optWindow;
includes = include.map(x => parseSubredditName(x).toLowerCase());
excludes = exclude.map(x => parseSubredditName(x).toLowerCase());
if(includes.length > 0 && excludes.length > 0) {
// TODO add logger so this can be logged...
// this.logger.warn('include and exclude both specified, exclude will be ignored');
}
satisfiedCount = count;
durVal = duration;
satisfy = satisfyOn
@@ -114,7 +138,26 @@ export async function getAuthorActivities(user: RedditUser, options: AuthorTyped
let countOk = false,
timeOk = false;
const listSlice = listing.slice(offset - chunkSize)
let listSlice = listing.slice(offset - chunkSize)
// TODO partition list by filtered so we can log a debug statement with count of filtered out activities
if (includes.length > 0) {
listSlice = listSlice.filter(x => {
const actSub = x.subreddit.display_name.toLowerCase();
return includes.includes(actSub);
});
} else if (excludes.length > 0) {
listSlice = listSlice.filter(x => {
const actSub = x.subreddit.display_name.toLowerCase();
return !excludes.includes(actSub);
});
}
if(!keepRemoved) {
// snoowrap typings think 'removed' property does not exist on submission
// @ts-ignore
listSlice = listSlice.filter(x => !activityIsRemoved(x));
}
if (satisfiedCount !== undefined && items.length + listSlice.length >= satisfiedCount) {
// satisfied count
if(satisfy === 'any') {
@@ -483,29 +526,86 @@ export const getSubmissionFromComment = async (item: Comment): Promise<Submissio
}
}
const SPOTIFY_PODCAST_AUTHOR_REGEX: RegExp = /this episode from (?<author>.*?) on Spotify./;
const SPOTIFY_PODCAST_AUTHOR_REGEX_URL = 'https://regexr.com/61c2f';
const SPOTIFY_MUSIC_AUTHOR_REGEX: RegExp = /Listen to .*? on Spotify.\s(?<author>.+?)\s·\s(?<mediaType>.+?)\s/;
const SPOTIFY_MUSIC_AUTHOR_REGEX_URL = 'https://regexr.com/61c2r';
const ANCHOR_AUTHOR_REGEX: RegExp = /by (?<author>.+?)$/;
const ANCHOR_AUTHOR_REGEX_URL = 'https://regexr.com/61c31';
export const getAttributionIdentifier = (sub: Submission, useParentMediaDomain = false): DomainInfo => {
let domain: string = '';
let displayDomain: string = '';
let domainIdents: string[] = [sub.domain];
let domainIdents: string[] = useParentMediaDomain ? [sub.domain] : [];
let provider: string | undefined;
let mediaType: string | undefined;
if (!useParentMediaDomain && sub.secure_media?.oembed !== undefined) {
const {
author_url,
author_name,
description,
provider_name,
} = sub.secure_media?.oembed;
if (author_name !== undefined) {
domainIdents.push(author_name);
if (displayDomain === '') {
displayDomain = author_name;
switch(provider_name) {
case 'Spotify':
if(description !== undefined) {
let match = description.match(SPOTIFY_PODCAST_AUTHOR_REGEX);
if(match !== null) {
const {author} = match.groups as any;
displayDomain = author;
domainIdents.push(author);
mediaType = 'Podcast';
} else {
match = description.match(SPOTIFY_MUSIC_AUTHOR_REGEX);
if(match !== null) {
const {author, mediaType: mt} = match.groups as any;
displayDomain = author;
domainIdents.push(author);
mediaType = mt.toLowerCase();
}
}
}
break;
case 'Anchor FM Inc.':
if(author_name !== undefined) {
let match = author_name.match(ANCHOR_AUTHOR_REGEX);
if(match !== null) {
const {author} = match.groups as any;
displayDomain = author;
domainIdents.push(author);
mediaType = 'podcast';
}
}
break;
case 'YouTube':
mediaType = 'Video/Audio';
break;
default:
// nah
}
// handles yt, vimeo, twitter fine
if(displayDomain === '') {
if (author_name !== undefined) {
domainIdents.push(author_name);
if (displayDomain === '') {
displayDomain = author_name;
}
}
if (author_url !== undefined) {
domainIdents.push(author_url);
domain = author_url;
if (displayDomain === '') {
displayDomain = author_url;
}
}
}
if (author_url !== undefined) {
domainIdents.push(author_url);
domain = author_url;
if (displayDomain === '') {
displayDomain = author_url;
}
if(displayDomain === '') {
// we have media but could not parse an author from it, so fall back to the url path
const u = new URL(sub.url);
displayDomain = u.pathname;
domainIdents.push(u.pathname);
}
provider = provider_name;
} else if(sub.secure_media?.type !== undefined) {
@@ -522,7 +622,7 @@ export const getAttributionIdentifier = (sub: Submission, useParentMediaDomain =
displayDomain = domain;
}
return {display: displayDomain, domain, aliases: domainIdents, provider};
return {display: displayDomain, domain, aliases: domainIdents, provider, mediaType};
}
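For reference, these are the kinds of oembed strings the new provider regexes are aimed at; the sample strings below are illustrative, not real payloads.

const oembedSamples = {
    // SPOTIFY_PODCAST_AUTHOR_REGEX => author: 'My Podcast Show'
    spotifyPodcast: 'Listen to this episode from My Podcast Show on Spotify.',
    // SPOTIFY_MUSIC_AUTHOR_REGEX => author: 'Some Artist', mediaType: 'Song' (lower-cased by the code above)
    spotifyMusic: 'Listen to Some Track on Spotify. Some Artist · Song · 2021.',
    // ANCHOR_AUTHOR_REGEX, run against author_name => author: 'Some Creator'
    anchor: 'My Show by Some Creator',
};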
export const isItem = (item: Submission | Comment, stateCriteria: TypedActivityStates, logger: Logger): [boolean, SubmissionState|CommentState|undefined] => {
@@ -537,16 +637,44 @@ export const isItem = (item: Submission | Comment, stateCriteria: TypedActivityS
for (const k of Object.keys(crit)) {
// @ts-ignore
if (crit[k] !== undefined) {
// @ts-ignore
if (item[k] !== undefined) {
// @ts-ignore
if (item[k] !== crit[k]) {
switch(k) {
case 'removed':
const removed = activityIsRemoved(item);
if (removed !== crit['removed']) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${removed}`)
return [false, crit];
}
break;
case 'deleted':
const deleted = activityIsDeleted(item);
if (deleted !== crit['deleted']) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${deleted}`)
return [false, crit];
}
break;
case 'filtered':
const filtered = activityIsFiltered(item);
if (filtered !== crit['filtered']) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${filtered}`)
return [false, crit];
}
break;
default:
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${item[k]}`)
return [false, crit];
}
} else {
log.warn(`Tried to test for Item property '${k}' but it did not exist`);
if (item[k] !== undefined) {
// @ts-ignore
if (item[k] !== crit[k]) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${item[k]}`)
return [false, crit];
}
} else {
log.warn(`Tried to test for Item property '${k}' but it did not exist`);
}
break;
}
}
}
@@ -559,3 +687,30 @@ export const isItem = (item: Submission | Comment, stateCriteria: TypedActivityS
}
return [false, undefined];
}
export const activityIsRemoved = (item: Submission|Comment): boolean => {
if(item instanceof Submission) {
// when automod filters a post it gets this category
return item.banned_at_utc !== null && item.removed_by_category !== 'automod_filtered';
}
// when automod filters a comment item.removed === false
// so if we want to process filtered comments we need to check for this
return item.banned_at_utc !== null && item.removed;
}
export const activityIsFiltered = (item: Submission|Comment): boolean => {
if(item instanceof Submission) {
// when automod filters a post it gets this category
return item.banned_at_utc !== null && item.removed_by_category === 'automod_filtered';
}
// when automod filters a comment item.removed === false
// so if we want to process filtered comments we need to check for this
return item.banned_at_utc !== null && !item.removed;
}
export const activityIsDeleted = (item: Submission|Comment): boolean => {
if(item instanceof Submission) {
return item.removed_by_category === 'deleted';
}
return item.author.name === '[deleted]'
}
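A small sketch of how the three helpers above classify an activity, assuming `item` is a snoowrap `Submission` or `Comment` obtained elsewhere (the helper name is hypothetical):

function describeState(item: Submission | Comment): string {
    if (activityIsDeleted(item)) return 'deleted by its author';
    if (activityIsFiltered(item)) return 'held by an automod filter';
    if (activityIsRemoved(item)) return 'removed by a moderator or automod';
    return 'live';
}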

View File

@@ -518,3 +518,38 @@ export const compareDurationValue = (comp: DurationComparison, date: Dayjs) => {
const dateToCompare = dayjs().subtract(comp.duration.asSeconds(), 'seconds');
return dateComparisonTextOp(date, comp.operator, dateToCompare);
}
const SUBREDDIT_NAME_REGEX: RegExp = /^\s*(?:\/r\/|r\/)*(\w+)*\s*$/;
const SUBREDDIT_NAME_REGEX_URL = 'https://regexr.com/61a1d';
export const parseSubredditName = (val:string): string => {
const matches = val.match(SUBREDDIT_NAME_REGEX);
if (matches === null) {
throw new InvalidRegexError(SUBREDDIT_NAME_REGEX, val, SUBREDDIT_NAME_REGEX_URL)
}
return matches[1] as string;
}
const WIKI_REGEX: RegExp = /^\s*wiki:(?<url>[^|]+)\|*(?<subreddit>[^\s]*)\s*$/;
const WIKI_REGEX_URL = 'https://regexr.com/61bq1';
const URL_REGEX: RegExp = /^\s*url:(?<url>[^\s]+)\s*$/;
const URL_REGEX_URL = 'https://regexr.com/61bqd';
export const parseWikiContext = (val: string) => {
const matches = val.match(WIKI_REGEX);
if (matches === null) {
return undefined;
}
const sub = (matches.groups as any).subreddit as string;
return {
wiki: (matches.groups as any).url as string,
subreddit: sub === '' ? undefined : parseSubredditName(sub)
};
}
export const parseExternalUrl = (val: string) => {
const matches = val.match(URL_REGEX);
if (matches === null) {
return undefined;
}
return (matches.groups as any).url as string;
}
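Expected results of the parsers above for typical inputs, shown as a sketch (functions assumed to be imported from this module):

parseSubredditName(' /r/mealtimevideos ');  // => 'mealtimevideos'
parseSubredditName('r/askscience');         // => 'askscience'
parseWikiContext('wiki:botconfig/contextbot');
// => { wiki: 'botconfig/contextbot', subreddit: undefined }
parseWikiContext('wiki:removalreasons/spam|SomeOtherSubreddit');
// => { wiki: 'removalreasons/spam', subreddit: 'SomeOtherSubreddit' }
parseExternalUrl('url:https://example.com/removal-message.md');
// => 'https://example.com/removal-message.md'
parseExternalUrl('just some text');         // => undefined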