Compare commits

...

56 Commits
0.7.0 ... 0.9.0

Author SHA1 Message Date
FoxxMD
910f7f79ef Merge branch 'edge' 2021-09-20 10:54:32 -04:00
FoxxMD
641892cd3e fix: Fix activity push to manager
Should only be using firehose
2021-09-20 09:37:32 -04:00
FoxxMD
1dfb9779e7 feat(attribution): Allow specifying aggregateOn filter when using domain blacklist
May not make sense in every case, but a carefully constructed config could take advantage of this
2021-09-17 15:14:36 -04:00
FoxxMD
40111c54a2 feat(message): Add a markdown formatted 'ruleSummary' property to content template data 2021-09-17 14:38:39 -04:00
FoxxMD
b4745e3b45 feat(message): Implement arbitrary message recipient to enable modmail
* Can send message to any entity (user/subreddit) using 'to' property, or leave unspecified to send to author of activity
* Parse entity type (user or subreddit) from the 'to' value and use regex to ensure it's in a valid format we can understand
2021-09-17 13:36:28 -04:00
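A hedged sketch of how the new 'to' property might look in a message action config (JSON5): the 'to', 'title', 'content', and 'asSubreddit' properties come from the MessageActionConfig changes further down in this compare, while the 'kind' key and the values shown are illustrative.

```json5
// goes inside
// "actions": []
{
  // "kind" is illustrative here -- the action type key is not shown in this compare
  "kind": "message",
  // send to the modmail of /r/aSubreddit
  // other valid formats: "aUserName", "u/aUserName"
  // omit "to" entirely to message the Author of the Activity being checked
  "to": "r/aSubreddit",
  "title": "Automated report from ContextMod",
  "content": "An activity in your subreddit triggered a check.",
  // reddit does not support sending a message AS a subreddit TO another subreddit,
  // so this must stay false when "to" is a subreddit
  "asSubreddit": false
}
```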
FoxxMD
838da497ce feat: Add more detail to actioned events and logging for action results 2021-09-17 12:46:00 -04:00
FoxxMD
01755eada5 feat: De-dup activities from different polling sources
Previously CM would process the same activity multiple times if it was ingested from two different polling sources (modqueue and unmoderated/newSub). Introduce queue control flow to ensure activity is de-duped or refreshed before processing if this scenario occurs.

* Use a queue (firehose) to bottleneck all activities from different sources before pushing to worker queues
* Keep track of items currently ingested but not completely processed and use firehose to de-dupe queued items (flag to refresh) or re-queue if currently processing (and flag to refresh)
2021-09-17 11:50:49 -04:00
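The commit above describes the control flow only in prose; a minimal TypeScript sketch of the idea (names like firehose, QueuedActivity, and pushToWorker are illustrative, not taken from the actual ContextMod source) might look like:

```typescript
// Illustrative sketch only -- not the actual ContextMod implementation.
interface QueuedActivity {
    id: string;        // reddit fullname, e.g. 't3_abc123'
    source: string;    // polling source it came from (modqueue, unmoderated, newSub, ...)
    refresh: boolean;  // whether the activity should be re-fetched before processing
}

const queued = new Map<string, QueuedActivity>(); // ingested but not yet picked up by a worker
const processing = new Set<string>();             // currently being processed by a worker
// workers would add/remove ids from `processing` and `queued` as they pick items up (omitted here)

// All polling sources push here instead of pushing straight to worker queues
function firehose(activity: QueuedActivity, pushToWorker: (a: QueuedActivity) => void): void {
    const existing = queued.get(activity.id);
    if (existing !== undefined) {
        // already waiting in a worker queue: de-dupe, but flag it so the worker refreshes it
        existing.refresh = true;
        return;
    }
    if (processing.has(activity.id)) {
        // currently being processed: re-queue a single copy flagged for refresh
        const requeued = { ...activity, refresh: true };
        queued.set(requeued.id, requeued);
        pushToWorker(requeued);
        return;
    }
    queued.set(activity.id, activity);
    pushToWorker(activity);
}
```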
FoxxMD
1ff59ad6e8 feat: Add report count comparison to comment/submission state 2021-09-17 10:21:46 -04:00
FoxxMD
d8fd8e6140 feat: Add score (karma) comparison to comment/submission state 2021-09-17 10:13:21 -04:00
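A hedged sketch of what these two comparisons might look like in an itemIs filter: the `score` and `reports` property names and the comparison-string format are assumptions inferred from the commit messages above and from the threshold strings used elsewhere in this release.

```json5
// goes inside a check, alongside "rules": []
{
  "itemIs": [
    {
      // only match activities with negative karma...
      "score": "< 0",
      // ...that have also been reported at least twice
      "reports": ">= 2"
    }
  ]
}
```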
FoxxMD
255ffdb417 fix(recent): Deduplicate present subreddits 2021-09-16 16:48:00 -04:00
FoxxMD
f0199366a0 feat(history)!: Implement subreddit state and subreddit name parsing
* Implement total threshold to compare filtered activities against window activities

BREAKING CHANGE: include/exclude now filters POST activity window and all comparisons are done on those filtered activities against window activities
2021-09-16 15:36:06 -04:00
FoxxMD
20c724cab5 fix: Fix bug where non-media domains were not counted for attribution rule 2021-09-16 15:33:59 -04:00
FoxxMD
a670975f14 feat(repeat activity): Implement subreddit state and regex parsing 2021-09-16 14:12:16 -04:00
FoxxMD
ee13feaf57 feat(recent activity): Implement subreddit state and regex parsing for recent activity
* SubredditState can be used to check some subreddit attributes alongside, or in place of, a subreddit name
* Regex parsing for subreddit name string in recent activity
2021-09-16 13:34:19 -04:00
FoxxMD
23a24b4448 feat(regex)!: Simplify regex parsing from config
Reduce regex complexity in config by parsing a normal regex straight from config string value (including flags)

BREAKING CHANGE: regex must now be enclosed in forward slashes, flags must be on regex value, and regexFlags property has been removed
2021-09-16 10:53:33 -04:00
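A hedged before/after sketch of this breaking change (the old `regexFlags` spelling is taken from the breaking-change note above; the pattern and values are illustrative):

```json5
// before 0.9.0 -- pattern and flags were separate properties
{
  "regex": "fuck|shit|damn",
  "regexFlags": "i"
}

// 0.9.0 and later -- enclose the pattern in forward slashes and put flags on the value itself
// (if no flags are given the rule defaults to the global flag, per the fix further down)
{
  "regex": "/fuck|shit|damn/i"
}
```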
FoxxMD
a11b667d5e Merge branch 'edge' 2021-09-13 16:16:55 -04:00
FoxxMD
269b1620b9 fix(regex): fix when to add match sample to summary
Only include summary if there were non-zero matches
2021-09-07 15:04:30 -04:00
FoxxMD
6dee734440 fix(recent activity): fix subreddit summary in result
* Only include if any subreddits with activity found
* Use correct subreddit array
2021-09-07 15:03:56 -04:00
FoxxMD
3aea422eff fix(cache): check manager has cache (valid config) before trying to get actioned events 2021-09-07 14:56:03 -04:00
FoxxMD
e707e5a9a8 fix(manager): revert commented notification line from debugging 2021-09-07 14:07:00 -04:00
FoxxMD
2a24eea3a5 fix(rule): fix regex rule matching default behavior and improve log message
* Default to global flag if none specified so that all matches per activity are found
* Improve result message section ordering and display a sample of up to 4 found matches
2021-09-07 14:06:30 -04:00
FoxxMD
8ad8297c0e feat(rule): improve recent activity result (log message) by listing only subreddits with found activity 2021-09-07 14:04:26 -04:00
FoxxMD
0b94a14ac1 feat(ui): improve actioned events interactions
* Refactor api to get all accessible events, sorted by time, when subreddit is not specified
* Add subreddit name to actioned event data to differentiate between events
* Show actioned events link in "All" subreddit view
* Remove user-select css style (left over from config template)
* Format timestamp to be more human friendly
* Remove success/triggered text and just use checkmarks (same as log)
2021-09-07 13:33:35 -04:00
FoxxMD
a04e0d2a9b fix(cache): Set actioned events not to expire in cache 2021-09-07 13:26:30 -04:00
FoxxMD
3a1348c370 feat(ui): move actioned events link to "Actions Run" statistic
More intuitive location
2021-09-07 12:59:51 -04:00
FoxxMD
507818037f feat(cache): refactor actioned events into cache for persistence and make number stored configurable
* refactor actioned events into bot-configured cache so they can be persisted between restarts
* add config params for actionedEventsMax and actionedEventsDefault to allow defining defaults at operator/bot/subreddit level
2021-09-07 12:55:19 -04:00
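A hedged sketch of the two new parameters: the names come from the commit message, but the exact nesting at operator/bot/subreddit level is not shown in this compare, so the surrounding structure and values are illustrative.

```json5
{
  // default number of actioned events to persist when a level does not specify its own
  "actionedEventsDefault": 25,
  // presumably the upper bound a lower level (e.g. a subreddit config) is allowed to request
  "actionedEventsMax": 100
}
```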
FoxxMD
2c1f6daf4f Implement load config from URL for editor 2021-09-01 10:15:46 -04:00
FoxxMD
fef79472fe re-add missing heartbeat and improve bot exception handling
* Missed heartbeat during client-server refactor somehow...oops. Re-add heartbeat behavior
* Refactor nanny functionality to use date check rather than loop -- behaves same as heartbeat now
* use http retry handling in nanny to handle reddit outages
* try-catch on nanny and heartbeat for better exception handling at bot-level
* await health loop so we can catch bot-level exceptions in app to prevent entire app from crashing
2021-08-31 11:02:03 -04:00
FoxxMD
885e3fa765 Merge branch 'edge' 2021-08-26 16:04:01 -04:00
FoxxMD
0b2c0e6451 Add karma threshold for recent activity rule 2021-08-26 12:04:17 -04:00
FoxxMD
15806b5f1f Add regex documentation 2021-08-26 11:26:59 -04:00
FoxxMD
bf42cdf356 Fix author criteria hash 2021-08-25 15:26:41 -04:00
FoxxMD
e21acd86db Fix maxAge optional 2021-08-25 12:57:02 -04:00
FoxxMD
5dca1c9602 Refactor caching ttl data type to be more intuitive and flexible
* (BC) 'disabled' must now be expressed as 'false' instead of 0
* (BC) 0 now means cache indefinitely
* 'true' is an alias for '0'
2021-08-25 12:12:41 -04:00
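A hedged sketch of the new value semantics: only the value meanings come from the commit message, while the specific *TTL property names and the caching block shown here are illustrative.

```json5
{
  "caching": {
    "authorTTL": 60,        // a plain number remains a finite TTL
    "wikiTTL": 0,           // 0 now means cache indefinitely (previously it meant disabled)
    "userNotesTTL": true,   // true is an alias for 0 (cache indefinitely)
    "submissionTTL": false  // false now disables caching (previously 0 did this)
  }
}
```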
FoxxMD
5274584d92 Improve readability/functionality for caching functions
* Make keys more readable by using plaintext for unique values and only hashing objects
* Improve author criteria caching by excluding item identifier from hash since result should be same at subreddit-level
2021-08-25 11:49:28 -04:00
FoxxMD
1d386c53a5 Make usernote author usage blind to author type
If it has to handle cached activities this will help
2021-08-25 11:30:19 -04:00
FoxxMD
d6e351b195 Fix missing TTL settings 2021-08-25 11:21:55 -04:00
FoxxMD
ea32dc0b62 Fix shared cache namespacing and key count
* Add special SHARED namespace for subreddits using default cache -- remove ns when cache is dedicated
* Check for redis cache type and include prefix pattern when getting key count
2021-08-24 15:59:15 -04:00
FoxxMD
dca57bb19e Fix key count display
Correctly count shared and non-shared
2021-08-24 15:58:04 -04:00
FoxxMD
43919f7f9c Fix connected users since cache can now be persistent 2021-08-24 15:57:48 -04:00
FoxxMD
a176b51148 Fix storing cache user result 2021-08-24 15:08:21 -04:00
FoxxMD
75ac5297df Refactor caching prefix building to provide unique defaults 2021-08-24 14:36:13 -04:00
FoxxMD
0ef2b99bd6 Refactor web to use more caching
* Implement invite code/data to use caching
* refactor operator config so session and invite use same cache instance
2021-08-24 14:10:12 -04:00
FoxxMD
9596a476b5 Improve caching for redis
* Re-add operator-level caching config so a global default cache config can be defined
* Expand provider options with index property so additional, redis-specific params can be provided
* namespace (prefix) bot and subreddit-level (when not shared) redis connections
* refactor subreddit and author name usage since it differs when objects are deserialized
* add a type guard for submission based on instance type OR object shape hint since deserialized activities are plain objects
2021-08-24 13:10:13 -04:00
FoxxMD
92f52cada5 Better implementation of user cache result
* When check is triggered also store rule results in cache -- makes actioned events more complete when actions run from cached result
* Add config option to toggle run actions on/off from cached check (Defaults to on)
2021-08-24 10:30:37 -04:00
FoxxMD
a482e852c5 Add empty state to actioned events view 2021-08-24 10:24:38 -04:00
FoxxMD
e9055e5205 Better display for actioned events
* Use checks triggered display instead of own events actions stats (since it already exists and is the same)
* More visual separation between action events using shadowed boxes
* Move timestamp and title into a header (more visual distinction) and use short hyperlink
* Move rule summary up into check field
2021-08-23 16:58:06 -04:00
FoxxMD
df2c40d9c1 Use local time format for actioned events timestamp 2021-08-23 16:56:25 -04:00
FoxxMD
fc4eeb47fa Replace newlines with spaces to make comment peeks more compact 2021-08-23 16:49:40 -04:00
FoxxMD
9fb3eaa611 Implement actioned event history
* Refactor action return value to return results/success/error
* Store action event data after triggered from manager
* Display last 25 actioned events in ui
2021-08-23 14:33:40 -04:00
FoxxMD
23394ab5c2 Use clearer wording for scope error 2021-08-23 12:16:32 -04:00
FoxxMD
5417b26417 Read operator config as json5/yaml 2021-08-23 12:10:39 -04:00
FoxxMD
b6d638d6c5 Implement easier way to determine if reddit api error is due to insufficient oauth scope 2021-08-23 12:00:49 -04:00
FoxxMD
af1dd09e2d Remove batch usernote actions for now 2021-08-23 11:51:01 -04:00
FoxxMD
c42e56c68f Fix window duration when no activities are returned from history rule subreddit filtering 2021-08-20 16:36:52 -04:00
FoxxMD
561a007850 Fix instance qs name on check url 2021-08-20 16:36:07 -04:00
58 changed files with 3566 additions and 1110 deletions

View File

@@ -100,7 +100,7 @@ Find detailed descriptions of all the Rules, with examples, below:
* [Repeat Activity](/docs/examples/repeatActivity)
* [History](/docs/examples/history)
* [Author](/docs/examples/author)
* Regex
* [Regex](/docs/examples/regex)
### Rule Set

View File

@@ -16,6 +16,7 @@ This directory contains example of valid, ready-to-go configurations for Context
* [Repeat Activity](/docs/examples/repeatActivity)
* [History](/docs/examples/history)
* [Author](/docs/examples/author)
* [Regex](/docs/examples/regex)
* [Toolbox User Notes](/docs/examples/userNotes)
* [Advanced Concepts](/docs/examples/advancedConcepts)
* [Rule Sets](/docs/examples/advancedConcepts/ruleSets.json5)

View File

@@ -0,0 +1,20 @@
The **Regex** rule matches on text content from a comment or submission in the same way automod uses regex. The rule, however, provides additional functionality that automod does not:
* Can set the **number** of matches that trigger the rule (`matchThreshold`)
These matches can then be used in conjunction with a [`window`](https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md) to test against activities from the history of the Author of the Activity being checked (including the Activity being checked):
* Can set the **number of Activities** that meet the `matchThreshold` to trigger the rule (`activityMatchThreshold`)
* Can set the **number of total matches** across all Activities to trigger the rule (`totalMatchThreshold`)
* Can set the **type of Activities** to check (`lookAt`)
* When an Activity is a Submission, can **specify which parts of the Submission to match against**, i.e. title, body, and/or url (`testOn`)
### Examples
* [Trigger if regex matches against the current activity](/docs/examples/regex/matchAnyCurrentActivity.json5)
* [Trigger if regex matches 5 times against the current activity](/docs/examples/regex/matchThresholdCurrentActivity.json5)
* [Trigger if regex matches against any part of a Submission](/docs/examples/regex/matchSubmissionParts.json5)
* [Trigger if regex matches any of Author's last 10 activities](/docs/examples/regex/matchHistoryActivity.json5)
* [Trigger if regex matches at least 3 of Author's last 10 activities](/docs/examples/regex/matchActivityThresholdHistory.json5)
* [Trigger if there are 5 regex matches in the Author's last 10 activities](/docs/examples/regex/matchTotalHistoryActivity.json5)
* [Trigger if there are 5 regex matches in the Author's last 10 comments](/docs/examples/regex/matchSubsetHistoryActivity.json5)

View File

@@ -0,0 +1,20 @@
// goes inside
// "rules": []
{
"name": "swear",
"kind": "regex",
"criteria": [
// triggers if more than 3 activities in the last 10 match the regex
{
"regex": "/fuck|shit|damn/",
// this differs from "totalMatchThreshold"
//
// activityMatchThreshold => # of activities from window must match regex
// totalMatchThreshold => # of matches across all activities from window must match regex
"activityMatchThreshold": "> 3",
// if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
// learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
"window": 10,
},
]
}

View File

@@ -0,0 +1,14 @@
// goes inside
// "rules": []
{
"name": "swear",
"kind": "regex",
"criteria": [
// triggers if current activity has more than 0 matches
{
"regex": "/fuck|shit|damn/",
// if "matchThreshold" is not specified it defaults to this -- default behavior is to trigger if there are any matches
// "matchThreshold": "> 0"
},
]
}

View File

@@ -0,0 +1,15 @@
// goes inside
// "rules": []
{
"name": "swear",
"kind": "regex",
"criteria": [
// triggers if any activity in the last 10 (including the current activity) matches the regex
{
"regex": "/fuck|shit|damn/",
// if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
// learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
"window": 10,
},
]
}

View File

@@ -0,0 +1,19 @@
// goes inside
// "rules": []
{
"name": "swear",
"kind": "regex",
"criteria": [
{
// triggers if the current activity has more than 0 matches
// if the activity is a submission then matches against title, body, and url
// if "testOn" is not provided then `title, body` are the defaults
"regex": "/fuck|shit|damn/",
"testOn": [
"title",
"body",
"url"
]
},
]
}

View File

@@ -0,0 +1,23 @@
// goes inside
// "rules": []
{
"name": "swear",
"kind": "regex",
"criteria": [
// triggers if there are more than 5 regex matches in the last 10 activities (comments only)
{
"regex": "/fuck|shit|damn/",
// this differs from "activityMatchThreshold"
//
// activityMatchThreshold => # of activities from window must match regex
// totalMatchThreshold => # of matches across all activities from window must match regex
"totalMatchThreshold": "> 5",
// if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
// learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
"window": 10,
// determines which activities from window to consider
//defaults to "all" (submissions and comments)
"lookAt": "comments",
},
]
}

View File

@@ -0,0 +1,13 @@
// goes inside
// "rules": []
{
"name": "swear",
"kind": "regex",
"criteria": [
{
"regex": "/fuck|shit|damn/",
// triggers if current activity has greater than 5 matches
"matchThreshold": "> 5"
},
]
}

View File

@@ -0,0 +1,21 @@
// goes inside
// "rules": []
{
"name": "swear",
"kind": "regex",
"criteria": [
// triggers if there are more than 5 regex matches in the last 10 activities (comments or submissions)
{
// triggers if there are more than 5 *total matches* across the last 10 activities
"regex": "/fuck|shit|damn/",
// this differs from "activityMatchThreshold"
//
// activityMatchThreshold => # of activities from window must match regex
// totalMatchThreshold => # of matches across all activities from window must match regex
"totalMatchThreshold": "> 5",
// if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
// learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
"window": 10,
},
]
}

package-lock.json (generated)
View File

@@ -9,6 +9,7 @@
"license": "ISC",
"dependencies": {
"@awaitjs/express": "^0.8.0",
"@stdlib/regexp-regexp": "^0.0.6",
"ajv": "^7.2.4",
"async": "^3.2.0",
"autolinker": "^3.14.3",
@@ -173,6 +174,112 @@
"url": "https://github.com/sindresorhus/is?sponsor=1"
}
},
"node_modules/@stdlib/regexp-regexp": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/@stdlib/regexp-regexp/-/regexp-regexp-0.0.6.tgz",
"integrity": "sha512-z/PfA7bpBe6FPNOAXEIu8Z5tePrJ1VXMMZBgcOIdxX2syM/tP4BuFUmeljb8zFXo0fnYxHlPuHqe7+SeIbV40Q==",
"os": [
"aix",
"darwin",
"freebsd",
"linux",
"macos",
"openbsd",
"sunos",
"win32",
"windows"
],
"dependencies": {
"@stdlib/utils-define-nonenumerable-read-only-property": "^0.0.x"
},
"engines": {
"node": ">=0.10.0",
"npm": ">2.7.0"
},
"funding": {
"type": "patreon",
"url": "https://www.patreon.com/athan"
}
},
"node_modules/@stdlib/types": {
"version": "0.0.13",
"resolved": "https://registry.npmjs.org/@stdlib/types/-/types-0.0.13.tgz",
"integrity": "sha512-8aPkDtaJM/XZENqhoj7BYuwENLGyxz1xfLIcf2zct7kLZMi0rODzks3n65LEMIR9Rh3rFDXlwc35XvzEkTpmZQ==",
"os": [
"aix",
"darwin",
"freebsd",
"linux",
"macos",
"openbsd",
"sunos",
"win32",
"windows"
],
"engines": {
"node": ">=0.10.0",
"npm": ">2.7.0"
},
"funding": {
"type": "patreon",
"url": "https://www.patreon.com/athan"
}
},
"node_modules/@stdlib/utils-define-nonenumerable-read-only-property": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/@stdlib/utils-define-nonenumerable-read-only-property/-/utils-define-nonenumerable-read-only-property-0.0.6.tgz",
"integrity": "sha512-j0NsSDFa0hUx9MtZ1mMj+Ak5nxI+cU7dEMB6C6JgJhkKDz9Hg1iT/KhzlGPaIn1Tsycvyydge906qVepjspXgQ==",
"os": [
"aix",
"darwin",
"freebsd",
"linux",
"macos",
"openbsd",
"sunos",
"win32",
"windows"
],
"dependencies": {
"@stdlib/types": "^0.0.x",
"@stdlib/utils-define-property": "^0.0.x"
},
"engines": {
"node": ">=0.10.0",
"npm": ">2.7.0"
},
"funding": {
"type": "patreon",
"url": "https://www.patreon.com/athan"
}
},
"node_modules/@stdlib/utils-define-property": {
"version": "0.0.7",
"resolved": "https://registry.npmjs.org/@stdlib/utils-define-property/-/utils-define-property-0.0.7.tgz",
"integrity": "sha512-druB11NwdtkDTP35lx8qeYnqXPcyddde78h1hflVB0OsKtFofUVbg0oo/pm30gBYFCWWtIhxL6iC7feeuUU/yQ==",
"os": [
"aix",
"darwin",
"freebsd",
"linux",
"macos",
"openbsd",
"sunos",
"win32",
"windows"
],
"dependencies": {
"@stdlib/types": "^0.0.x"
},
"engines": {
"node": ">=0.10.0",
"npm": ">2.7.0"
},
"funding": {
"type": "patreon",
"url": "https://www.patreon.com/athan"
}
},
"node_modules/@szmarczak/http-timer": {
"version": "4.0.6",
"resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-4.0.6.tgz",
@@ -4234,6 +4341,36 @@
"resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-4.0.1.tgz",
"integrity": "sha512-Qm9hBEBu18wt1PO2flE7LPb30BHMQt1eQgbV76YntdNk73XZGpn3izvGTYxbGgzXKgbCjiia0uxTd3aTNQrY/g=="
},
"@stdlib/regexp-regexp": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/@stdlib/regexp-regexp/-/regexp-regexp-0.0.6.tgz",
"integrity": "sha512-z/PfA7bpBe6FPNOAXEIu8Z5tePrJ1VXMMZBgcOIdxX2syM/tP4BuFUmeljb8zFXo0fnYxHlPuHqe7+SeIbV40Q==",
"requires": {
"@stdlib/utils-define-nonenumerable-read-only-property": "^0.0.x"
}
},
"@stdlib/types": {
"version": "0.0.13",
"resolved": "https://registry.npmjs.org/@stdlib/types/-/types-0.0.13.tgz",
"integrity": "sha512-8aPkDtaJM/XZENqhoj7BYuwENLGyxz1xfLIcf2zct7kLZMi0rODzks3n65LEMIR9Rh3rFDXlwc35XvzEkTpmZQ=="
},
"@stdlib/utils-define-nonenumerable-read-only-property": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/@stdlib/utils-define-nonenumerable-read-only-property/-/utils-define-nonenumerable-read-only-property-0.0.6.tgz",
"integrity": "sha512-j0NsSDFa0hUx9MtZ1mMj+Ak5nxI+cU7dEMB6C6JgJhkKDz9Hg1iT/KhzlGPaIn1Tsycvyydge906qVepjspXgQ==",
"requires": {
"@stdlib/types": "^0.0.x",
"@stdlib/utils-define-property": "^0.0.x"
}
},
"@stdlib/utils-define-property": {
"version": "0.0.7",
"resolved": "https://registry.npmjs.org/@stdlib/utils-define-property/-/utils-define-property-0.0.7.tgz",
"integrity": "sha512-druB11NwdtkDTP35lx8qeYnqXPcyddde78h1hflVB0OsKtFofUVbg0oo/pm30gBYFCWWtIhxL6iC7feeuUU/yQ==",
"requires": {
"@stdlib/types": "^0.0.x"
}
},
"@szmarczak/http-timer": {
"version": "4.0.6",
"resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-4.0.6.tgz",

View File

@@ -26,6 +26,7 @@
"license": "ISC",
"dependencies": {
"@awaitjs/express": "^0.8.0",
"@stdlib/regexp-regexp": "^0.0.6",
"ajv": "^7.2.4",
"async": "^3.2.0",
"autolinker": "^3.14.3",

View File

@@ -2,23 +2,33 @@ import {ActionJson, ActionConfig} from "./index";
import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../Rule";
import {ActionProcessResult} from "../Common/interfaces";
export class ApproveAction extends Action {
getKind() {
return 'Approve';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
//snoowrap typing issue, thinks comments can't be locked
// @ts-ignore
if (item.approved) {
this.logger.warn('Item is already approved');
return {
dryRun,
success: false,
result: 'Item is already approved'
}
}
if (!dryRun) {
// @ts-ignore
await item.approve();
}
return {
dryRun,
success: true,
}
}
}
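The diffs in this compare change every action's process() to return a new ActionProcessResult, which Action.handle() later merges into a fuller ActionResult. Neither interface declaration appears in this compare; from the object literals above and below, their shapes appear to be roughly the following (field names inferred from usage; a sketch, not the real declarations in Common/interfaces):

```typescript
// Inferred shapes only -- the real declarations live elsewhere and may differ.
interface ActionProcessResult {
    success: boolean;   // whether the action actually did what it intended
    dryRun: boolean;    // true when the side effect was skipped due to dry-run mode
    result?: string;    // human-readable summary, e.g. 'Item is already approved'
}

// What Action.handle() returns after merging the process() output
interface ActionResult extends ActionProcessResult {
    kind: string;       // e.g. 'Approve', 'Ban', 'Comment'
    name: string;       // unique name for the action from config
    run: boolean;       // whether process() was invoked at all
    runReason?: string; // why the action was skipped (itemIs/authorIs filters)
}
```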

View File

@@ -3,7 +3,7 @@ import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../Rule";
import {renderContent} from "../Utils/SnoowrapUtils";
import {Footer} from "../Common/interfaces";
import {ActionProcessResult, Footer} from "../Common/interfaces";
export class BanAction extends Action {
@@ -33,7 +33,7 @@ export class BanAction extends Action {
return 'Ban';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
const content = this.message === undefined ? undefined : await this.resources.getContent(this.message, item.subreddit);
const renderedBody = content === undefined ? undefined : await renderContent(content, item, ruleResults, this.resources.userNotes);
@@ -58,6 +58,11 @@ export class BanAction extends Action {
duration: this.duration
});
}
return {
dryRun,
success: true,
result: `Banned ${item.author.name} ${durText}${this.reason !== undefined ? ` (${this.reason})` : ''}`
};
}
}

View File

@@ -2,8 +2,9 @@ import Action, {ActionJson, ActionOptions} from "./index";
import {Comment} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {renderContent} from "../Utils/SnoowrapUtils";
import {Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
import {ActionProcessResult, Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
import {RuleResult} from "../Rule";
import {truncateStringToLength} from "../util";
export class CommentAction extends Action {
content: string;
@@ -32,7 +33,7 @@ export class CommentAction extends Action {
return 'Comment';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
const content = await this.resources.getContent(this.content, item.subreddit);
const body = await renderContent(content, item, ruleResults, this.resources.userNotes);
@@ -44,7 +45,11 @@ export class CommentAction extends Action {
if(item.archived) {
this.logger.warn('Cannot comment because Item is archived');
return;
return {
dryRun,
success: false,
result: 'Cannot comment because Item is archived'
};
}
let reply: Comment;
if(!dryRun) {
@@ -62,6 +67,19 @@ export class CommentAction extends Action {
// @ts-ignore
await reply.distinguish({sticky: this.sticky});
}
let modifiers = [];
if(this.distinguish) {
modifiers.push('Distinguished');
}
if(this.sticky) {
modifiers.push('Stickied');
}
const modifierStr = modifiers.length === 0 ? '' : `[${modifiers.join(' | ')}]`;
return {
dryRun,
success: true,
result: `${modifierStr}${this.lock ? ' - Locked Author\'s Activity - ' : ''}${truncateStringToLength(100)(body)}`
};
}
}

View File

@@ -2,24 +2,34 @@ import {ActionJson, ActionConfig} from "./index";
import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../Rule";
import {ActionProcessResult} from "../Common/interfaces";
export class LockAction extends Action {
getKind() {
return 'Lock';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
//snoowrap typing issue, thinks comments can't be locked
// @ts-ignore
if (item.locked) {
this.logger.warn('Item is already locked');
return {
dryRun,
success: false,
result: 'Item is already locked'
};
}
if (!dryRun) {
//snoowrap typing issue, thinks comments can't be locked
// @ts-ignore
await item.lock();
}
return {
dryRun,
success: true
}
}
}

View File

@@ -2,9 +2,17 @@ import Action, {ActionJson, ActionOptions} from "./index";
import {Comment, ComposeMessageParams} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {renderContent} from "../Utils/SnoowrapUtils";
import {Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
import {ActionProcessResult, Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
import {RuleResult} from "../Rule";
import {boolToString} from "../util";
import {
asSubmission,
boolToString,
isSubmission,
parseRedditEntity,
REDDIT_ENTITY_REGEX_URL,
truncateStringToLength
} from "../util";
import SimpleError from "../Utils/SimpleError";
export class MessageAction extends Action {
content: string;
@@ -14,6 +22,7 @@ export class MessageAction extends Action {
footer?: false | string;
title?: string;
to?: string;
asSubreddit: boolean;
constructor(options: MessageActionOptions) {
@@ -23,7 +32,9 @@ export class MessageAction extends Action {
asSubreddit,
title,
footer,
to,
} = options;
this.to = to;
this.footer = footer;
this.content = content;
this.asSubreddit = asSubreddit;
@@ -34,7 +45,7 @@ export class MessageAction extends Action {
return 'Message';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
const content = await this.resources.getContent(this.content);
const body = await renderContent(content, item, ruleResults, this.resources.userNotes);
@@ -42,19 +53,38 @@ export class MessageAction extends Action {
const footer = await this.resources.generateFooter(item, this.footer);
const renderedContent = `${body}${footer}`;
// @ts-ignore
const author = await item.author.fetch() as RedditUser;
let recipient = item.author.name;
if(this.to !== undefined) {
// parse to value
try {
const entityData = parseRedditEntity(this.to);
if(entityData.type === 'user') {
recipient = entityData.name;
} else {
recipient = `/r/${entityData.name}`;
}
} catch (err) {
this.logger.error(`'to' field for message was not in a valid format. See ${REDDIT_ENTITY_REGEX_URL} for valid examples`);
this.logger.error(err);
err.logged = true;
throw err;
}
if(recipient.includes('/r/') && this.asSubreddit) {
throw new SimpleError(`Cannot send a message as a subreddit to another subreddit. Requested recipient: ${recipient}`);
}
}
const msgOpts: ComposeMessageParams = {
to: author,
to: recipient,
text: renderedContent,
// @ts-ignore
fromSubreddit: this.asSubreddit ? await item.subreddit.fetch() : undefined,
subject: this.title || `Concerning your ${item instanceof Submission ? 'Submission' : 'Comment'}`,
subject: this.title || `Concerning your ${isSubmission(item) ? 'Submission' : 'Comment'}`,
};
const msgPreview = `\r\n
TO: ${author.name}\r\n
TO: ${recipient}\r\n
Subject: ${msgOpts.subject}\r\n
Sent As Modmail: ${boolToString(this.asSubreddit)}\r\n\r\n
${renderedContent}`;
@@ -64,6 +94,11 @@ export class MessageAction extends Action {
if (!dryRun) {
await this.client.composeMessage(msgOpts);
}
return {
dryRun,
success: true,
result: truncateStringToLength(200)(msgPreview)
}
}
}
@@ -73,6 +108,24 @@ export interface MessageActionConfig extends RequiredRichContent, Footer {
* */
asSubreddit: boolean
/**
* Entity to send message to.
*
* If not present the Message will be sent to the Author of the Activity being checked.
*
* Valid formats:
*
* * `aUserName` -- send to /u/aUserName
* * `u/aUserName` -- send to /u/aUserName
* `r/aSubreddit` -- send to modmail of /r/aSubreddit
*
* **Note:** Reddit does not support sending a message AS a subreddit TO another subreddit
*
* @pattern ^\s*(\/[ru]\/|[ru]\/)*(\w+)*\s*$
* @examples ["aUserName","u/aUserName","r/aSubreddit"]
* */
to?: string
/**
* The title of the message
*

View File

@@ -3,24 +3,33 @@ import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../Rule";
import {activityIsRemoved} from "../Utils/SnoowrapUtils";
import {ActionProcessResult} from "../Common/interfaces";
export class RemoveAction extends Action {
getKind() {
return 'Remove';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
// issue with snoowrap typings, doesn't think prop exists on Submission
// @ts-ignore
if (activityIsRemoved(item)) {
this.logger.warn('Item is already removed');
return;
return {
dryRun,
success: false,
result: 'Item is already removed',
}
}
if (!dryRun) {
// @ts-ignore
await item.remove();
}
return {
dryRun,
success: true,
}
}
}

View File

@@ -4,7 +4,7 @@ import Snoowrap, {Comment, Submission} from "snoowrap";
import {truncateStringToLength} from "../util";
import {renderContent} from "../Utils/SnoowrapUtils";
import {RuleResult} from "../Rule";
import {RichContent} from "../Common/interfaces";
import {ActionProcessResult, RichContent} from "../Common/interfaces";
// https://www.reddit.com/dev/api/oauth#POST_api_report
// denotes 100 characters maximum
@@ -23,7 +23,7 @@ export class ReportAction extends Action {
return 'Report';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
const content = await this.resources.getContent(this.content, item.subreddit);
const renderedContent = await renderContent(content, item, ruleResults, this.resources.userNotes);
@@ -33,6 +33,12 @@ export class ReportAction extends Action {
// @ts-ignore
await item.report({reason: truncatedContent});
}
return {
dryRun,
success: true,
result: truncatedContent
};
}
}

View File

@@ -2,6 +2,7 @@ import {SubmissionActionConfig} from "./index";
import Action, {ActionJson, ActionOptions} from "../index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../../Rule";
import {ActionProcessResult} from "../../Common/interfaces";
export class FlairAction extends Action {
text: string;
@@ -20,7 +21,17 @@ export class FlairAction extends Action {
return 'Flair';
}
async process(item: Comment | Submission, ruleResults: RuleResult[]): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
let flairParts = [];
if(this.text !== '') {
flairParts.push(`Text: ${this.text}`);
}
if(this.css !== '') {
flairParts.push(`CSS: ${this.css}`);
}
const flairSummary = flairParts.length === 0 ? 'No flair (unflaired)' : flairParts.join(' | ');
this.logger.verbose(flairSummary);
if (item instanceof Submission) {
if(!this.dryRun) {
// @ts-ignore
@@ -28,6 +39,16 @@ export class FlairAction extends Action {
}
} else {
this.logger.warn('Cannot flair Comment');
return {
dryRun,
success: false,
result: 'Cannot flair Comment',
}
}
return {
dryRun,
success: true,
result: flairSummary
}
}
}

View File

@@ -5,6 +5,7 @@ import {renderContent} from "../Utils/SnoowrapUtils";
import {RuleResult} from "../Rule";
import {UserNote, UserNoteJson} from "../Subreddit/UserNotes";
import Submission from "snoowrap/dist/objects/Submission";
import {ActionProcessResult} from "../Common/interfaces";
export class UserNoteAction extends Action {
@@ -24,7 +25,7 @@ export class UserNoteAction extends Action {
return 'User Note';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
const content = await this.resources.getContent(this.content, item.subreddit);
const renderedContent = await renderContent(content, item, ruleResults, this.resources.userNotes);
@@ -35,7 +36,11 @@ export class UserNoteAction extends Action {
const existingNote = notes.find((x) => x.link.includes(item.id));
if (existingNote) {
this.logger.info(`Will not add note because one already exists for this Activity (${existingNote.time.local().format()}) and allowDuplicate=false`);
return;
return {
dryRun,
success: false,
result: `Will not add note because one already exists for this Activity (${existingNote.time.local().format()}) and allowDuplicate=false`
};
}
}
if (!dryRun) {
@@ -43,6 +48,11 @@ export class UserNoteAction extends Action {
} else if (!await this.resources.userNotes.warningExists(this.type)) {
this.logger.warn(`UserNote type '${this.type}' does not exist. If you meant to use this please add it through Toolbox first.`);
}
return {
success: true,
dryRun,
result: `(${this.type}) ${renderedContent}`
}
}
}

View File

@@ -2,9 +2,10 @@ import Snoowrap, {Comment, Submission} from "snoowrap";
import {Logger} from "winston";
import {RuleResult} from "../Rule";
import {SubredditResources} from "../Subreddit/SubredditResources";
import {ChecksActivityState, TypedActivityStates} from "../Common/interfaces";
import {ActionProcessResult, ActionResult, ChecksActivityState, TypedActivityStates} from "../Common/interfaces";
import Author, {AuthorOptions} from "../Author/Author";
import {mergeArr} from "../util";
import LoggedError from "../Utils/LoggedError";
export abstract class Action {
name?: string;
@@ -53,47 +54,61 @@ export abstract class Action {
return this.name === this.getKind() ? this.getKind() : `${this.getKind()} - ${this.name}`;
}
async handle(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
async handle(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionResult> {
const dryRun = runtimeDryrun || this.dryRun;
let actionRun = false;
const itemPass = await this.resources.testItemCriteria(item, this.itemIs);
if (!itemPass) {
this.logger.verbose(`Activity did not pass 'itemIs' test, Action not run`);
return;
}
const authorRun = async () => {
let actRes: ActionResult = {
kind: this.getKind(),
name: this.getActionUniqueName(),
run: false,
dryRun,
success: false,
};
try {
const itemPass = await this.resources.testItemCriteria(item, this.itemIs);
if (!itemPass) {
this.logger.verbose(`Activity did not pass 'itemIs' test, Action not run`);
actRes.runReason = `Activity did not pass 'itemIs' test, Action not run`;
return actRes;
}
if (this.authorIs.include !== undefined && this.authorIs.include.length > 0) {
for (const auth of this.authorIs.include) {
if (await this.resources.testAuthorCriteria(item, auth)) {
await this.process(item, ruleResults, runtimeDryrun);
return true;
actRes.run = true;
const results = await this.process(item, ruleResults, runtimeDryrun);
return {...actRes, ...results};
}
}
this.logger.verbose('Inclusive author criteria not matched, Action not run');
return false;
}
if (!actionRun && this.authorIs.exclude !== undefined && this.authorIs.exclude.length > 0) {
actRes.runReason = 'Inclusive author criteria not matched';
return actRes;
} else if (this.authorIs.exclude !== undefined && this.authorIs.exclude.length > 0) {
for (const auth of this.authorIs.exclude) {
if (await this.resources.testAuthorCriteria(item, auth, false)) {
await this.process(item, ruleResults, runtimeDryrun);
return true;
actRes.run = true;
const results = await this.process(item, ruleResults, runtimeDryrun);
return {...actRes, ...results};
}
}
this.logger.verbose('Exclusive author criteria not matched, Action not run');
return false;
actRes.runReason = 'Exclusive author criteria not matched';
return actRes;
}
return null;
actRes.run = true;
const results = await this.process(item, ruleResults, runtimeDryrun);
return {...actRes, ...results};
} catch (err) {
if(!(err instanceof LoggedError)) {
this.logger.error(`Encountered error while running`, err);
}
actRes.success = false;
actRes.result = err.message;
return actRes;
}
const authorRunResults = await authorRun();
if (null === authorRunResults) {
await this.process(item, ruleResults, runtimeDryrun);
} else if (!authorRunResults) {
return;
}
this.logger.verbose(`${dryRun ? 'DRYRUN - ' : ''}Done`);
}
abstract process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryun?: boolean): Promise<void>;
abstract process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryun?: boolean): Promise<ActionProcessResult>;
}
export interface ActionOptions extends ActionConfig {

View File

@@ -3,7 +3,6 @@ import dayjs, {Dayjs} from "dayjs";
import {getLogger} from "./Utils/loggerFactory";
import {Invokee, OperatorConfig} from "./Common/interfaces";
import Bot from "./Bot";
import {castArray} from "lodash";
import LoggedError from "./Utils/LoggedError";
export class App {
@@ -53,8 +52,11 @@ export class App {
}
async onTerminate(reason = 'The application was shutdown') {
for(const m of this.bots) {
//await m.notificationManager.handle('runStateChanged', 'Application Shutdown', reason);
for(const b of this.bots) {
for(const m of b.subManagers) {
await m.notificationManager.handle('runStateChanged', 'Application Shutdown', reason);
}
//await b.notificationManager.handle('runStateChanged', 'Application Shutdown', reason);
}
}

View File

@@ -3,7 +3,7 @@ import {Logger} from "winston";
import dayjs, {Dayjs} from "dayjs";
import {Duration} from "dayjs/plugin/duration";
import EventEmitter from "events";
import {BotInstanceConfig, Invokee, PAUSED, RUNNING, SYSTEM} from "../Common/interfaces";
import {BotInstanceConfig, Invokee, PAUSED, RUNNING, STOPPED, SYSTEM, USER} from "../Common/interfaces";
import {
createRetryHandler,
formatNumber,
@@ -33,12 +33,15 @@ class Bot {
excludeSubreddits: string[];
subManagers: Manager[] = [];
heartbeatInterval: number;
nextHeartbeat?: Dayjs;
nextHeartbeat: Dayjs = dayjs();
heartBeating: boolean = false;
softLimit: number | string = 250;
hardLimit: number | string = 50;
nannyMode?: 'soft' | 'hard';
nannyRunning: boolean = false;
nextNannyCheck: Dayjs = dayjs().add(10, 'second');
nannyRetryHandler: Function;
nextExpiration: Dayjs = dayjs();
botName?: string;
botLink?: string;
@@ -179,6 +182,7 @@ class Bot {
}
const retryHandler = createRetryHandler({maxRequestRetry: 8, maxOtherRetry: 1}, this.logger);
this.nannyRetryHandler = createRetryHandler({maxRequestRetry: 5, maxOtherRetry: 1}, this.logger);
const modStreamErrorListener = (name: string) => async (err: any) => {
this.logger.error('Polling error occurred', err);
@@ -322,7 +326,7 @@ class Bot {
async destroy(causedBy: Invokee) {
this.logger.info('Stopping heartbeat and nanny processes, may take up to 5 seconds...');
const processWait = Promise.all([pEvent(this.emitter, 'heartbeatStopped'), pEvent(this.emitter, 'nannyStopped')]);
const processWait = pEvent(this.emitter, 'healthStopped');
this.running = false;
await processWait;
for (const manager of this.subManagers) {
@@ -361,144 +365,210 @@ class Bot {
await this.runModStreams();
this.running = true;
this.runApiNanny();
this.nextNannyCheck = dayjs().add(10, 'second');
this.nextHeartbeat = dayjs().add(this.heartbeatInterval, 'second');
await this.healthLoop();
}
async healthLoop() {
while (this.running) {
await sleep(5000);
if (!this.running) {
break;
}
if (dayjs().isSameOrAfter(this.nextNannyCheck)) {
try {
await this.runApiNanny();
this.nextNannyCheck = dayjs().add(10, 'second');
} catch (err) {
this.logger.info('Delaying next nanny check for 1 minute due to emitted error');
this.nextNannyCheck = dayjs().add(120, 'second');
}
}
if(dayjs().isSameOrAfter(this.nextHeartbeat)) {
try {
await this.heartbeat();
} catch (err) {
this.logger.error(`Error occurred during heartbeat check: ${err.message}`);
}
this.nextHeartbeat = dayjs().add(this.heartbeatInterval, 'second');
}
}
this.emitter.emit('healthStopped');
}
async heartbeat() {
const heartbeat = `HEARTBEAT -- API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ~${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion === undefined ? 'N/A' : this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`
this.logger.info(heartbeat);
for (const s of this.subManagers) {
if(s.botState.state === STOPPED && s.botState.causedBy === USER) {
this.logger.debug('Skipping config check/restart on heartbeat due to previously being stopped by user', {subreddit: s.displayLabel});
continue;
}
try {
const newConfig = await s.parseConfiguration();
if(newConfig || (s.queueState.state !== RUNNING && s.queueState.causedBy === SYSTEM))
{
await s.startQueue('system', {reason: newConfig ? 'Config updated on heartbeat triggered reload' : 'Heartbeat detected non-running queue'});
}
if(newConfig || (s.eventsState.state !== RUNNING && s.eventsState.causedBy === SYSTEM))
{
await s.startEvents('system', {reason: newConfig ? 'Config updated on heartbeat triggered reload' : 'Heartbeat detected non-running events'});
}
if(s.botState.state !== RUNNING && s.eventsState.state === RUNNING && s.queueState.state === RUNNING) {
s.botState = {
state: RUNNING,
causedBy: 'system',
}
}
} catch (err) {
this.logger.info('Stopping event polling to prevent activity processing queue from backing up. Will be restarted when config update succeeds.')
await s.stopEvents('system', {reason: 'Invalid config will cause events to pile up in queue. Will be restarted when config update succeeds (next heartbeat).'});
if(!(err instanceof LoggedError)) {
this.logger.error(err, {subreddit: s.displayLabel});
}
if(this.nextHeartbeat !== undefined) {
this.logger.info(`Will retry parsing config on next heartbeat (in ${dayjs.duration(this.nextHeartbeat.diff(dayjs())).humanize()})`, {subreddit: s.displayLabel});
}
}
}
await this.runModStreams(true);
}
async runApiNanny() {
try {
mainLoop:
while (this.running) {
for(let i = 0; i < 2; i++) {
await sleep(5000);
if (!this.running) {
break mainLoop;
}
}
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
const nowish = dayjs().add(10, 'second');
if (nowish.isAfter(this.nextExpiration)) {
// it's possible no api calls are being made because of a hard limit
// need to make an api call to update this
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
const nowish = dayjs().add(10, 'second');
if (nowish.isAfter(this.nextExpiration)) {
// it's possible no api calls are being made because of a hard limit
// need to make an api call to update this
let shouldRetry = true;
while (shouldRetry) {
try {
// @ts-ignore
await this.client.getMe();
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
}
const rollingSample = this.apiSample.slice(0, 7)
rollingSample.unshift(this.client.ratelimitRemaining);
this.apiSample = rollingSample;
const diff = this.apiSample.reduceRight((acc: number[], curr, index) => {
if (this.apiSample[index + 1] !== undefined) {
const d = Math.abs(curr - this.apiSample[index + 1]);
if (d === 0) {
return [...acc, 0];
}
return [...acc, d / 10];
shouldRetry = false;
} catch (err) {
shouldRetry = await this.nannyRetryHandler(err);
if (!shouldRetry) {
throw err;
}
return acc;
}, []);
this.apiRollingAvg = diff.reduce((acc, curr) => acc + curr, 0) / diff.length; // api requests per second
this.depletedInSecs = this.client.ratelimitRemaining / this.apiRollingAvg; // number of seconds until current remaining limit is 0
this.apiEstDepletion = dayjs.duration({seconds: this.depletedInSecs});
this.logger.debug(`API Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`);
let hardLimitHit = false;
if (typeof this.hardLimit === 'string') {
const hardDur = parseDuration(this.hardLimit);
hardLimitHit = hardDur.asSeconds() > this.apiEstDepletion.asSeconds();
} else {
hardLimitHit = this.hardLimit > this.client.ratelimitRemaining;
}
if (hardLimitHit) {
if (this.nannyMode === 'hard') {
continue;
}
this.logger.info(`Detected HARD LIMIT of ${this.hardLimit} remaining`, {leaf: 'Api Nanny'});
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${this.apiRollingAvg}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
this.logger.info(`All subreddit event polling has been paused`, {leaf: 'Api Nanny'});
for (const m of this.subManagers) {
m.pauseEvents('system');
m.notificationManager.handle('runStateChanged', 'Hard Limit Triggered', `Hard Limit of ${this.hardLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit event polling has been paused.`, 'system', 'warn');
}
this.nannyMode = 'hard';
continue;
}
let softLimitHit = false;
if (typeof this.softLimit === 'string') {
const softDur = parseDuration(this.softLimit);
softLimitHit = softDur.asSeconds() > this.apiEstDepletion.asSeconds();
} else {
softLimitHit = this.softLimit > this.client.ratelimitRemaining;
}
if (softLimitHit) {
if (this.nannyMode === 'soft') {
continue;
}
this.logger.info(`Detected SOFT LIMIT of ${this.softLimit} remaining`, {leaf: 'Api Nanny'});
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
this.logger.info('Trying to detect heavy usage subreddits...', {leaf: 'Api Nanny'});
let threshold = 0.5;
let offenders = this.subManagers.filter(x => {
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
return combinedPerSec > threshold;
});
if (offenders.length === 0) {
threshold = 0.25;
// reduce threshold
offenders = this.subManagers.filter(x => {
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
return combinedPerSec > threshold;
});
}
if (offenders.length > 0) {
this.logger.info(`Slowing subreddits using >- ${threshold}req/s:`, {leaf: 'Api Nanny'});
for (const m of offenders) {
m.delayBy = 1.5;
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
}
} else {
this.logger.info(`Couldn't detect specific offenders, slowing all...`, {leaf: 'Api Nanny'});
for (const m of this.subManagers) {
m.delayBy = 1.5;
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
}
}
this.nannyMode = 'soft';
continue;
}
if (this.nannyMode !== undefined) {
this.logger.info('Turning off due to better conditions...', {leaf: 'Api Nanny'});
for (const m of this.subManagers) {
if (m.delayBy !== undefined) {
m.delayBy = undefined;
m.notificationManager.handle('runStateChanged', 'Normal Processing Resumed', 'Slow Mode has been turned off due to better API conditions', 'system');
}
if (m.queueState.state === PAUSED && m.queueState.causedBy === SYSTEM) {
m.startQueue('system', {reason: 'API Nanny has been turned off due to better API conditions'});
}
if (m.eventsState.state === PAUSED && m.eventsState.causedBy === SYSTEM) {
await m.startEvents('system', {reason: 'API Nanny has been turned off due to better API conditions'});
}
}
this.nannyMode = undefined;
}
}
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
}
const rollingSample = this.apiSample.slice(0, 7)
rollingSample.unshift(this.client.ratelimitRemaining);
this.apiSample = rollingSample;
const diff = this.apiSample.reduceRight((acc: number[], curr, index) => {
if (this.apiSample[index + 1] !== undefined) {
const d = Math.abs(curr - this.apiSample[index + 1]);
if (d === 0) {
return [...acc, 0];
}
return [...acc, d / 10];
}
return acc;
}, []);
this.apiRollingAvg = diff.reduce((acc, curr) => acc + curr, 0) / diff.length; // api requests per second
this.depletedInSecs = this.client.ratelimitRemaining / this.apiRollingAvg; // number of seconds until current remaining limit is 0
this.apiEstDepletion = dayjs.duration({seconds: this.depletedInSecs});
this.logger.debug(`API Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`);
let hardLimitHit = false;
if (typeof this.hardLimit === 'string') {
const hardDur = parseDuration(this.hardLimit);
hardLimitHit = hardDur.asSeconds() > this.apiEstDepletion.asSeconds();
} else {
hardLimitHit = this.hardLimit > this.client.ratelimitRemaining;
}
if (hardLimitHit) {
if (this.nannyMode === 'hard') {
return;
}
this.logger.info(`Detected HARD LIMIT of ${this.hardLimit} remaining`, {leaf: 'Api Nanny'});
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${this.apiRollingAvg}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
this.logger.info(`All subreddit event polling has been paused`, {leaf: 'Api Nanny'});
for (const m of this.subManagers) {
m.pauseEvents('system');
m.notificationManager.handle('runStateChanged', 'Hard Limit Triggered', `Hard Limit of ${this.hardLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit event polling has been paused.`, 'system', 'warn');
}
this.nannyMode = 'hard';
return;
}
let softLimitHit = false;
if (typeof this.softLimit === 'string') {
const softDur = parseDuration(this.softLimit);
softLimitHit = softDur.asSeconds() > this.apiEstDepletion.asSeconds();
} else {
softLimitHit = this.softLimit > this.client.ratelimitRemaining;
}
if (softLimitHit) {
if (this.nannyMode === 'soft') {
return;
}
this.logger.info(`Detected SOFT LIMIT of ${this.softLimit} remaining`, {leaf: 'Api Nanny'});
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
this.logger.info('Trying to detect heavy usage subreddits...', {leaf: 'Api Nanny'});
let threshold = 0.5;
let offenders = this.subManagers.filter(x => {
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
return combinedPerSec > threshold;
});
if (offenders.length === 0) {
threshold = 0.25;
// reduce threshold
offenders = this.subManagers.filter(x => {
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
return combinedPerSec > threshold;
});
}
if (offenders.length > 0) {
this.logger.info(`Slowing subreddits using >- ${threshold}req/s:`, {leaf: 'Api Nanny'});
for (const m of offenders) {
m.delayBy = 1.5;
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
}
} else {
this.logger.info(`Couldn't detect specific offenders, slowing all...`, {leaf: 'Api Nanny'});
for (const m of this.subManagers) {
m.delayBy = 1.5;
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
}
}
this.nannyMode = 'soft';
return
}
if (this.nannyMode !== undefined) {
this.logger.info('Turning off due to better conditions...', {leaf: 'Api Nanny'});
for (const m of this.subManagers) {
if (m.delayBy !== undefined) {
m.delayBy = undefined;
m.notificationManager.handle('runStateChanged', 'Normal Processing Resumed', 'Slow Mode has been turned off due to better API conditions', 'system');
}
if (m.queueState.state === PAUSED && m.queueState.causedBy === SYSTEM) {
m.startQueue('system', {reason: 'API Nanny has been turned off due to better API conditions'});
}
if (m.eventsState.state === PAUSED && m.eventsState.causedBy === SYSTEM) {
await m.startEvents('system', {reason: 'API Nanny has been turned off due to better API conditions'});
}
}
this.nannyMode = undefined;
}
} catch (err) {
this.logger.error('Error occurred during nanny loop', err);
this.logger.error(`Error occurred during nanny loop: ${err.message}`);
throw err;
} finally {
this.logger.info('Nanny stopped');
this.emitter.emit('nannyStopped');
}
}
}

View File

@@ -1,6 +1,7 @@
import {Check, CheckOptions, userResultCacheDefault, UserResultCacheOptions} from "./index";
import {CommentState} from "../Common/interfaces";
import {CommentState, UserResultCache} from "../Common/interfaces";
import {Submission, Comment} from "snoowrap/dist/objects";
import {RuleResult} from "../Rule";
export interface CommentCheckOptions extends CheckOptions {
cacheUserResult?: UserResultCacheOptions;
@@ -9,20 +10,12 @@ export interface CommentCheckOptions extends CheckOptions {
export class CommentCheck extends Check {
itemIs: CommentState[];
cacheUserResult: Required<UserResultCacheOptions>;
constructor(options: CommentCheckOptions) {
super(options);
const {
itemIs = [],
cacheUserResult = {},
} = options;
this.cacheUserResult = {
...userResultCacheDefault,
...cacheUserResult
}
this.itemIs = itemIs;
this.logSummary();
}
@@ -31,7 +24,7 @@ export class CommentCheck extends Check {
super.logSummary('comment');
}
async getCacheResult(item: Submission | Comment): Promise<boolean | undefined> {
async getCacheResult(item: Submission | Comment): Promise<UserResultCache | undefined> {
if (this.cacheUserResult.enable) {
return await this.resources.getCommentCheckCacheResult(item as Comment, {
name: this.name,
@@ -42,13 +35,22 @@ export class CommentCheck extends Check {
return undefined;
}
async setCacheResult(item: Submission | Comment, result: boolean): Promise<void> {
async setCacheResult(item: Submission | Comment, result: UserResultCache): Promise<void> {
if (this.cacheUserResult.enable) {
const {result: outcome, ruleResults} = result;
const res: UserResultCache = {
result: outcome,
// don't need to cache rule results if check was not triggered
// since we only use rule results for actions
ruleResults: outcome ? ruleResults : []
};
await this.resources.setCommentCheckCacheResult(item as Comment, {
name: this.name,
authorIs: this.authorIs,
itemIs: this.itemIs
}, result, this.cacheUserResult.ttl)
}, res, this.cacheUserResult.ttl)
}
}
}

View File

@@ -1,6 +1,7 @@
import {Check, CheckOptions} from "./index";
import {SubmissionState} from "../Common/interfaces";
import {SubmissionState, UserResultCache} from "../Common/interfaces";
import {Submission, Comment} from "snoowrap/dist/objects";
import {RuleResult} from "../Rule";
export class SubmissionCheck extends Check {
itemIs: SubmissionState[];
@@ -15,11 +16,4 @@ export class SubmissionCheck extends Check {
logSummary() {
super.logSummary('submission');
}
async getCacheResult(item: Submission | Comment) {
return undefined;
}
async setCacheResult(item: Submission | Comment, result: boolean) {
}
}

View File

@@ -16,12 +16,13 @@ import {
truncateStringToLength
} from "../util";
import {
ActionResult,
ChecksActivityState,
CommentState,
JoinCondition,
JoinOperands,
SubmissionState,
TypedActivityStates
TypedActivityStates, UserResultCache
} from "../Common/interfaces";
import * as RuleSchema from '../Schema/Rule.json';
import * as RuleSetSchema from '../Schema/RuleSet.json';
@@ -45,6 +46,7 @@ export abstract class Check implements ICheck {
include: AuthorCriteria[],
exclude: AuthorCriteria[]
};
cacheUserResult: Required<UserResultCacheOptions>;
dryRun?: boolean;
notifyOnTrigger: boolean;
resources: SubredditResources;
@@ -62,6 +64,7 @@ export abstract class Check implements ICheck {
actions = [],
notifyOnTrigger = false,
subredditName,
cacheUserResult = {},
itemIs = [],
authorIs: {
include = [],
@@ -88,6 +91,10 @@ export abstract class Check implements ICheck {
exclude: exclude.map(x => new Author(x)),
include: include.map(x => new Author(x)),
}
this.cacheUserResult = {
...userResultCacheDefault,
...cacheUserResult
}
this.dryRun = dryRun;
for (const r of rules) {
if (r instanceof Rule || r instanceof RuleSet) {
@@ -170,10 +177,14 @@ export abstract class Check implements ICheck {
}
}
abstract getCacheResult(item: Submission | Comment) : Promise<boolean | undefined>;
abstract setCacheResult(item: Submission | Comment, result: boolean): void;
async getCacheResult(item: Submission | Comment) : Promise<UserResultCache | undefined> {
return undefined;
}
async runRules(item: Submission | Comment, existingResults: RuleResult[] = []): Promise<[boolean, RuleResult[]]> {
async setCacheResult(item: Submission | Comment, result: UserResultCache): Promise<void> {
}
async runRules(item: Submission | Comment, existingResults: RuleResult[] = []): Promise<[boolean, RuleResult[], boolean?]> {
try {
let allRuleResults: RuleResult[] = [];
let allResults: (RuleResult | RuleSetResult)[] = [];
@@ -182,7 +193,7 @@ export abstract class Check implements ICheck {
const cacheResult = await this.getCacheResult(item);
if(cacheResult !== undefined) {
this.logger.verbose(`Skipping rules run because result was found in cache, Check Triggered Result: ${cacheResult}`);
return [cacheResult, allRuleResults];
return [cacheResult.result, cacheResult.ruleResults, true];
}
const itemPass = await this.resources.testItemCriteria(item, this.itemIs);
@@ -264,23 +275,27 @@ export abstract class Check implements ICheck {
}
}
async runActions(item: Submission | Comment, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<Action[]> {
async runActions(item: Submission | Comment, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionResult[]> {
const dr = runtimeDryrun || this.dryRun;
this.logger.debug(`${dr ? 'DRYRUN - ' : ''}Running Actions`);
const runActions: Action[] = [];
const runActions: ActionResult[] = [];
for (const a of this.actions) {
if(!a.enabled) {
runActions.push({
kind: a.getKind(),
name: a.getActionUniqueName(),
run: false,
success: false,
runReason: 'Not enabled',
dryRun: (a.dryRun || dr) || false,
});
this.logger.info(`Action ${a.getActionUniqueName()} not run because it is not enabled.`);
continue;
}
try {
await a.handle(item, ruleResults, runtimeDryrun);
runActions.push(a);
} catch (err) {
this.logger.error(`Action ${a.getActionUniqueName()} encountered an error while running`, err);
}
const res = await a.handle(item, ruleResults, runtimeDryrun);
runActions.push(res);
}
this.logger.info(`${dr ? 'DRYRUN - ' : ''}Ran Actions: ${runActions.map(x => x.getActionUniqueName()).join(' | ')}`);
this.logger.info(`${dr ? 'DRYRUN - ' : ''}Ran Actions: ${runActions.map(x => x.name).join(' | ')}`);
return runActions;
}
}
@@ -337,6 +352,7 @@ export interface CheckOptions extends ICheck {
notifyOnTrigger?: boolean
resources: SubredditResources
client: Snoowrap
cacheUserResult?: UserResultCacheOptions;
}
export interface CheckJson extends ICheck {
@@ -371,6 +387,8 @@ export interface CheckJson extends ICheck {
* @default false
* */
notifyOnTrigger?: boolean,
cacheUserResult?: UserResultCacheOptions;
}
export interface SubmissionCheckJson extends CheckJson {
@@ -388,6 +406,9 @@ export interface SubmissionCheckJson extends CheckJson {
* 3. The rule results are not likely to change while cache is valid
* */
export interface UserResultCacheOptions {
/**
* @default false
* */
enable?: boolean,
/**
* The amount of time, in seconds, to cache this result
@@ -396,17 +417,23 @@ export interface UserResultCacheOptions {
* @examples [60]
* */
ttl?: number,
/**
* In the event the cache returns a triggered result, should the actions for the check also be run?
*
* @default true
* */
runActions?: boolean
}
export const userResultCacheDefault: Required<UserResultCacheOptions> = {
enable: false,
ttl: 60,
runActions: true,
}
export interface CommentCheckJson extends CheckJson {
kind: 'comment'
itemIs?: CommentState[]
cacheUserResult?: UserResultCacheOptions
}
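For illustration only (not part of the commit): a minimal sketch of a comment check config using the `cacheUserResult` options above. The check name, rule reference, and action are hypothetical placeholders; only the `cacheUserResult` shape comes from the interfaces shown here.
// Hypothetical CommentCheckJson-shaped object (names are placeholders, not from the repo)
const repeatSpamCheck = {
    name: 'repeatSpamComments',
    kind: 'comment' as const,
    rules: ['repeatActivity'],          // assumed rule reference
    actions: [{kind: 'remove'}],        // assumed action
    cacheUserResult: {
        enable: true,                   // default is false (see userResultCacheDefault)
        ttl: 60,                        // seconds the cached result is kept
        runActions: true                // run actions again when a cached triggered result is returned
    }
};
console.log(JSON.stringify(repeatSpamCheck, null, 2));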
export type CheckStructuredJson = SubmissionCheckStructuredJson | CommentCheckStructuredJson;

View File

@@ -1,2 +1,2 @@
export const cacheOptDefaults = {ttl: 60, max: 500, checkPeriod: 600};
export const cacheTTLDefaults = {authorTTL: 60, userNotesTTL: 300, wikiTTL: 300, submissionTTL: 60, commentTTL: 60, filterCriteriaTTL: 60};
export const cacheTTLDefaults = {authorTTL: 60, userNotesTTL: 300, wikiTTL: 300, submissionTTL: 60, commentTTL: 60, filterCriteriaTTL: 60, subredditTTL: 600};

View File

@@ -3,6 +3,7 @@ import {Cache} from 'cache-manager';
import {MESSAGE} from 'triple-beam';
import Poll from "snoostorm/out/util/Poll";
import Snoowrap from "snoowrap";
import {RuleResult} from "../Rule";
/**
* An ISO 8601 Duration
@@ -381,49 +382,116 @@ export interface PollingOptions extends PollingDefaults {
export interface TTLConfig {
/**
* Amount of time, in seconds, author activities (Comments/Submission) should be cached
* Amount of time, in seconds, author activity history (Comments/Submission) should be cached
*
* * If `0` or `true` will cache indefinitely (not recommended)
* * If `false` will not cache
*
* * ENV => `AUTHOR_TTL`
* * ARG => `--authorTTL <sec>`
* @examples [60]
* @default 60
* */
authorTTL?: number;
authorTTL?: number | boolean;
/**
* Amount of time, in seconds, wiki content pages should be cached
*
* * If `0` or `true` will cache indefinitely (not recommended)
* * If `false` will not cache
*
* @examples [300]
* @default 300
* */
wikiTTL?: number;
wikiTTL?: number | boolean;
/**
* Amount of time, in milliseconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached
* Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached
*
* * If `0` or `true` will cache indefinitely (not recommended)
* * If `false` will not cache
*
* @examples [300]
* @default 300
* */
userNotesTTL?: number;
userNotesTTL?: number | boolean;
/**
* Amount of time, in seconds, a submission should be cached
*
* * If `0` or `true` will cache indefinitely (not recommended)
* * If `false` will not cache
*
* @examples [60]
* @default 60
* */
submissionTTL?: number;
submissionTTL?: number | boolean;
/**
* Amount of time, in seconds, a comment should be cached
*
* * If `0` or `true` will cache indefinitely (not recommended)
* * If `false` will not cache
*
* @examples [60]
* @default 60
* */
commentTTL?: number;
commentTTL?: number | boolean;
/**
* Amount of time, in seconds, a subreddit (attributes) should be cached
*
* * If `0` or `true` will cache indefinitely (not recommended)
* * If `false` will not cache
*
* @examples [600]
* @default 600
* */
subredditTTL?: number | boolean;
/**
* Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)
*
* This is especially useful when polling high-volume comments and your checks rely on author/item filters
*
* * If `0` or `true` will cache indefinitely (not recommended)
* * If `false` will not cache
*
* @examples [60]
* @default 60
* */
filterCriteriaTTL?: number;
filterCriteriaTTL?: number | boolean;
}
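A minimal sketch, not code from the repo, of how a `number | boolean` TTL value could be normalized following the semantics documented above (`0` or `true` = cache indefinitely, `false` = no caching). The helper name and the `undefined`-means-disabled convention are assumptions for illustration.
// Illustrative only: normalize a TTL setting; 0 = cache indefinitely, undefined = caching disabled
function normalizeTTL(ttl: number | boolean | undefined, fallback: number): number | undefined {
    if (ttl === undefined) return fallback;   // fall back to the documented default
    if (ttl === false) return undefined;      // caching disabled
    if (ttl === true || ttl === 0) return 0;  // cache indefinitely
    return ttl;                               // plain value in seconds
}
// e.g. normalizeTTL(true, 60) => 0, normalizeTTL(false, 60) => undefined, normalizeTTL(undefined, 60) => 60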
export interface SubredditCacheConfig extends TTLConfig {
export interface CacheConfig extends TTLConfig {
/**
* The cache provider and, optionally, a custom configuration for that provider
*
* If not present or `null` provider will be `memory`.
*
* To specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`
* */
provider?: CacheProvider | CacheOptions
/**
* The **maximum** number of Events that the cache should store triggered result summaries for
*
* These summaries are viewable through the Web UI.
*
* The value specified by a subreddit cannot be larger than the value set by the Operator for the global/bot config (if set)
*
* @default 25
* @example [25]
* */
actionedEventsMax?: number
}
export interface OperatorCacheConfig extends CacheConfig {
/**
* The **default** number of Events that the cache will store triggered result summaries for
*
* These summaries are viewable through the Web UI.
*
* The value specified cannot be larger than `actionedEventsMax` for the global/bot config (if set)
*
* @default 25
* @example [25]
* */
actionedEventsDefault?: number
}
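As a hedged illustration of the provider and actioned-event settings above, a sketch of an operator-level caching block. The redis connection values are placeholders; only the property names come from `CacheConfig`/`OperatorCacheConfig`.
// Hypothetical OperatorCacheConfig-shaped object (connection values are placeholders)
const operatorCaching = {
    provider: {store: 'redis', host: 'localhost', port: 6379}, // or just 'redis' to use that provider's defaults
    wikiTTL: 300,
    filterCriteriaTTL: 60,
    actionedEventsMax: 100,     // cap that bot/subreddit configs cannot exceed
    actionedEventsDefault: 25   // used when a bot/subreddit does not set its own value
};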
export interface Footer {
@@ -503,7 +571,7 @@ export interface ManagerOptions {
/**
* Per-subreddit config for caching TTL values. If set to `false` caching is disabled.
* */
caching?: SubredditCacheConfig
caching?: CacheConfig
/**
* Use this option to override the `dryRun` setting for all `Checks`
@@ -607,6 +675,8 @@ export interface ActivityState {
stickied?: boolean
distinguished?: boolean
approved?: boolean
score?: CompareValue
reports?: CompareValue
}
/**
@@ -645,6 +715,41 @@ export interface CommentState extends ActivityState {
submissionState?: SubmissionState[]
}
/**
* Different attributes a `Subreddit` can have. Only include a property if you want to check it.
* @examples [{"over18": true}]
* */
export interface SubredditState {
/**
* Is subreddit quarantined?
* */
quarantine?: boolean
/**
* Is subreddit NSFW/over 18?
*
* **Note**: This is a **mod-controlled flag** so it is up to the mods of the subreddit to correctly mark their subreddit as NSFW
* */
over18?: boolean
/**
* The name of the subreddit.
*
* Can be a normal string (checked case-insensitively) or a regular expression
*
* EX `["mealtimevideos", "/onlyfans*\/i"]`
*
* @examples ["mealtimevideos", "/onlyfans*\/i"]
* */
name?: string | RegExp
/**
* A friendly description of what this State is trying to parse
* */
stateDescription?: string
}
export interface StrongSubredditState extends SubredditState {
name?: RegExp
}
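For illustration, a few `SubredditState` values matching the shapes documented above; the subreddit names are examples only.
// Illustrative SubredditState values
const nsfwSubs = {over18: true, stateDescription: 'any NSFW subreddit'};
const onlyFansLike = {name: /onlyfans*/i, stateDescription: 'onlyfans-style subreddit names'};
const byName = {name: 'mealtimevideos'}; // plain string names are checked case-insensitively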
export type TypedActivityStates = SubmissionState[] | CommentState[];
export interface DomainInfo {
@@ -692,13 +797,16 @@ export type CacheProvider = 'memory' | 'redis' | 'none';
// provider: CacheOptions
// }
export type StrongCache = {
authorTTL: number,
userNotesTTL: number,
wikiTTL: number,
submissionTTL: number,
commentTTL: number,
filterCriteriaTTL: number,
authorTTL: number | boolean,
userNotesTTL: number | boolean,
wikiTTL: number | boolean,
submissionTTL: number | boolean,
commentTTL: number | boolean,
subredditTTL: number | boolean,
filterCriteriaTTL: number | boolean,
provider: CacheOptions
actionedEventsMax?: number,
actionedEventsDefault: number,
}
/**
@@ -757,6 +865,8 @@ export interface CacheOptions {
* @examples [500]
* */
max?: number
[key:string]: any
}
export type NotificationProvider = 'discord';
@@ -1053,60 +1163,11 @@ export interface BotInstanceJsonConfig {
}
/**
* Settings to configure the default caching behavior for each subreddit
* Settings to configure the default caching behavior for this bot
*
* Every setting not specified will default to what is specified by the global operator caching config
* */
caching?: {
/**
* Amount of time, in seconds, author activity history (Comments/Submission) should be cached
*
* * ENV => `AUTHOR_TTL`
* * ARG => `--authorTTL <sec>`
* @examples [60]
* @default 60
* */
authorTTL?: number;
/**
* Amount of time, in seconds, wiki content pages should be cached
* @examples [300]
* @default 300
* */
wikiTTL?: number;
/**
* Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached
* @examples [300]
* @default 300
* */
userNotesTTL?: number;
/**
* Amount of time, in seconds, a submission should be cached
* @examples [60]
* @default 60
* */
submissionTTL?: number;
/**
* Amount of time, in seconds, a comment should be cached
* @examples [60]
* @default 60
* */
commentTTL?: number;
/**
* Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)
*
* This is especially useful when polling high-volume comments and your checks rely on author/item filters
*
* @examples [60]
* @default 60
* */
filterCriteriaTTL?: number;
/**
* The cache provider and, optionally, a custom configuration for that provider
*
* If not present or `null` provider will be `memory`.
*
* To specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`
* */
provider?: CacheProvider | CacheOptions
}
caching?: OperatorCacheConfig
/**
* Settings related to managing heavy API usage.
* */
@@ -1214,6 +1275,13 @@ export interface OperatorJsonConfig {
path?: string,
},
/**
* Settings to configure the default caching behavior globally
*
* These settings will be used by each bot, and subreddit, that does not specify their own
* */
caching?: OperatorCacheConfig
bots?: BotInstanceJsonConfig[]
/**
@@ -1230,23 +1298,30 @@ export interface OperatorJsonConfig {
* @examples [8085]
* */
port?: number,
/**
* Caching provider to use for session and invite data
*
* If none is provided the top-level caching provider is used
* */
caching?: 'memory' | 'redis' | CacheOptions
/**
* Settings to configure the behavior of user sessions -- the session is what the web interface uses to identify logged in users.
* */
session?: {
/**
* The cache provider to use.
* Number of seconds a session should be valid for.
*
* The default should be sufficient for almost all use cases
* Default is 1 day
*
* @default "memory"
* @examples ["memory"]
* @default 86400
* @examples [86400]
* */
provider?: 'memory' | 'redis' | CacheOptions,
maxAge?: number
/**
* The secret value used to encrypt session data
*
* If provider is persistent (redis) specifying a value here will ensure sessions are valid between application restarts
* If provider is persistent (`redis`) specifying a value here will ensure sessions are valid between application restarts
*
* When not present or `null` a random string is generated on application start
*
@@ -1254,6 +1329,21 @@ export interface OperatorJsonConfig {
* */
secret?: string,
}
/**
* Settings related to oauth flow invites
* */
invites?: {
/**
* Number of seconds an invite should be valid for
*
* If `0` or not specified (default) invites do not expire
*
* @default 0
* @examples [0]
* */
maxAge?: number
}
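A hedged sketch of a web config fragment using the session and invite settings described above; the secret is a placeholder and would normally be omitted so a random one is generated at startup.
// Hypothetical web config fragment (values are examples only)
const web = {
    port: 8085,
    caching: 'redis',                   // provider for session/invite data; top-level provider is used if omitted
    session: {
        maxAge: 86400,                  // 1 day, in seconds
        secret: 'replace-with-long-random-string' // placeholder
    },
    invites: {
        maxAge: 0                       // 0 = invites never expire
    }
};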
/**
* The default log level to filter to in the web interface
*
@@ -1364,12 +1454,17 @@ export interface OperatorConfig extends OperatorJsonConfig {
level: LogLevel,
path?: string,
},
caching: StrongCache,
web: {
port: number,
caching: CacheOptions,
session: {
provider: CacheOptions,
maxAge: number,
secret: string,
}
},
invites: {
maxAge: number
},
logLevel?: LogLevel,
maxLogs: number,
clients: BotConnection[]
@@ -1410,3 +1505,45 @@ export interface LogInfo {
labels?: string[]
bot?: string
}
export interface ActionResult {
kind: string,
name: string,
run: boolean,
runReason?: string,
dryRun: boolean,
success: boolean,
result?: string,
}
export interface ActionProcessResult {
success: boolean,
dryRun: boolean,
result?: string
}
export interface ActionedEvent {
activity: {
peek: string
link: string
}
author: string
timestamp: number
check: string
ruleSummary: string,
subreddit: string,
ruleResults: RuleResult[]
actionResults: ActionResult[]
}
export interface UserResultCache {
result: boolean,
ruleResults: RuleResult[]
}
export type RedditEntityType = 'user' | 'subreddit';
export interface RedditEntity {
name: string
type: RedditEntityType
}

View File

@@ -1,12 +1,12 @@
import {Logger} from "winston";
import {
buildCacheOptionsFromProvider,
buildCacheOptionsFromProvider, buildCachePrefix,
createAjvFactory,
mergeArr,
normalizeName,
overwriteMerge,
parseBool, randomId,
readJson,
readConfigFile,
removeUndefinedKeys
} from "./util";
import {CommentCheck} from "./Check/CommentCheck";
@@ -43,6 +43,7 @@ import {operatorConfig} from "./Utils/CommandConfig";
import merge from 'deepmerge';
import * as process from "process";
import {cacheOptDefaults, cacheTTLDefaults} from "./Common/defaults";
import objectHash from "object-hash";
export interface ConfigBuilderOptions {
logger: Logger,
@@ -291,10 +292,6 @@ export const parseDefaultBotInstanceFromArgs = (args: any): BotInstanceJsonConfi
polling: {
sharedMod,
},
caching: {
provider: caching,
authorTTL
},
nanny: {
softLimit,
hardLimit
@@ -316,6 +313,8 @@ export const parseOpConfigFromArgs = (args: any): OperatorJsonConfig => {
sessionSecret,
web,
mode,
caching,
authorTTL,
} = args || {};
const data = {
@@ -328,6 +327,10 @@ export const parseOpConfigFromArgs = (args: any): OperatorJsonConfig => {
level: logLevel,
path: logDir === true ? `${process.cwd()}/logs` : undefined,
},
caching: {
provider: caching,
authorTTL
},
web: {
enabled: web,
port,
@@ -345,9 +348,9 @@ export const parseOpConfigFromArgs = (args: any): OperatorJsonConfig => {
return removeUndefinedKeys(data) as OperatorJsonConfig;
}
const parseListFromEnv = (val: string|undefined) => {
const parseListFromEnv = (val: string | undefined) => {
let listVals: undefined | string[];
if(val === undefined) {
if (val === undefined) {
return listVals;
}
const trimmedVal = val.trim();
@@ -387,13 +390,6 @@ export const parseDefaultBotInstanceFromEnv = (): BotInstanceJsonConfig => {
polling: {
sharedMod: parseBool(process.env.SHARE_MOD),
},
caching: {
provider: {
// @ts-ignore
store: process.env.CACHING as (CacheProvider | undefined)
},
authorTTL: process.env.AUTHOR_TTL !== undefined ? parseInt(process.env.AUTHOR_TTL) : undefined
},
nanny: {
softLimit: process.env.SOFT_LIMIT !== undefined ? parseInt(process.env.SOFT_LIMIT) : undefined,
hardLimit: process.env.HARD_LIMIT !== undefined ? parseInt(process.env.HARD_LIMIT) : undefined
@@ -405,7 +401,7 @@ export const parseDefaultBotInstanceFromEnv = (): BotInstanceJsonConfig => {
export const parseOpConfigFromEnv = (): OperatorJsonConfig => {
const data = {
mode: process.env.MODE !== undefined ? process.env.MODE as ('all' | 'server' | 'client') : undefined,
operator: {
operator: {
name: parseListFromEnv(process.env.OPERATOR),
display: process.env.OPERATOR_DISPLAY
},
@@ -414,6 +410,13 @@ export const parseOpConfigFromEnv = (): OperatorJsonConfig => {
level: process.env.LOG_LEVEL,
path: process.env.LOG_DIR === 'true' ? `${process.cwd()}/logs` : undefined,
},
caching: {
provider: {
// @ts-ignore
store: process.env.CACHING as (CacheProvider | undefined)
},
authorTTL: process.env.AUTHOR_TTL !== undefined ? parseInt(process.env.AUTHOR_TTL) : undefined
},
web: {
port: process.env.PORT !== undefined ? parseInt(process.env.PORT) : undefined,
session: {
@@ -473,7 +476,7 @@ export const parseOperatorConfigFromSources = async (args: any): Promise<Operato
if (operatorConfig !== undefined) {
let rawConfig;
try {
rawConfig = await readJson(operatorConfig, {log: initLogger});
rawConfig = await readConfigFile(operatorConfig, {log: initLogger}) as object;
} catch (err) {
initLogger.error('Cannot continue app startup because operator config file was not parseable.');
err.logged = true;
@@ -493,21 +496,25 @@ export const parseOperatorConfigFromSources = async (args: any): Promise<Operato
const defaultBotInstanceFromEnv = parseDefaultBotInstanceFromEnv();
const {bots: botInstancesFromFile = [], ...restConfigFile} = configFromFile;
const defaultBotInstance = merge.all([defaultBotInstanceFromEnv, defaultBotInstanceFromArgs], {
const mergedConfig = merge.all([opConfigFromEnv, restConfigFile, opConfigFromArgs], {
arrayMerge: overwriteMerge,
});
const defaultBotInstance = merge.all([defaultBotInstanceFromEnv, defaultBotInstanceFromArgs], {
arrayMerge: overwriteMerge,
}) as BotInstanceJsonConfig;
if (configFromFile.caching !== undefined) {
defaultBotInstance.caching = configFromFile.caching;
}
let botInstances = [];
if(botInstancesFromFile.length === 0) {
if (botInstancesFromFile.length === 0) {
botInstances = [defaultBotInstance];
} else {
botInstances = botInstancesFromFile.map(x => merge.all([defaultBotInstance, x], {arrayMerge: overwriteMerge}));
}
const mergedConfig = merge.all([opConfigFromEnv, restConfigFile, opConfigFromArgs], {
arrayMerge: overwriteMerge,
});
return removeUndefinedKeys({...mergedConfig, bots: botInstances}) as OperatorJsonConfig;
}
@@ -522,12 +529,17 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
level = 'verbose',
path,
} = {},
caching: opCache,
web: {
port = 8085,
maxLogs = 200,
caching: webCaching = {},
session: {
secret = randomId(),
provider: sessionProvider = { store: 'memory' },
maxAge: sessionMaxAge = 86400,
} = {},
invites: {
maxAge: inviteMaxAge = 0,
} = {},
clients,
credentials: webCredentials,
@@ -541,102 +553,175 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
bots = [],
} = data;
let hydratedBots: BotInstanceConfig[] = bots.map(x => {
const {
polling: {
sharedMod = false,
limit = 100,
interval = 30,
} = {},
queue: {
maxWorkers = 1,
} = {},
caching,
nanny: {
softLimit = 250,
hardLimit = 50
} = {},
snoowrap = {},
credentials: {
clientId: ci,
clientSecret: cs,
...restCred
} = {},
subreddits: {
names = [],
exclude = [],
wikiConfig = 'botconfig/contextbot',
dryRun,
heartbeatInterval = 300,
} = {},
} = x;
let cache: StrongCache;
let defaultProvider: CacheOptions;
let opActionedEventsMax: number | undefined;
let opActionedEventsDefault: number = 25;
if(caching === undefined) {
if (opCache === undefined) {
defaultProvider = {
store: 'memory',
...cacheOptDefaults
};
cache = {
...cacheTTLDefaults,
provider: {
store: 'memory',
...cacheOptDefaults
}
provider: defaultProvider,
actionedEventsDefault: opActionedEventsDefault,
};
} else {
const {provider, ...restConfig} = caching;
const {provider, actionedEventsMax, actionedEventsDefault = opActionedEventsDefault, ...restConfig} = opCache;
if (actionedEventsMax !== undefined && actionedEventsMax !== null) {
opActionedEventsMax = actionedEventsMax;
opActionedEventsDefault = Math.min(actionedEventsDefault, actionedEventsMax);
}
if (typeof provider === 'string') {
cache = {
...cacheTTLDefaults,
...restConfig,
provider: {
store: provider as CacheProvider,
...cacheOptDefaults
}
}
defaultProvider = {
store: provider as CacheProvider,
...cacheOptDefaults
};
} else {
const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
cache = {
...cacheTTLDefaults,
...restConfig,
provider: {
store,
...cacheOptDefaults,
...rest,
},
}
defaultProvider = {
store,
...cacheOptDefaults,
...rest,
};
}
cache = {
...cacheTTLDefaults,
...restConfig,
actionedEventsMax: opActionedEventsMax,
actionedEventsDefault: opActionedEventsDefault,
provider: defaultProvider,
}
}
return {
snoowrap,
subreddits: {
names,
exclude,
wikiConfig,
heartbeatInterval,
dryRun,
},
credentials: {
let hydratedBots: BotInstanceConfig[] = bots.map(x => {
const {
name: botName,
polling: {
sharedMod = false,
limit = 100,
interval = 30,
} = {},
queue: {
maxWorkers = 1,
} = {},
caching,
nanny: {
softLimit = 250,
hardLimit = 50
} = {},
snoowrap = {},
credentials: {
clientId: ci,
clientSecret: cs,
...restCred
} = {},
subreddits: {
names = [],
exclude = [],
wikiConfig = 'botconfig/contextbot',
dryRun,
heartbeatInterval = 300,
} = {},
} = x;
let botCache: StrongCache;
let botActionedEventsDefault: number;
if (caching === undefined) {
botCache = {
...cacheTTLDefaults,
actionedEventsDefault: opActionedEventsDefault,
actionedEventsMax: opActionedEventsMax,
provider: {
store: 'memory',
...cacheOptDefaults
}
};
} else {
const {
provider,
actionedEventsMax = opActionedEventsMax,
actionedEventsDefault = opActionedEventsDefault,
...restConfig
} = caching;
botActionedEventsDefault = actionedEventsDefault;
if(actionedEventsMax !== undefined) {
botActionedEventsDefault = Math.min(actionedEventsDefault, actionedEventsMax);
}
if (typeof provider === 'string') {
botCache = {
...cacheTTLDefaults,
...restConfig,
actionedEventsDefault: botActionedEventsDefault,
provider: {
store: provider as CacheProvider,
...cacheOptDefaults
}
}
} else {
const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
botCache = {
...cacheTTLDefaults,
...restConfig,
actionedEventsDefault: botActionedEventsDefault,
actionedEventsMax,
provider: {
store,
...cacheOptDefaults,
...rest,
},
}
}
}
const botCreds = {
clientId: (ci as string),
clientSecret: (cs as string),
...restCred,
},
caching: cache,
polling: {
sharedMod,
limit,
interval,
},
queue: {
maxWorkers,
},
nanny: {
softLimit,
hardLimit
};
if (botCache.provider.prefix === undefined || botCache.provider.prefix === defaultProvider.prefix) {
// need to provide unique prefix to bot
botCache.provider.prefix = buildCachePrefix([botCache.provider.prefix, 'bot', (botName || objectHash.sha1(botCreds))]);
}
return {
name: botName,
snoowrap,
subreddits: {
names,
exclude,
wikiConfig,
heartbeatInterval,
dryRun,
},
credentials: botCreds,
caching: botCache,
polling: {
sharedMod,
limit,
interval,
},
queue: {
maxWorkers,
},
nanny: {
softLimit,
hardLimit
}
}
}
});
const defaultOperators = typeof name === 'string' ? [name] : name;
const config: OperatorConfig = {
@@ -649,19 +734,19 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
level,
path
},
caching: cache,
web: {
port,
caching: {
...defaultProvider,
...webCaching
},
invites: {
maxAge: inviteMaxAge,
},
session: {
secret,
provider: typeof sessionProvider === 'string' ? {
...buildCacheOptionsFromProvider({
ttl: 86400000,
store: sessionProvider,
})
} : {
...buildCacheOptionsFromProvider(sessionProvider),
ttl: 86400000,
},
maxAge: sessionMaxAge,
},
maxLogs,
clients: clients === undefined ? [{host: 'localhost:8095', secret: apiSecret}] : clients,

View File

@@ -5,9 +5,10 @@ import Submission from "snoowrap/dist/objects/Submission";
import {getAttributionIdentifier} from "../Utils/SnoowrapUtils";
import dayjs from "dayjs";
import {
asSubmission,
comparisonTextOp,
FAIL,
formatNumber,
formatNumber, getActivitySubredditName, isSubmission,
parseGenericValueOrPercentComparison,
parseSubredditName,
PASS
@@ -52,8 +53,6 @@ export interface AttributionCriteria {
/**
* A list of domains whose Activities will be tested against `threshold`.
*
* If this is present then `aggregateOn` is ignored.
*
* The values are tested as partial strings so you do not need to include full URLs, just the part that matters.
*
* EX `["youtube"]` will match submissions with the domain `https://youtube.com/c/aChannel`
@@ -97,7 +96,7 @@ export interface AttributionCriteria {
exclude?: string[],
/**
* If `domains` is not specified this list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`
* This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`
*
* * If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)
* * If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)
@@ -190,9 +189,9 @@ export class AttributionRule extends Rule {
let activities = thresholdOn === 'submissions' ? await this.resources.getAuthorSubmissions(item.author, {window: window}) : await this.resources.getAuthorActivities(item.author, {window: window});
activities = activities.filter(act => {
if (include.length > 0) {
return include.some(x => x === act.subreddit.display_name.toLowerCase());
return include.some(x => x === getActivitySubredditName(act).toLowerCase());
} else if (exclude.length > 0) {
return !exclude.some(x => x === act.subreddit.display_name.toLowerCase())
return !exclude.some(x => x === getActivitySubredditName(act).toLowerCase())
}
return true;
});
@@ -219,7 +218,7 @@ export class AttributionRule extends Rule {
const realDomains: DomainInfo[] = domains.map(x => {
if(x === SUBMISSION_DOMAIN) {
if(!(item instanceof Submission)) {
if(!(asSubmission(item))) {
throw new SimpleError('Cannot run Attribution Rule with the domain SELF:AGG on a Comment');
}
return getAttributionIdentifier(item, consolidateMediaDomains);
@@ -228,7 +227,7 @@ export class AttributionRule extends Rule {
});
const realDomainIdents = realDomains.map(x => x.aliases).flat(1).map(x => x.toLowerCase());
const submissions: Submission[] = thresholdOn === 'submissions' ? activities as Submission[] : activities.filter(x => x instanceof Submission) as Submission[];
const submissions: Submission[] = thresholdOn === 'submissions' ? activities as Submission[] : activities.filter(x => isSubmission(x)) as Submission[];
const aggregatedSubmissions = submissions.reduce((acc: Map<string, DomainAgg>, sub) => {
const domainInfo = getAttributionIdentifier(sub, consolidateMediaDomains)
@@ -239,7 +238,7 @@ export class AttributionRule extends Rule {
domainType = 'self';
}
if(realDomains.length === 0 && aggregateOn.length !== 0) {
if(aggregateOn.length !== 0) {
if(domainType === 'media' && !aggregateOn.includes('media')) {
return acc;
}
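To illustrate the behavior change above (with this commit `aggregateOn` is applied even when `domains` is present), a hedged sketch of an `AttributionCriteria`-shaped fragment; the threshold string and values are assumptions based on the surrounding docs, not repo defaults.
// Hypothetical AttributionCriteria fragment (values are examples only)
const youtubeAttribution = {
    threshold: '> 10%',        // comparison string, assumed format
    domains: ['youtube'],      // partial match, e.g. https://youtube.com/c/aChannel
    aggregateOn: ['media'],    // now still honored even though domains is specified
    thresholdOn: 'submissions'
};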

View File

@@ -1,17 +1,19 @@
import {ActivityWindowType, CompareValueOrPercent, ThresholdCriteria} from "../Common/interfaces";
import {ActivityWindowType, CompareValueOrPercent, SubredditState, ThresholdCriteria} from "../Common/interfaces";
import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
import Submission from "snoowrap/dist/objects/Submission";
import {getAuthorActivities} from "../Utils/SnoowrapUtils";
import dayjs from "dayjs";
import {
asSubmission,
comparisonTextOp,
FAIL,
formatNumber,
formatNumber, getActivitySubredditName, isSubmission,
parseGenericValueOrPercentComparison, parseSubredditName,
PASS,
percentFromString
percentFromString, toStrongSubredditState
} from "../util";
import {Comment} from "snoowrap";
export interface CommentThresholdCriteria extends ThresholdCriteria {
/**
@@ -23,42 +25,56 @@ export interface CommentThresholdCriteria extends ThresholdCriteria {
asOp?: boolean
}
/**
* If both `submission` and `comment` are defined then criteria will only trigger if BOTH thresholds are met
* Criteria will only trigger if ALL present thresholds (comment, submission, total) are met
* */
export interface HistoryCriteria {
/**
* A string containing a comparison operator and a value to compare submissions against
* A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) submissions against
*
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign]`
*
* * EX `> 100` => greater than 100 submissions
* * EX `<= 75%` => submissions are equal to or less than 75% of all Activities
* * EX `> 100` => greater than 100 filtered submissions
* * EX `<= 75%` => filtered submissions are equal to or less than 75% of unfiltered Activities
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
* */
submission?: CompareValueOrPercent
/**
* A string containing a comparison operator and a value to compare comments against
* A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) comments against
*
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`
*
* * EX `> 100` => greater than 100 comments
* * EX `<= 75%` => comments are equal to or less than 75% of all Activities
* * EX `<= 75%` => comments are equal to or less than 75% of unfiltered Activities
*
* If your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:
*
* * EX `> 100 OP` => greater than 100 comments as OP
* * EX `<= 25% as OP` => Comments as OP were less than or equal to 25% of **all Comments**
* * EX `> 100 OP` => greater than 100 filtered comments as OP
* * EX `<= 25% as OP` => **Filtered** comments as OP were less than or equal to 25% of **unfiltered Comments**
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
* */
comment?: CompareValueOrPercent
/**
* A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`) activities against
*
* **Note:** This is only useful when using `include` or `exclude`, otherwise the percent will always be 100% and total === activityTotal
*
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`
*
* * EX `> 100` => greater than 100 filtered activities
* * EX `<= 75%` => filtered activities are equal to or less than 75% of all Activities
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
* */
total?: CompareValueOrPercent
window: ActivityWindowType
/**
* The minimum number of activities that must exist from the `window` results for this criteria to run
* The minimum number of **filtered** activities that must exist from the `window` results for this criteria to run
* @default 5
* */
minActivityCount?: number
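For illustration, a sketch of a `HistoryCriteria`-shaped fragment combining the `submission`, `comment`, and `total` comparisons documented above; the specific numbers are examples, not defaults.
// Hypothetical HistoryCriteria fragment (numbers are examples only)
const historyCriteria = {
    window: 100,           // last 100 activities
    submission: '> 50%',   // filtered submissions are more than 50% of all window activities
    comment: '<= 25% OP',  // filtered comments as OP are at most 25% of unfiltered comments
    total: '<= 75%',       // filtered activities are at most 75% of the window
    minActivityCount: 5
};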
@@ -68,8 +84,9 @@ export interface HistoryCriteria {
export class HistoryRule extends Rule {
criteria: HistoryCriteria[];
condition: 'AND' | 'OR';
include: string[];
exclude: string[];
include: (string | SubredditState)[];
exclude: (string | SubredditState)[];
activityFilterFunc: (x: Submission|Comment) => Promise<boolean> = async (x) => true;
constructor(options: HistoryOptions) {
super(options);
@@ -85,8 +102,41 @@ export class HistoryRule extends Rule {
if (this.criteria.length === 0) {
throw new Error('Must provide at least one HistoryCriteria');
}
this.include = include.map(x => parseSubredditName(x).toLowerCase());
this.exclude = exclude.map(x => parseSubredditName(x).toLowerCase());
this.include = include;
this.exclude = exclude;
if(this.include.length > 0) {
const subStates = include.map((x) => {
if(typeof x === 'string') {
return toStrongSubredditState({name: x, stateDescription: x}, {defaultFlags: 'i', generateDescription: true});
}
return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
});
this.activityFilterFunc = async (x: Submission|Comment) => {
for(const ss of subStates) {
if(await this.resources.testSubredditCriteria(x, ss)) {
return true;
}
}
return false;
};
} else if(this.exclude.length > 0) {
const subStates = exclude.map((x) => {
if(typeof x === 'string') {
return toStrongSubredditState({name: x, stateDescription: x}, {defaultFlags: 'i', generateDescription: true});
}
return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
});
this.activityFilterFunc = async (x: Submission|Comment) => {
for(const ss of subStates) {
if(await this.resources.testSubredditCriteria(x, ss)) {
return false;
}
}
return true;
};
}
}
getKind(): string {
@@ -107,25 +157,23 @@ export class HistoryRule extends Rule {
for (const criteria of this.criteria) {
const {comment, window, submission, minActivityCount = 5} = criteria;
const {comment, window, submission, total, minActivityCount = 5} = criteria;
let activities = await this.resources.getAuthorActivities(item.author, {window: window});
activities = activities.filter(act => {
if (this.include.length > 0) {
return this.include.some(x => x === act.subreddit.display_name.toLowerCase());
} else if (this.exclude.length > 0) {
return !this.exclude.some(x => x === act.subreddit.display_name.toLowerCase())
const filteredActivities = [];
for(const a of activities) {
if(await this.activityFilterFunc(a)) {
filteredActivities.push(a);
}
return true;
});
}
if (activities.length < minActivityCount) {
if (filteredActivities.length < minActivityCount) {
continue;
}
const activityTotal = activities.length;
const {submissionTotal, commentTotal, opTotal} = activities.reduce((acc, act) => {
if(act instanceof Submission) {
if(asSubmission(act)) {
return {...acc, submissionTotal: acc.submissionTotal + 1};
}
let a = {...acc, commentTotal: acc.commentTotal + 1};
@@ -134,6 +182,24 @@ export class HistoryRule extends Rule {
}
return a;
},{submissionTotal: 0, commentTotal: 0, opTotal: 0});
let fSubmissionTotal = submissionTotal;
let fCommentTotal = commentTotal;
let fOpTotal = opTotal;
if(activities.length !== filteredActivities.length) {
const filteredCounts = filteredActivities.reduce((acc, act) => {
if(asSubmission(act)) {
return {...acc, submissionTotal: acc.submissionTotal + 1};
}
let a = {...acc, commentTotal: acc.commentTotal + 1};
if(act.is_submitter) {
a.opTotal = a.opTotal + 1;
}
return a;
},{submissionTotal: 0, commentTotal: 0, opTotal: 0});
fSubmissionTotal = filteredCounts.submissionTotal;
fCommentTotal = filteredCounts.commentTotal;
fOpTotal = filteredCounts.opTotal;
}
let commentTrigger = undefined;
if(comment !== undefined) {
@@ -142,15 +208,15 @@ export class HistoryRule extends Rule {
if(isPercent) {
const per = value / 100;
if(asOp) {
commentTrigger = comparisonTextOp(opTotal / commentTotal, operator, per);
commentTrigger = comparisonTextOp(fOpTotal / commentTotal, operator, per);
} else {
commentTrigger = comparisonTextOp(commentTotal / activityTotal, operator, per);
commentTrigger = comparisonTextOp(fCommentTotal / activityTotal, operator, per);
}
} else {
if(asOp) {
commentTrigger = comparisonTextOp(opTotal, operator, value);
commentTrigger = comparisonTextOp(fOpTotal, operator, value);
} else {
commentTrigger = comparisonTextOp(commentTotal, operator, value);
commentTrigger = comparisonTextOp(fCommentTotal, operator, value);
}
}
}
@@ -160,27 +226,40 @@ export class HistoryRule extends Rule {
const {operator, value, isPercent} = parseGenericValueOrPercentComparison(submission);
if(isPercent) {
const per = value / 100;
submissionTrigger = comparisonTextOp(submissionTotal / activityTotal, operator, per);
submissionTrigger = comparisonTextOp(fSubmissionTotal / activityTotal, operator, per);
} else {
submissionTrigger = comparisonTextOp(submissionTotal, operator, value);
submissionTrigger = comparisonTextOp(fSubmissionTotal, operator, value);
}
}
let totalTrigger = undefined;
if(total !== undefined) {
const {operator, value, isPercent} = parseGenericValueOrPercentComparison(total);
if(isPercent) {
const per = value / 100;
totalTrigger = comparisonTextOp(filteredActivities.length / activityTotal, operator, per);
} else {
totalTrigger = comparisonTextOp(filteredActivities.length, operator, value);
}
}
const firstActivity = activities[0];
const lastActivity = activities[activities.length - 1];
const activityTotalWindow = dayjs.duration(dayjs(firstActivity.created_utc * 1000).diff(dayjs(lastActivity.created_utc * 1000)));
const activityTotalWindow = activities.length === 0 ? dayjs.duration(0, 's') : dayjs.duration(dayjs(firstActivity.created_utc * 1000).diff(dayjs(lastActivity.created_utc * 1000)));
criteriaResults.push({
criteria,
activityTotal,
activityTotalWindow,
submissionTotal,
commentTotal,
opTotal,
submissionTotal: fSubmissionTotal,
commentTotal: fCommentTotal,
opTotal: fOpTotal,
filteredTotal: filteredActivities.length,
submissionTrigger,
commentTrigger,
triggered: (submissionTrigger === undefined || submissionTrigger === true) && (commentTrigger === undefined || commentTrigger === true)
totalTrigger,
triggered: (submissionTrigger === undefined || submissionTrigger === true) && (commentTrigger === undefined || commentTrigger === true) && (totalTrigger === undefined || totalTrigger === true)
});
}
@@ -223,36 +302,50 @@ export class HistoryRule extends Rule {
activityTotalWindow,
submissionTotal,
commentTotal,
filteredTotal,
opTotal,
criteria: {
comment,
submission,
total,
window,
},
criteria,
triggered,
submissionTrigger,
commentTrigger,
totalTrigger,
} = results;
const data: any = {
activityTotal,
submissionTotal,
commentTotal,
filteredTotal,
opTotal,
commentPercent: formatNumber((commentTotal/activityTotal)*100),
submissionPercent: formatNumber((submissionTotal/activityTotal)*100),
opPercent: formatNumber((opTotal/commentTotal)*100),
filteredPercent: formatNumber((filteredTotal/activityTotal)*100),
criteria,
window: typeof window === 'number' ? `${activityTotal} Items` : activityTotalWindow.humanize(true),
window: typeof window === 'number' || activityTotal === 0 ? `${activityTotal} Items` : activityTotalWindow.humanize(true),
triggered,
submissionTrigger,
commentTrigger,
totalTrigger,
};
let thresholdSummary = [];
let totalSummary;
let submissionSummary;
let commentSummary;
if(total !== undefined) {
const {operator, value, isPercent, displayText} = parseGenericValueOrPercentComparison(total);
const suffix = !isPercent ? 'Items' : `(${formatNumber((filteredTotal/activityTotal)*100)}%) of ${activityTotal} Total`;
totalSummary = `${includePassFailSymbols ? `${totalTrigger ? PASS : FAIL} ` : ''}Filtered Activities (${filteredTotal}) were${totalTrigger ? '' : ' not'} ${displayText} ${suffix}`;
data.totalSummary = totalSummary;
thresholdSummary.push(totalSummary);
}
if(submission !== undefined) {
const {operator, value, isPercent, displayText} = parseGenericValueOrPercentComparison(submission);
const suffix = !isPercent ? 'Items' : `(${formatNumber((submissionTotal/activityTotal)*100)}%) of ${activityTotal} Total`;
@@ -298,21 +391,45 @@ interface HistoryConfig {
condition?: 'AND' | 'OR'
/**
* Only include Submissions from this list of Subreddits (by name, case-insensitive)
* If present, activities will be counted only if they are found in this list of Subreddits.
*
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* Each value in the list can be either:
*
* * string (name of subreddit)
* * regular expression to run on the subreddit name
* * `SubredditState`
*
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
*
* **Note:** This filtering is applied to activities **after** they are retrieved from `window`, which means:
*
* * `activityTotal` is the number of activities retrieved from `window` -- NOT the post-filtering count
* * all comparisons using **percentages** compare **post-filtering** results against the **activity count from window**
* * to run this rule against only the filtered activities, use include/exclude in `window` instead
*
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
* */
include?: string[],
include?: (string | SubredditState)[],
/**
* Do not include Submissions from this list of Subreddits (by name, case-insensitive)
* If present, activities will be counted only if they are **NOT** found in this list of Subreddits
*
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* Each value in the list can be either:
*
* * string (name of subreddit)
* * regular expression to run on the subreddit name
* * `SubredditState`
*
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
*
* **Note:** This filtering is applied to activities **after** they are retrieved from `window`, which means:
*
* * `activityTotal` is the number of activities retrieved from `window` -- NOT the post-filtering count
* * all comparisons using **percentages** compare **post-filtering** results against the **activity count from window**
* * to run this rule against only the filtered activities, use include/exclude in `window` instead
*
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
* */
exclude?: string[],
exclude?: (string | SubredditState)[],
}
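For illustration, an `include` list mixing the three accepted value types described above; the names are examples only.
// Illustrative include list for the History rule
const include = [
    'mealtimevideos',      // plain subreddit name, checked case-insensitively
    '/onlyfans*/i',        // regular expression run against the subreddit name
    {over18: true}         // SubredditState criteria
];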
export interface HistoryOptions extends HistoryConfig, RuleOptions {

View File

@@ -2,25 +2,26 @@ import {Rule, RuleJSONConfig, RuleOptions, RulePremise, RuleResult} from "./inde
import {Comment, VoteableContent} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {
activityWindowText,
comparisonTextOp, FAIL, formatNumber,
parseGenericValueOrPercentComparison, parseSubredditName,
activityWindowText, asSubmission,
comparisonTextOp, FAIL, formatNumber, getActivitySubredditName, isSubmission, objectToStringSummary,
parseGenericValueOrPercentComparison, parseStringToRegex, parseSubredditName,
parseUsableLinkIdentifier,
PASS
PASS, toStrongSubredditState
} from "../util";
import {
ActivityWindow,
ActivityWindowCriteria,
ActivityWindowType,
ReferenceSubmission,
SubredditCriteria
ActivityWindowType, CommentState,
ReferenceSubmission, StrongSubredditState, SubmissionState,
SubredditCriteria, SubredditState
} from "../Common/interfaces";
import {SubredditResources} from "../Subreddit/SubredditResources";
const parseLink = parseUsableLinkIdentifier();
export class RecentActivityRule extends Rule {
window: ActivityWindowType;
thresholds: SubThreshold[];
thresholds: ActivityThreshold[];
useSubmissionAsReference: boolean;
lookAt?: 'comments' | 'submissions';
@@ -67,14 +68,14 @@ export class RecentActivityRule extends Rule {
let viableActivity = activities;
if (this.useSubmissionAsReference) {
if (!(item instanceof Submission)) {
if (!asSubmission(item)) {
this.logger.warn('Cannot use post as reference because triggered item is not a Submission');
} else if (item.is_self) {
this.logger.warn('Cannot use post as reference because triggered Submission is not a link type');
} else {
const usableUrl = parseLink(await item.url);
viableActivity = viableActivity.filter((x) => {
if (!(x instanceof Submission)) {
if (!asSubmission(x)) {
return false;
}
if (x.url === undefined) {
@@ -84,29 +85,59 @@ export class RecentActivityRule extends Rule {
});
}
}
const groupedActivity = viableActivity.reduce((grouped, activity) => {
const s = activity.subreddit.display_name.toLowerCase();
grouped[s] = (grouped[s] || []).concat(activity);
return grouped;
}, {} as Record<string, (Submission | Comment)[]>);
const summaries = [];
let totalTriggeredOn;
for (const triggerSet of this.thresholds) {
let currCount = 0;
const presentSubs = [];
const {threshold = '>= 1', subreddits = []} = triggerSet;
for (const sub of subreddits.map(x => parseSubredditName(x))) {
const isub = sub.toLowerCase();
const {[isub]: tSub = []} = groupedActivity;
if (tSub.length > 0) {
currCount += tSub.length;
presentSubs.push(sub);
const presentSubs: string[] = [];
let combinedKarma = 0;
const {
threshold = '>= 1',
subreddits = [],
karma: karmaThreshold,
commentState,
submissionState,
} = triggerSet;
// convert subreddits array into entirely StrongSubredditState
const subStates: StrongSubredditState[] = subreddits.map((x) => {
if(typeof x === 'string') {
return toStrongSubredditState({name: x, stateDescription: x}, {defaultFlags: 'i', generateDescription: true});
}
return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
});
for(const activity of viableActivity) {
if(asSubmission(activity) && submissionState !== undefined) {
if(!(await this.resources.testItemCriteria(activity, [submissionState]))) {
continue;
}
} else if(commentState !== undefined) {
if(!(await this.resources.testItemCriteria(activity, [commentState]))) {
continue;
}
}
let inSubreddits = false;
for(const ss of subStates) {
const res = await this.resources.testSubredditCriteria(activity, ss);
if(res) {
inSubreddits = true;
break;
}
}
if(inSubreddits) {
currCount++;
combinedKarma += activity.score;
const pSub = getActivitySubredditName(activity);
if(!presentSubs.includes(pSub)) {
presentSubs.push(pSub);
}
}
}
const {operator, value, isPercent} = parseGenericValueOrPercentComparison(threshold);
let sum = {subsWithActivity: presentSubs, subreddits, count: currCount, threshold, triggered: false, testValue: currCount.toString()};
let sum = {subsWithActivity: presentSubs, combinedKarma, karmaThreshold, subreddits: subStates.map(x => x.stateDescription), count: currCount, threshold, triggered: false, testValue: currCount.toString()};
if (isPercent) {
sum.testValue = `${formatNumber((currCount / viableActivity.length) * 100)}%`;
if (comparisonTextOp(currCount / viableActivity.length, operator, value / 100)) {
@@ -117,6 +148,15 @@ export class RecentActivityRule extends Rule {
sum.triggered = true;
totalTriggeredOn = sum;
}
// if we would trigger on threshold need to also test for karma
if(totalTriggeredOn !== undefined && karmaThreshold !== undefined) {
const {operator: opKarma, value: valueKarma} = parseGenericValueOrPercentComparison(karmaThreshold);
if(!comparisonTextOp(combinedKarma, opKarma, valueKarma)) {
sum.triggered = false;
totalTriggeredOn = undefined;
}
}
summaries.push(sum);
// if either trigger condition is hit end the iteration early
if (totalTriggeredOn !== undefined) {
@@ -150,10 +190,15 @@ export class RecentActivityRule extends Rule {
subreddits = [],
subsWithActivity = [],
threshold,
triggered
triggered,
combinedKarma,
karmaThreshold,
} = summary;
const relevantSubs = subsWithActivity.length === 0 ? subreddits : subsWithActivity;
const totalSummary = `${testValue} activities over ${relevantSubs.length} subreddits ${triggered ? 'met' : 'did not meet'} threshold of ${threshold}`;
let totalSummary = `${testValue} activities over ${relevantSubs.length} subreddits${karmaThreshold !== undefined ? ` with ${combinedKarma} combined karma` : ''} ${triggered ? 'met' : 'did not meet'} threshold of ${threshold}${karmaThreshold !== undefined ? ` and ${karmaThreshold} combined karma` : ''}`;
if(triggered && subsWithActivity.length > 0) {
totalSummary = `${totalSummary} -- subreddits: ${subsWithActivity.join(', ')}`;
}
return {
result: totalSummary,
data: {
@@ -163,7 +208,8 @@ export class RecentActivityRule extends Rule {
subCount: relevantSubs.length,
totalCount: count,
threshold,
testValue
testValue,
karmaThreshold,
}
};
}
@@ -175,7 +221,16 @@ export class RecentActivityRule extends Rule {
* @minProperties 1
* @additionalProperties false
* */
export interface SubThreshold extends SubredditCriteria {
export interface ActivityThreshold {
/**
* When present, a Submission will only be counted if it meets this criteria
* */
submissionState?: SubmissionState
/**
* When present, a Comment will only be counted if it meets this criteria
* */
commentState?: CommentState
/**
* A string containing a comparison operator and a value to compare recent activities against
*
@@ -191,6 +246,35 @@ export interface SubThreshold extends SubredditCriteria {
* @examples [">= 1"]
* */
threshold?: string
/**
* Test the **combined karma** from Activities found in the specified subreddits
*
* Value is a string containing a comparison operator and a number of **combined karma** to compare against
*
* If specified then both `threshold` and `karma` must be met for this `ActivityThreshold` to be satisfied
*
* The syntax is `(< OR > OR <= OR >=) <number>`
*
* * EX `> 50` => greater than 50 combined karma for all found Activities in specified subreddits
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
* */
karma?: string
/**
* Activities will be counted if they are found in this list of Subreddits
*
* Each value in the list can be either:
*
* * string (name of subreddit)
* * regular expression to run on the subreddit name
* * `SubredditState`
*
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
* */
subreddits?: (string | SubredditState)[]
}
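For illustration, an `ActivityThreshold`-shaped fragment combining the count and combined-karma tests documented above; the subreddit names and numbers are examples only.
// Hypothetical ActivityThreshold (both threshold and karma must be met)
const freeKarmaThreshold = {
    threshold: '>= 3',     // at least 3 activities found in the listed subreddits
    karma: '> 50',         // AND more than 50 combined karma across those activities
    subreddits: ['freekarma4u', '/karma/i', {over18: true}]
};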
interface RecentActivityConfig extends ActivityWindow, ReferenceSubmission {
@@ -203,7 +287,7 @@ interface RecentActivityConfig extends ActivityWindow, ReferenceSubmission {
* A list of subreddits/count criteria that may trigger this rule. ANY SubThreshold will trigger this rule.
* @minItems 1
* */
thresholds: SubThreshold[],
thresholds: ActivityThreshold[],
}
export interface RecentActivityRuleOptions extends RecentActivityConfig, RuleOptions {

View File

@@ -2,14 +2,16 @@ import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
import {Comment} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {
comparisonTextOp, FAIL, isExternalUrlSubmission, parseGenericValueComparison,
parseGenericValueOrPercentComparison, parseRegex,
PASS
asSubmission,
comparisonTextOp, FAIL, isExternalUrlSubmission, isSubmission, parseGenericValueComparison,
parseGenericValueOrPercentComparison, parseRegex, parseStringToRegex,
PASS, triggeredIndicator
} from "../util";
import {
ActivityWindowType, JoinOperands,
} from "../Common/interfaces";
import dayjs from 'dayjs';
import SimpleError from "../Utils/SimpleError";
export interface RegexCriteria {
/**
@@ -21,17 +23,11 @@ export interface RegexCriteria {
/**
* A valid Regular Expression to test content against
*
* Do not wrap expression in forward slashes
* If no flags are specified then the **global** flag is used by default
*
* EX For the expression `/reddit|FoxxMD/` the value should be `reddit|FoxxMD`
*
* @examples ["reddit|FoxxMD"]
* @examples ["/reddit|FoxxMD/ig"]
* */
regex: string,
/**
* Regex flags to use
* */
regexFlags?: string,
/**
* Which content from an Activity to test the regex against
@@ -134,12 +130,11 @@ export class RegexRule extends Rule {
let criteriaResults = [];
for (const criteria of this.criteria) {
for (const [index, criteria] of this.criteria.entries()) {
const {
name,
name = (index + 1),
regex,
regexFlags,
testOn: testOnVals = ['title', 'body'],
lookAt = 'all',
matchThreshold = '> 0',
@@ -157,7 +152,10 @@ export class RegexRule extends Rule {
}, []);
// check regex
const reg = new RegExp(regex);
const reg = parseStringToRegex(regex, 'g');
if(reg === undefined) {
throw new SimpleError(`Value given for regex on Criteria ${name} was not valid: ${regex}`);
}
// ok cool its a valid regex
const matchComparison = parseGenericValueComparison(matchThreshold);
@@ -176,7 +174,7 @@ export class RegexRule extends Rule {
// first lets see if the activity we are checking satisfies thresholds
// since we may be able to avoid api calls to get history
let actMatches = this.getMatchesFromActivity(item, testOn, reg, regexFlags);
let actMatches = this.getMatchesFromActivity(item, testOn, reg);
matches = matches.concat(actMatches).slice(0, 100);
matchCount += actMatches.length;
@@ -226,7 +224,7 @@ export class RegexRule extends Rule {
for (const h of history) {
activitiesTested++;
const aMatches = this.getMatchesFromActivity(h, testOn, reg, regexFlags);
const aMatches = this.getMatchesFromActivity(h, testOn, reg);
matches = matches.concat(aMatches).slice(0, 100);
matchCount += aMatches.length;
const matched = comparisonTextOp(aMatches.length, matchComparison.operator, matchComparison.value);
@@ -300,30 +298,35 @@ export class RegexRule extends Rule {
let index = 0;
for (const c of criteriaResults) {
index++;
let msg = `Crit ${c.criteria.name || index} ${c.triggered ? PASS : FAIL}`;
let msg = `Criteria ${c.criteria.name || `#${index}`} ${triggeredIndicator(c.triggered)}`;
if (c.activityThresholdMet !== undefined) {
msg = `${msg} -- Activity Match=> ${c.activityThresholdMet ? PASS : FAIL} ${c.activitiesMatchedCount} ${c.criteria.activityMatchThreshold} (Threshold ${c.criteria.matchThreshold})`;
msg = `${msg} -- Activity Match ${triggeredIndicator(c.activityThresholdMet)} => ${c.activitiesMatchedCount} ${c.criteria.activityMatchThreshold} (Threshold ${c.criteria.matchThreshold})`;
}
if (c.totalThresholdMet !== undefined) {
msg = `${msg} -- Total Matches=> ${c.totalThresholdMet ? PASS : FAIL} ${c.matchCount} ${c.criteria.totalMatchThreshold}`;
msg = `${msg} -- Total Matches ${triggeredIndicator(c.totalThresholdMet)} => ${c.matchCount} ${c.criteria.totalMatchThreshold}`;
} else {
msg = `${msg} and ${c.matchCount} Total Matches`;
}
msg = `${msg} (Window: ${c.criteria.window})`;
logSummary.push(msg);
if(c.matches.length > 0) {
let matchSample = `-- Matched Values: ${c.matches.slice(0, 3).map(x => `"${x}"`).join(', ')}${c.matches.length > 3 ? `, and ${c.matches.length - 3} more...` : ''}`;
logSummary.push(`${msg} ${matchSample}`);
} else {
logSummary.push(msg);
}
}
const result = `${criteriaMet ? PASS : FAIL} ${logSummary.join(' || ')}`;
const result = `${triggeredIndicator(criteriaMet)} ${logSummary.join(' || ')}`;
this.logger.verbose(result);
return Promise.resolve([criteriaMet, this.getResult(criteriaMet, {result, data: criteriaResults})]);
}
protected getMatchesFromActivity(a: (Submission | Comment), testOn: string[], reg: RegExp, flags?: string): string[] {
protected getMatchesFromActivity(a: (Submission | Comment), testOn: string[], reg: RegExp): string[] {
let m: string[] = [];
// determine what content we are testing
let contents: string[] = [];
if (a instanceof Submission) {
if (asSubmission(a)) {
for (const l of testOn) {
switch (l) {
case 'title':
@@ -346,7 +349,7 @@ export class RegexRule extends Rule {
}
for (const c of contents) {
const results = parseRegex(reg, c, flags);
const results = parseRegex(reg, c);
if (results.matched) {
m = m.concat(results.matches);
}

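For clarity, a minimal sketch of a RegexCriteria entry under the new format described above (property names are taken from the diff; the criteria name and values are illustrative only):

// Sketch only: shows the new slash-wrapped regex value with inline flags
const exampleRegexCriteria = {
    name: 'brandMention',            // hypothetical label; falls back to the criteria index when omitted
    regex: '/reddit|FoxxMD/ig',      // expression wrapped in forward slashes with flags appended; global is assumed when no flags are given
    testOn: ['title', 'body'],       // content to test (defaults shown in the diff)
    matchThreshold: '> 0',           // per-activity match comparison (default shown in the diff)
};

Note that regexFlags is gone entirely; flags now live on the regex value itself.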
View File

@@ -1,12 +1,18 @@
import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
import {Comment} from "snoowrap";
import {
activityWindowText,
comparisonTextOp, FAIL, isExternalUrlSubmission, isRedditMedia,
activityWindowText, asSubmission,
comparisonTextOp, FAIL, getActivitySubredditName, isExternalUrlSubmission, isRedditMedia,
parseGenericValueComparison, parseSubredditName,
parseUsableLinkIdentifier as linkParser, PASS
parseUsableLinkIdentifier as linkParser, PASS, toStrongSubredditState
} from "../util";
import {ActivityWindow, ActivityWindowType, ReferenceSubmission} from "../Common/interfaces";
import {
ActivityWindow,
ActivityWindowType,
ReferenceSubmission,
StrongSubredditState,
SubredditState
} from "../Common/interfaces";
import Submission from "snoowrap/dist/objects/Submission";
import dayjs from "dayjs";
import Fuse from 'fuse.js'
@@ -25,7 +31,7 @@ interface RepeatActivityReducer {
const getActivityIdentifier = (activity: (Submission | Comment), length = 200) => {
let identifier: string;
if (activity instanceof Submission) {
if (asSubmission(activity)) {
if (activity.is_self) {
identifier = `${activity.title}${activity.selftext.slice(0, length)}`;
} else if(isRedditMedia(activity)) {
@@ -50,8 +56,9 @@ export class RepeatActivityRule extends Rule {
gapAllowance?: number;
useSubmissionAsReference: boolean;
lookAt: 'submissions' | 'all';
include: string[];
exclude: string[];
include: (string | SubredditState)[];
exclude: (string | SubredditState)[];
activityFilterFunc: (x: Submission|Comment) => Promise<boolean> = async (x) => true;
keepRemoved: boolean;
minWordCount: number;
@@ -74,8 +81,40 @@ export class RepeatActivityRule extends Rule {
this.window = window;
this.gapAllowance = gapAllowance;
this.useSubmissionAsReference = useSubmissionAsReference;
this.include = include.map(x => parseSubredditName(x).toLowerCase());
this.exclude = exclude.map(x => parseSubredditName(x).toLowerCase());
this.include = include;
this.exclude = exclude;
if(this.include.length > 0) {
const subStates = include.map((x) => {
if(typeof x === 'string') {
return toStrongSubredditState({name: x, stateDescription: x}, {defaultFlags: 'i', generateDescription: true});
}
return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
});
this.activityFilterFunc = async (x: Submission|Comment) => {
for(const ss of subStates) {
if(await this.resources.testSubredditCriteria(x, ss)) {
return true;
}
}
return false;
};
} else if(this.exclude.length > 0) {
const subStates = exclude.map((x) => {
if(typeof x === 'string') {
return toStrongSubredditState({name: x, stateDescription: x}, {defaultFlags: 'i', generateDescription: true});
}
return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
});
this.activityFilterFunc = async (x: Submission|Comment) => {
for(const ss of subStates) {
if(await this.resources.testSubredditCriteria(x, ss)) {
return false;
}
}
return true;
};
}
this.lookAt = lookAt;
}
@@ -96,17 +135,10 @@ export class RepeatActivityRule extends Rule {
async process(item: Submission|Comment): Promise<[boolean, RuleResult]> {
let referenceUrl;
if(item instanceof Submission && this.useSubmissionAsReference) {
if(asSubmission(item) && this.useSubmissionAsReference) {
referenceUrl = await item.url;
}
let filterFunc = (x: any) => true;
if(this.include.length > 0) {
filterFunc = (x: Submission|Comment) => this.include.includes(x.subreddit.display_name.toLowerCase());
} else if(this.exclude.length > 0) {
filterFunc = (x: Submission|Comment) => !this.exclude.includes(x.subreddit.display_name.toLowerCase());
}
let activities: (Submission | Comment)[] = [];
switch (this.lookAt) {
case 'submissions':
@@ -117,13 +149,14 @@ export class RepeatActivityRule extends Rule {
break;
}
const condensedActivities = activities.reduce((acc: RepeatActivityReducer, activity: (Submission | Comment), index: number) => {
const condensedActivities = await activities.reduce(async (accProm: Promise<RepeatActivityReducer>, activity: (Submission | Comment), index: number) => {
const acc = await accProm;
const {openSets = [], allSets = []} = acc;
let identifier = getActivityIdentifier(activity);
const isUrl = isExternalUrlSubmission(activity);
let fu = new Fuse([identifier], !isUrl ? fuzzyOptions : {...fuzzyOptions, distance: 5});
const validSub = filterFunc(activity);
const validSub = await this.activityFilterFunc(activity);
let minMet = identifier.length >= this.minWordCount;
let updatedAllSets = [...allSets];
@@ -174,7 +207,7 @@ export class RepeatActivityRule extends Rule {
return {openSets: updatedOpenSets, allSets: updatedAllSets};
}, {openSets: [], allSets: []});
}, Promise.resolve({openSets: [], allSets: []}));
const allRepeatSets = [...condensedActivities.allSets, ...condensedActivities.openSets];
@@ -223,7 +256,7 @@ export class RepeatActivityRule extends Rule {
};
for (let set of value) {
const test = comparisonTextOp(set.length, operator, thresholdValue);
const md = set.map((x: (Comment | Submission)) => `[${x instanceof Submission ? x.title : getActivityIdentifier(x, 50)}](https://reddit.com${x.permalink}) in ${x.subreddit_name_prefixed} on ${dayjs(x.created_utc * 1000).utc().format()}`);
const md = set.map((x: (Comment | Submission)) => `[${asSubmission(x) ? x.title : getActivityIdentifier(x, 50)}](https://reddit.com${x.permalink}) in ${x.subreddit_name_prefixed} on ${dayjs(x.created_utc * 1000).utc().format()}`);
summaryData.sets.push(set);
summaryData.largestTrigger = Math.max(summaryData.largestTrigger, set.length);
@@ -294,21 +327,31 @@ interface RepeatActivityConfig extends ActivityWindow, ReferenceSubmission {
* */
gapAllowance?: number,
/**
* Only include Submissions from this list of Subreddits (by name, case-insensitive)
* If present, activities will be counted only if they are found in this list of Subreddits
*
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* Each value in the list can be either:
*
* * string (name of subreddit)
* * regular expression to run on the subreddit name
* * `SubredditState`
*
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
* */
include?: string[],
include?: (string | SubredditState)[],
/**
* Do not include Submissions from this list of Subreddits (by name, case-insensitive)
* If present, activities will be counted only if they are **NOT** found in this list of Subreddits
*
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* Each value in the list can be either:
*
* * string (name of subreddit)
* * regular expression to run on the subreddit name
* * `SubredditState`
*
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
* */
exclude?: string[],
exclude?: (string | SubredditState)[],
/**
* If present determines which activities to consider for gapAllowance.

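A minimal sketch of the expanded include list for the Repeat Activity rule, mixing the three value types the new doc comment allows (the subreddit names are the same illustrative ones used in the diff):

// Sketch only: include now accepts names, regular expressions, and SubredditState objects
const exampleRepeatActivityFilter = {
    include: [
        'mealtimevideos',       // plain subreddit name, matched case-insensitively
        '/onlyfans*/i',         // regular expression tested against the subreddit name
        { over18: true },       // SubredditState criteria
    ],
    gapAllowance: 1,            // existing option, included only to show the fragment in context
};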
View File

@@ -32,6 +32,11 @@ export interface RuleResult extends ResultContext {
triggered: (boolean | null)
}
export type FormattedRuleResult = RuleResult & {
triggered: string
result: string
}
export interface RuleSetResult {
results: RuleResult[],
condition: 'OR' | 'AND',

View File

@@ -154,6 +154,16 @@
"removed": {
"type": "boolean"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"score": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"spam": {
"type": "boolean"
},
@@ -213,6 +223,16 @@
"removed": {
"type": "boolean"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"score": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"spam": {
"type": "boolean"
},

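A short sketch of how the new reports and score comparisons can sit alongside existing state flags (values are illustrative):

// Sketch only: comparison strings follow the documented `(< OR > OR <= OR >=) <number>` syntax
const exampleItemState = {
    score: '> 100',      // matches when the Activity's karma is above 100
    reports: '>= 2',     // matches when the Activity has two or more reports
    removed: false,      // existing boolean flag shown for context
};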
View File

@@ -1,6 +1,72 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"ActivityThreshold": {
"additionalProperties": false,
"description": "At least one count property must be present. If both are present then either can trigger the rule",
"minProperties": 1,
"properties": {
"commentState": {
"$ref": "#/definitions/CommentState",
"description": "When present, a Comment will only be counted if it meets this criteria",
"examples": [
{
"op": true,
"removed": false
}
]
},
"karma": {
"description": "Test the **combined karma** from Activities found in the specified subreddits\n\nValue is a string containing a comparison operator and a number of **combined karma** to compare against\n\nIf specified then both `threshold` and `karma` must be met for this `SubThreshold` to be satisfied\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 50` => greater than 50 combined karma for all found Activities in specified subreddits",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"submissionState": {
"$ref": "#/definitions/SubmissionState",
"description": "When present, a Submission will only be counted if it meets this criteria",
"examples": [
{
"over_18": true,
"removed": false
}
]
},
"subreddits": {
"description": "Activities will be counted if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"type": "array"
},
"threshold": {
"default": ">= 1",
"description": "A string containing a comparison operator and a value to compare recent activities against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 3` => greater than 3 activities found in the listed subreddits\n* EX `<= 75%` => number of Activities in the subreddits listed are equal to or less than 75% of all Activities\n\n**Note:** If you use percentage comparison here as well as `useSubmissionAsReference` then \"all Activities\" is only pertains to Activities that had the Link of the Submission, rather than all Activities from this window.",
"examples": [
">= 1"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
}
},
"type": "object"
},
"ActivityWindowCriteria": {
"additionalProperties": false,
"description": "Multiple properties that may be used to define what range of Activity to retrieve.\n\nMay specify one, or both properties along with the `satisfyOn` property, to affect the retrieval behavior.",
@@ -167,7 +233,7 @@
"properties": {
"aggregateOn": {
"default": "undefined",
"description": "If `domains` is not specified this list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)\n* If `link` is included then aggregate author's submission history which is external links but not media\n\nIf nothing is specified or list is empty (default) all domains are aggregated",
"description": "This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)\n* If `link` is included then aggregate author's submission history which is external links but not media\n\nIf nothing is specified or list is empty (default) all domains are aggregated",
"examples": [
[
]
@@ -195,7 +261,7 @@
[
]
],
"description": "A list of domains whose Activities will be tested against `threshold`.\n\nIf this is present then `aggregateOn` is ignored.\n\nThe values are tested as partial strings so you do not need to include full URLs, just the part that matters.\n\nEX `[\"youtube\"]` will match submissions with the domain `https://youtube.com/c/aChannel`\nEX `[\"youtube.com/c/bChannel\"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`\n\nIf you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`\n\n**If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**\n\nIf nothing is specified or list is empty (default) aggregate using `aggregateOn`",
"description": "A list of domains whose Activities will be tested against `threshold`.\n\nThe values are tested as partial strings so you do not need to include full URLs, just the part that matters.\n\nEX `[\"youtube\"]` will match submissions with the domain `https://youtube.com/c/aChannel`\nEX `[\"youtube.com/c/bChannel\"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`\n\nIf you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`\n\n**If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**\n\nIf nothing is specified or list is empty (default) aggregate using `aggregateOn`",
"items": {
"type": "string"
},
@@ -686,7 +752,112 @@
],
"type": "object"
},
"CacheConfig": {
"properties": {
"actionedEventsMax": {
"default": 25,
"description": "The **maximum** number of Events that the cache should store triggered result summaries for\n\nThese summaries are viewable through the Web UI.\n\nThe value specified by a subreddit cannot be larger than the value set by the Operator for the global/bot config (if set)",
"type": "number"
},
"authorTTL": {
"default": 60,
"description": "Amount of time, in seconds, author activity history (Comments/Submission) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache\n\n* ENV => `AUTHOR_TTL`\n* ARG => `--authorTTL <sec>`",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"commentTTL": {
"default": 60,
"description": "Amount of time, in seconds, a comment should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"filterCriteriaTTL": {
"default": 60,
"description": "Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)\n\nThis is especially useful if when polling high-volume comments and your checks rely on author/item filters\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"provider": {
"anyOf": [
{
"$ref": "#/definitions/CacheOptions"
},
{
"enum": [
"memory",
"none",
"redis"
],
"type": "string"
}
],
"description": "The cache provider and, optionally, a custom configuration for that provider\n\nIf not present or `null` provider will be `memory`.\n\nTo specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`"
},
"submissionTTL": {
"default": 60,
"description": "Amount of time, in seconds, a submission should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"subredditTTL": {
"default": 600,
"description": "Amount of time, in seconds, a subreddit (attributes) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
600
],
"type": [
"number",
"boolean"
]
},
"userNotesTTL": {
"default": 300,
"description": "Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
300
],
"type": [
"number",
"boolean"
]
},
"wikiTTL": {
"default": 300,
"description": "Amount of time, in seconds, wiki content pages should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
300
],
"type": [
"number",
"boolean"
]
}
},
"type": "object"
},
"CacheOptions": {
"additionalProperties": {
},
"description": "Configure granular settings for a cache provider with this object",
"properties": {
"auth_pass": {
@@ -1067,6 +1238,16 @@
"removed": {
"type": "boolean"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"score": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"spam": {
"type": "boolean"
},
@@ -1247,23 +1428,28 @@
"type": "object"
},
"HistoryCriteria": {
"description": "If both `submission` and `comment` are defined then criteria will only trigger if BOTH thresholds are met",
"description": "Criteria will only trigger if ALL present thresholds (comment, submission, total) are met",
"properties": {
"comment": {
"description": "A string containing a comparison operator and a value to compare comments against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 comments\n* EX `<= 75%` => comments are equal to or less than 75% of all Activities\n\nIf your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:\n\n* EX `> 100 OP` => greater than 100 comments as OP\n* EX `<= 25% as OP` => Comments as OP were less then or equal to 25% of **all Comments**",
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) comments against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 comments\n* EX `<= 75%` => comments are equal to or less than 75% of unfiltered Activities\n\nIf your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:\n\n* EX `> 100 OP` => greater than 100 filtered comments as OP\n* EX `<= 25% as OP` => **Filtered** comments as OP were less then or equal to 25% of **unfiltered Comments**",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"minActivityCount": {
"default": 5,
"description": "The minimum number of activities that must exist from the `window` results for this criteria to run",
"description": "The minimum number of **filtered** activities that must exist from the `window` results for this criteria to run",
"type": "number"
},
"name": {
"type": "string"
},
"submission": {
"description": "A string containing a comparison operator and a value to compare submissions against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 submissions\n* EX `<= 75%` => submissions are equal to or less than 75% of all Activities",
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) submissions against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 filtered submissions\n* EX `<= 75%` => filtered submissions are equal to or less than 75% of unfiltered Activities",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"total": {
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`) activities against\n\n**Note:** This is only useful if using `include` or `exclude` otherwise percent will always be 100% and total === activityTotal\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 filtered activities\n* EX `<= 75%` => filtered activities are equal to or less than 75% of all Activities",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
@@ -1332,27 +1518,51 @@
"type": "array"
},
"exclude": {
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are **NOT** found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`\n\n**Note:** This affects **post-window retrieval** activities. So that:\n\n* `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering\n* all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**\n* -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"include": {
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are found in this list of Subreddits.\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`\n\n **Note:** This affects **post-window retrieval** activities. So that:\n\n* `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering\n* all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**\n* -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"itemIs": {
@@ -1567,6 +1777,16 @@
"title": {
"description": "The title of the message\n\nIf not specified will be defaulted to `Concerning your [Submission/Comment]`",
"type": "string"
},
"to": {
"description": "Entity to send message to.\n\nIf not present Message be will sent to the Author of the Activity being checked.\n\nValid formats:\n\n* `aUserName` -- send to /u/aUserName\n* `u/aUserName` -- send to /u/aUserName\n* `r/aSubreddit` -- sent to modmail of /r/aSubreddit\n\n**Note:** Reddit does not support sending a message AS a subreddit TO another subreddit",
"examples": [
"aUserName",
"u/aUserName",
"r/aSubreddit"
],
"pattern": "^\\s*(\\/[ru]\\/|[ru]\\/)*(\\w+)*\\s*$",
"type": "string"
}
},
"required": [
@@ -1759,7 +1979,7 @@
"thresholds": {
"description": "A list of subreddits/count criteria that may trigger this rule. ANY SubThreshold will trigger this rule.",
"items": {
"$ref": "#/definitions/SubThreshold"
"$ref": "#/definitions/ActivityThreshold"
},
"minItems": 1,
"type": "array"
@@ -1796,6 +2016,49 @@
],
"type": "object"
},
"RegExp": {
"properties": {
"dotAll": {
"type": "boolean"
},
"flags": {
"type": "string"
},
"global": {
"type": "boolean"
},
"ignoreCase": {
"type": "boolean"
},
"lastIndex": {
"type": "number"
},
"multiline": {
"type": "boolean"
},
"source": {
"type": "string"
},
"sticky": {
"type": "boolean"
},
"unicode": {
"type": "boolean"
}
},
"required": [
"dotAll",
"flags",
"global",
"ignoreCase",
"lastIndex",
"multiline",
"source",
"sticky",
"unicode"
],
"type": "object"
},
"RegexCriteria": {
"properties": {
"activityMatchThreshold": {
@@ -1834,16 +2097,12 @@
"type": "string"
},
"regex": {
"description": "A valid Regular Expression to test content against\n\nDo not wrap expression in forward slashes\n\nEX For the expression `/reddit|FoxxMD/` use the value should be `reddit|FoxxMD`",
"description": "A valid Regular Expression to test content against\n\nIf no flags are specified then the **global** flag is used by default",
"examples": [
"reddit|FoxxMD"
"/reddit|FoxxMD/ig"
],
"type": "string"
},
"regexFlags": {
"description": "Regex flags to use",
"type": "string"
},
"testOn": {
"default": [
"title",
@@ -2082,15 +2341,27 @@
]
},
"exclude": {
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are **NOT** found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"gapAllowance": {
@@ -2098,15 +2369,27 @@
"type": "number"
},
"include": {
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"itemIs": {
@@ -2333,40 +2616,6 @@
],
"type": "object"
},
"SubThreshold": {
"additionalProperties": false,
"description": "At least one count property must be present. If both are present then either can trigger the rule",
"minProperties": 1,
"properties": {
"subreddits": {
"description": "A list of Subreddits (by name, case-insensitive) to look for.\n\nEX [\"mealtimevideos\",\"askscience\"]",
"examples": [
[
"mealtimevideos",
"askscience"
]
],
"items": {
"type": "string"
},
"minItems": 1,
"type": "array"
},
"threshold": {
"default": ">= 1",
"description": "A string containing a comparison operator and a value to compare recent activities against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 3` => greater than 3 activities found in the listed subreddits\n* EX `<= 75%` => number of Activities in the subreddits listed are equal to or less than 75% of all Activities\n\n**Note:** If you use percentage comparison here as well as `useSubmissionAsReference` then \"all Activities\" is only pertains to Activities that had the Link of the Submission, rather than all Activities from this window.",
"examples": [
">= 1"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
}
},
"required": [
"subreddits"
],
"type": "object"
},
"SubmissionCheckJson": {
"properties": {
"actions": {
@@ -2439,6 +2688,10 @@
}
]
},
"cacheUserResult": {
"$ref": "#/definitions/UserResultCacheOptions",
"description": "Cache the result of this check based on the comment author and the submission id\n\nThis is useful in this type of scenario:\n\n1. This check is configured to run on comments for specific submissions with high volume activity\n2. The rules being run are not dependent on the content of the comment\n3. The rule results are not likely to change while cache is valid"
},
"condition": {
"default": "AND",
"description": "Under what condition should a set of run `Rule` objects be considered \"successful\"?\n\nIf `OR` then **any** triggered `Rule` object results in success.\n\nIf `AND` then **all** `Rule` objects must be triggered to result in success.",
@@ -2583,6 +2836,16 @@
"removed": {
"type": "boolean"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"score": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"spam": {
"type": "boolean"
},
@@ -2599,70 +2862,40 @@
},
"type": "object"
},
"SubredditCacheConfig": {
"SubredditState": {
"description": "Different attributes a `Subreddit` can be in. Only include a property if you want to check it.",
"examples": [
{
"over18": true
}
],
"properties": {
"authorTTL": {
"default": 60,
"description": "Amount of time, in seconds, author activities (Comments/Submission) should be cached",
"examples": [
60
],
"type": "number"
},
"commentTTL": {
"default": 60,
"description": "Amount of time, in seconds, a comment should be cached",
"examples": [
60
],
"type": "number"
},
"filterCriteriaTTL": {
"default": 60,
"description": "Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)\n\nThis is especially useful if when polling high-volume comments and your checks rely on author/item filters",
"examples": [
60
],
"type": "number"
},
"provider": {
"name": {
"anyOf": [
{
"$ref": "#/definitions/CacheOptions"
"$ref": "#/definitions/RegExp"
},
{
"enum": [
"memory",
"none",
"redis"
],
"type": "string"
}
],
"description": "The name the subreddit.\n\nCan be a normal string (will check case-insensitive) or a regular expression\n\nEX `[\"mealtimevideos\", \"/onlyfans*\\/i\"]`",
"examples": [
"mealtimevideos",
"/onlyfans*/i"
]
},
"submissionTTL": {
"default": 60,
"description": "Amount of time, in seconds, a submission should be cached",
"examples": [
60
],
"type": "number"
"over18": {
"description": "Is subreddit NSFW/over 18?\n\n**Note**: This is **mod-controlled flag** so it is up to the mods of the subreddit to correctly mark their subreddit as NSFW",
"type": "boolean"
},
"userNotesTTL": {
"default": 300,
"description": "Amount of time, in milliseconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached",
"examples": [
300
],
"type": "number"
"quarantine": {
"description": "Is subreddit quarantined?",
"type": "boolean"
},
"wikiTTL": {
"default": 300,
"description": "Amount of time, in seconds, wiki content pages should be cached",
"examples": [
300
],
"type": "number"
"stateDescription": {
"description": "A friendly description of what this State is trying to parse",
"type": "string"
}
},
"type": "object"
@@ -2810,6 +3043,12 @@
"description": "Cache the result of this check based on the comment author and the submission id\n\nThis is useful in this type of scenario:\n\n1. This check is configured to run on comments for specific submissions with high volume activity\n2. The rules being run are not dependent on the content of the comment\n3. The rule results are not likely to change while cache is valid",
"properties": {
"enable": {
"default": false,
"type": "boolean"
},
"runActions": {
"default": true,
"description": "In the event the cache returns a triggered result should the actions for the check also be run?",
"type": "boolean"
},
"ttl": {
@@ -2826,7 +3065,7 @@
},
"properties": {
"caching": {
"$ref": "#/definitions/SubredditCacheConfig",
"$ref": "#/definitions/CacheConfig",
"description": "Per-subreddit config for caching TTL values. If set to `false` caching is disabled."
},
"checks": {

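A sketch of a single entry for the Recent Activity rule's thresholds list under the new ActivityThreshold definition (property names come from the schema above; all values are illustrative):

// Sketch only: subreddits accepts names, regular expressions, and SubredditState objects
const exampleActivityThreshold = {
    subreddits: ['mealtimevideos', '/onlyfans*/i', { over18: true }],
    threshold: '>= 3',                    // count comparison against Activities found in those subreddits (schema default '>= 1')
    karma: '> 50',                        // optional combined-karma comparison; when present both threshold and karma must be met
    submissionState: { removed: false },  // only count Submissions meeting this criteria
};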
View File

@@ -23,74 +23,8 @@
"description": "The configuration for an **individual reddit account** ContextMod will run as a bot.\n\nMultiple bot configs may be specified (one per reddit account).\n\n**NOTE:** If `bots` is not specified in a `FILE` then a default `bot` is generated using `ENV/ARG` values IE `CLIENT_ID`, etc...but if `bots` IS specified the default is not generated.",
"properties": {
"caching": {
"description": "Settings to configure the default caching behavior for each suberddit",
"properties": {
"authorTTL": {
"default": 60,
"description": "Amount of time, in seconds, author activity history (Comments/Submission) should be cached\n\n* ENV => `AUTHOR_TTL`\n* ARG => `--authorTTL <sec>`",
"examples": [
60
],
"type": "number"
},
"commentTTL": {
"default": 60,
"description": "Amount of time, in seconds, a comment should be cached",
"examples": [
60
],
"type": "number"
},
"filterCriteriaTTL": {
"default": 60,
"description": "Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)\n\nThis is especially useful if when polling high-volume comments and your checks rely on author/item filters",
"examples": [
60
],
"type": "number"
},
"provider": {
"anyOf": [
{
"$ref": "#/definitions/CacheOptions"
},
{
"enum": [
"memory",
"none",
"redis"
],
"type": "string"
}
],
"description": "The cache provider and, optionally, a custom configuration for that provider\n\nIf not present or `null` provider will be `memory`.\n\nTo specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`"
},
"submissionTTL": {
"default": 60,
"description": "Amount of time, in seconds, a submission should be cached",
"examples": [
60
],
"type": "number"
},
"userNotesTTL": {
"default": 300,
"description": "Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached",
"examples": [
300
],
"type": "number"
},
"wikiTTL": {
"default": 300,
"description": "Amount of time, in seconds, wiki content pages should be cached",
"examples": [
300
],
"type": "number"
}
},
"type": "object"
"$ref": "#/definitions/OperatorCacheConfig",
"description": "Settings to configure the default caching behavior for this bot\n\nEvery setting not specified will default to what is specified by the global operator caching config"
},
"credentials": {
"$ref": "#/definitions/RedditCredentials",
@@ -243,6 +177,8 @@
"type": "object"
},
"CacheOptions": {
"additionalProperties": {
},
"description": "Configure granular settings for a cache provider with this object",
"properties": {
"auth_pass": {
@@ -394,6 +330,114 @@
],
"type": "object"
},
"OperatorCacheConfig": {
"properties": {
"actionedEventsDefault": {
"default": 25,
"description": "The **default** number of Events that the cache will store triggered result summaries for\n\nThese summaries are viewable through the Web UI.\n\nThe value specified cannot be larger than `actionedEventsMax` for the global/bot config (if set)",
"type": "number"
},
"actionedEventsMax": {
"default": 25,
"description": "The **maximum** number of Events that the cache should store triggered result summaries for\n\nThese summaries are viewable through the Web UI.\n\nThe value specified by a subreddit cannot be larger than the value set by the Operator for the global/bot config (if set)",
"type": "number"
},
"authorTTL": {
"default": 60,
"description": "Amount of time, in seconds, author activity history (Comments/Submission) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache\n\n* ENV => `AUTHOR_TTL`\n* ARG => `--authorTTL <sec>`",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"commentTTL": {
"default": 60,
"description": "Amount of time, in seconds, a comment should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"filterCriteriaTTL": {
"default": 60,
"description": "Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)\n\nThis is especially useful if when polling high-volume comments and your checks rely on author/item filters\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"provider": {
"anyOf": [
{
"$ref": "#/definitions/CacheOptions"
},
{
"enum": [
"memory",
"none",
"redis"
],
"type": "string"
}
],
"description": "The cache provider and, optionally, a custom configuration for that provider\n\nIf not present or `null` provider will be `memory`.\n\nTo specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`"
},
"submissionTTL": {
"default": 60,
"description": "Amount of time, in seconds, a submission should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"subredditTTL": {
"default": 600,
"description": "Amount of time, in seconds, a subreddit (attributes) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
600
],
"type": [
"number",
"boolean"
]
},
"userNotesTTL": {
"default": 300,
"description": "Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
300
],
"type": [
"number",
"boolean"
]
},
"wikiTTL": {
"default": 300,
"description": "Amount of time, in seconds, wiki content pages should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
300
],
"type": [
"number",
"boolean"
]
}
},
"type": "object"
},
"PollingDefaults": {
"properties": {
"delayUntil": {
@@ -527,6 +571,10 @@
},
"type": "array"
},
"caching": {
"$ref": "#/definitions/OperatorCacheConfig",
"description": "Settings to configure the default caching behavior globally\n\nThese settings will be used by each bot, and subreddit, that does not specify their own"
},
"logging": {
"description": "Settings to configure global logging defaults",
"properties": {
@@ -605,6 +653,21 @@
"web": {
"description": "Settings for the web interface",
"properties": {
"caching": {
"anyOf": [
{
"$ref": "#/definitions/CacheOptions"
},
{
"enum": [
"memory",
"redis"
],
"type": "string"
}
],
"description": "Caching provider to use for session and invite data\n\nIf none is provided the top-level caching provider is used"
},
"clients": {
"description": "A list of CM Servers this Client should connect to.\n\nIf not specified a default `BotConnection` for this instance is generated",
"examples": [
@@ -631,6 +694,20 @@
}
]
},
"invites": {
"description": "Settings related to oauth flow invites",
"properties": {
"maxAge": {
"default": 0,
"description": "Number of seconds an invite should be valid for\n\n If `0` or not specified (default) invites do not expire",
"examples": [
0
],
"type": "number"
}
},
"type": "object"
},
"logLevel": {
"description": "The default log level to filter to in the web interface\n\nIf not specified or `null` will be same as global `logLevel`",
"enum": [
@@ -674,27 +751,16 @@
"session": {
"description": "Settings to configure the behavior of user sessions -- the session is what the web interface uses to identify logged in users.",
"properties": {
"provider": {
"anyOf": [
{
"$ref": "#/definitions/CacheOptions"
},
{
"enum": [
"memory",
"redis"
],
"type": "string"
}
],
"default": "memory",
"description": "The cache provider to use.\n\nThe default should be sufficient for almost all use cases",
"maxAge": {
"default": 86400,
"description": "Number of seconds a session should be valid for.\n\nDefault is 1 day",
"examples": [
"memory"
]
86400
],
"type": "number"
},
"secret": {
"description": "The secret value used to encrypt session data\n\nIf provider is persistent (redis) specifying a value here will ensure sessions are valid between application restarts\n\nWhen not present or `null` a random string is generated on application start",
"description": "The secret value used to encrypt session data\n\nIf provider is persistent (`redis`) specifying a value here will ensure sessions are valid between application restarts\n\nWhen not present or `null` a random string is generated on application start",
"examples": [
"definitelyARandomString"
],

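A rough sketch of how the new operator-level caching defaults and web settings from this file could be combined (structure and values are assumptions based on the schema fragments above, not a verified config):

// Sketch only: for TTLs, numbers are seconds, 0 or true caches indefinitely, false disables that cache
const exampleOperatorConfig = {
    caching: {
        provider: 'redis',            // 'memory' (default), 'none', 'redis', or a CacheOptions object
        authorTTL: 60,                // author history cached for 60 seconds
        wikiTTL: false,               // wiki pages not cached at all
    },
    web: {
        session: { maxAge: 86400 },   // seconds a login session stays valid (schema default: 1 day)
        invites: { maxAge: 0 },       // 0 means oauth invites never expire (schema default)
    },
};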
View File

@@ -24,6 +24,72 @@
}
],
"definitions": {
"ActivityThreshold": {
"additionalProperties": false,
"description": "At least one count property must be present. If both are present then either can trigger the rule",
"minProperties": 1,
"properties": {
"commentState": {
"$ref": "#/definitions/CommentState",
"description": "When present, a Comment will only be counted if it meets this criteria",
"examples": [
{
"op": true,
"removed": false
}
]
},
"karma": {
"description": "Test the **combined karma** from Activities found in the specified subreddits\n\nValue is a string containing a comparison operator and a number of **combined karma** to compare against\n\nIf specified then both `threshold` and `karma` must be met for this `SubThreshold` to be satisfied\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 50` => greater than 50 combined karma for all found Activities in specified subreddits",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"submissionState": {
"$ref": "#/definitions/SubmissionState",
"description": "When present, a Submission will only be counted if it meets this criteria",
"examples": [
{
"over_18": true,
"removed": false
}
]
},
"subreddits": {
"description": "Activities will be counted if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"type": "array"
},
"threshold": {
"default": ">= 1",
"description": "A string containing a comparison operator and a value to compare recent activities against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 3` => greater than 3 activities found in the listed subreddits\n* EX `<= 75%` => number of Activities in the subreddits listed are equal to or less than 75% of all Activities\n\n**Note:** If you use percentage comparison here as well as `useSubmissionAsReference` then \"all Activities\" is only pertains to Activities that had the Link of the Submission, rather than all Activities from this window.",
"examples": [
">= 1"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
}
},
"type": "object"
},
"ActivityWindowCriteria": {
"additionalProperties": false,
"description": "Multiple properties that may be used to define what range of Activity to retrieve.\n\nMay specify one, or both properties along with the `satisfyOn` property, to affect the retrieval behavior.",
@@ -113,7 +179,7 @@
"properties": {
"aggregateOn": {
"default": "undefined",
"description": "If `domains` is not specified this list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)\n* If `link` is included then aggregate author's submission history which is external links but not media\n\nIf nothing is specified or list is empty (default) all domains are aggregated",
"description": "This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)\n* If `link` is included then aggregate author's submission history which is external links but not media\n\nIf nothing is specified or list is empty (default) all domains are aggregated",
"examples": [
[
]
@@ -141,7 +207,7 @@
[
]
],
"description": "A list of domains whose Activities will be tested against `threshold`.\n\nIf this is present then `aggregateOn` is ignored.\n\nThe values are tested as partial strings so you do not need to include full URLs, just the part that matters.\n\nEX `[\"youtube\"]` will match submissions with the domain `https://youtube.com/c/aChannel`\nEX `[\"youtube.com/c/bChannel\"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`\n\nIf you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`\n\n**If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**\n\nIf nothing is specified or list is empty (default) aggregate using `aggregateOn`",
"description": "A list of domains whose Activities will be tested against `threshold`.\n\nThe values are tested as partial strings so you do not need to include full URLs, just the part that matters.\n\nEX `[\"youtube\"]` will match submissions with the domain `https://youtube.com/c/aChannel`\nEX `[\"youtube.com/c/bChannel\"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`\n\nIf you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`\n\n**If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**\n\nIf nothing is specified or list is empty (default) aggregate using `aggregateOn`",
"items": {
"type": "string"
},
@@ -538,6 +604,16 @@
"removed": {
"type": "boolean"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"score": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"spam": {
"type": "boolean"
},
@@ -611,23 +687,28 @@
"type": "object"
},
"HistoryCriteria": {
"description": "If both `submission` and `comment` are defined then criteria will only trigger if BOTH thresholds are met",
"description": "Criteria will only trigger if ALL present thresholds (comment, submission, total) are met",
"properties": {
"comment": {
"description": "A string containing a comparison operator and a value to compare comments against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 comments\n* EX `<= 75%` => comments are equal to or less than 75% of all Activities\n\nIf your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:\n\n* EX `> 100 OP` => greater than 100 comments as OP\n* EX `<= 25% as OP` => Comments as OP were less then or equal to 25% of **all Comments**",
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) comments against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 comments\n* EX `<= 75%` => comments are equal to or less than 75% of unfiltered Activities\n\nIf your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:\n\n* EX `> 100 OP` => greater than 100 filtered comments as OP\n* EX `<= 25% as OP` => **Filtered** comments as OP were less then or equal to 25% of **unfiltered Comments**",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"minActivityCount": {
"default": 5,
"description": "The minimum number of activities that must exist from the `window` results for this criteria to run",
"description": "The minimum number of **filtered** activities that must exist from the `window` results for this criteria to run",
"type": "number"
},
"name": {
"type": "string"
},
"submission": {
"description": "A string containing a comparison operator and a value to compare submissions against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 submissions\n* EX `<= 75%` => submissions are equal to or less than 75% of all Activities",
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) submissions against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 filtered submissions\n* EX `<= 75%` => filtered submissions are equal to or less than 75% of unfiltered Activities",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"total": {
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`) activities against\n\n**Note:** This is only useful if using `include` or `exclude` otherwise percent will always be 100% and total === activityTotal\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 filtered activities\n* EX `<= 75%` => filtered activities are equal to or less than 75% of all Activities",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
@@ -696,27 +777,51 @@
"type": "array"
},
"exclude": {
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are **NOT** found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`\n\n**Note:** This affects **post-window retrieval** activities. So that:\n\n* `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering\n* all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**\n* -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"include": {
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are found in this list of Subreddits.\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`\n\n **Note:** This affects **post-window retrieval** activities. So that:\n\n* `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering\n* all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**\n* -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"itemIs": {
@@ -830,7 +935,7 @@
"thresholds": {
"description": "A list of subreddits/count criteria that may trigger this rule. ANY SubThreshold will trigger this rule.",
"items": {
"$ref": "#/definitions/SubThreshold"
"$ref": "#/definitions/ActivityThreshold"
},
"minItems": 1,
"type": "array"
@@ -867,6 +972,49 @@
],
"type": "object"
},
"RegExp": {
"properties": {
"dotAll": {
"type": "boolean"
},
"flags": {
"type": "string"
},
"global": {
"type": "boolean"
},
"ignoreCase": {
"type": "boolean"
},
"lastIndex": {
"type": "number"
},
"multiline": {
"type": "boolean"
},
"source": {
"type": "string"
},
"sticky": {
"type": "boolean"
},
"unicode": {
"type": "boolean"
}
},
"required": [
"dotAll",
"flags",
"global",
"ignoreCase",
"lastIndex",
"multiline",
"source",
"sticky",
"unicode"
],
"type": "object"
},
"RegexCriteria": {
"properties": {
"activityMatchThreshold": {
@@ -905,16 +1053,12 @@
"type": "string"
},
"regex": {
"description": "A valid Regular Expression to test content against\n\nDo not wrap expression in forward slashes\n\nEX For the expression `/reddit|FoxxMD/` use the value should be `reddit|FoxxMD`",
"description": "A valid Regular Expression to test content against\n\nIf no flags are specified then the **global** flag is used by default",
"examples": [
"reddit|FoxxMD"
"/reddit|FoxxMD/ig"
],
"type": "string"
},
"regexFlags": {
"description": "Regex flags to use",
"type": "string"
},
"testOn": {
"default": [
"title",
@@ -1076,15 +1220,27 @@
]
},
"exclude": {
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are **NOT** found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"gapAllowance": {
@@ -1092,15 +1248,27 @@
"type": "number"
},
"include": {
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"itemIs": {
@@ -1190,40 +1358,6 @@
],
"type": "object"
},
"SubThreshold": {
"additionalProperties": false,
"description": "At least one count property must be present. If both are present then either can trigger the rule",
"minProperties": 1,
"properties": {
"subreddits": {
"description": "A list of Subreddits (by name, case-insensitive) to look for.\n\nEX [\"mealtimevideos\",\"askscience\"]",
"examples": [
[
"mealtimevideos",
"askscience"
]
],
"items": {
"type": "string"
},
"minItems": 1,
"type": "array"
},
"threshold": {
"default": ">= 1",
"description": "A string containing a comparison operator and a value to compare recent activities against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 3` => greater than 3 activities found in the listed subreddits\n* EX `<= 75%` => number of Activities in the subreddits listed are equal to or less than 75% of all Activities\n\n**Note:** If you use percentage comparison here as well as `useSubmissionAsReference` then \"all Activities\" is only pertains to Activities that had the Link of the Submission, rather than all Activities from this window.",
"examples": [
">= 1"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
}
},
"required": [
"subreddits"
],
"type": "object"
},
"SubmissionState": {
"description": "Different attributes a `Submission` can be in. Only include a property if you want to check it.",
"examples": [
@@ -1267,6 +1401,16 @@
"removed": {
"type": "boolean"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"score": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"spam": {
"type": "boolean"
},
@@ -1283,6 +1427,44 @@
},
"type": "object"
},
"SubredditState": {
"description": "Different attributes a `Subreddit` can be in. Only include a property if you want to check it.",
"examples": [
{
"over18": true
}
],
"properties": {
"name": {
"anyOf": [
{
"$ref": "#/definitions/RegExp"
},
{
"type": "string"
}
],
"description": "The name the subreddit.\n\nCan be a normal string (will check case-insensitive) or a regular expression\n\nEX `[\"mealtimevideos\", \"/onlyfans*\\/i\"]`",
"examples": [
"mealtimevideos",
"/onlyfans*/i"
]
},
"over18": {
"description": "Is subreddit NSFW/over 18?\n\n**Note**: This is **mod-controlled flag** so it is up to the mods of the subreddit to correctly mark their subreddit as NSFW",
"type": "boolean"
},
"quarantine": {
"description": "Is subreddit quarantined?",
"type": "boolean"
},
"stateDescription": {
"description": "A friendly description of what this State is trying to parse",
"type": "string"
}
},
"type": "object"
},
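A hypothetical `include` list using all three accepted forms (written as a TypeScript literal purely for illustration; the surrounding rule shape is not shown here):

// Illustrative only -- values follow the schema definitions above
const include = [
    'mealtimevideos',                          // plain name, matched case-insensitively
    '/onlyfans*/i',                            // regular expression run against the subreddit name
    { over18: true },                          // SubredditState: any NSFW subreddit
    { name: '/^ask/i', quarantine: false }     // SubredditState combining a name regex with an attribute
];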
"UserNoteCriteria": {
"properties": {
"count": {

View File

@@ -1,6 +1,72 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"ActivityThreshold": {
"additionalProperties": false,
"description": "At least one count property must be present. If both are present then either can trigger the rule",
"minProperties": 1,
"properties": {
"commentState": {
"$ref": "#/definitions/CommentState",
"description": "When present, a Comment will only be counted if it meets this criteria",
"examples": [
{
"op": true,
"removed": false
}
]
},
"karma": {
"description": "Test the **combined karma** from Activities found in the specified subreddits\n\nValue is a string containing a comparison operator and a number of **combined karma** to compare against\n\nIf specified then both `threshold` and `karma` must be met for this `SubThreshold` to be satisfied\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 50` => greater than 50 combined karma for all found Activities in specified subreddits",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"submissionState": {
"$ref": "#/definitions/SubmissionState",
"description": "When present, a Submission will only be counted if it meets this criteria",
"examples": [
{
"over_18": true,
"removed": false
}
]
},
"subreddits": {
"description": "Activities will be counted if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"type": "array"
},
"threshold": {
"default": ">= 1",
"description": "A string containing a comparison operator and a value to compare recent activities against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 3` => greater than 3 activities found in the listed subreddits\n* EX `<= 75%` => number of Activities in the subreddits listed are equal to or less than 75% of all Activities\n\n**Note:** If you use percentage comparison here as well as `useSubmissionAsReference` then \"all Activities\" is only pertains to Activities that had the Link of the Submission, rather than all Activities from this window.",
"examples": [
">= 1"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
}
},
"type": "object"
},
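All of the comparison strings in these definitions (`threshold`, `karma`, and the `score`/`reports` criteria below) share the `(< OR > OR <= OR >=) <number>[percent sign]` syntax. The sketch below shows one way such a value could be parsed and evaluated; the project's own `parseGenericValueComparison` and `comparisonTextOp` utilities (imported further down) may differ in signature and detail.

// Illustrative only -- names and shapes here are assumptions, not the project's actual API
type Operator = '>' | '>=' | '<' | '<=';

const parseComparison = (str: string) => {
    const m = str.match(/^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$/);
    if (m === null) {
        throw new Error(`'${str}' is not a valid comparison`);
    }
    return { operator: m[1] as Operator, value: Number(m[2]), isPercent: m[3] === '%' };
};

const compare = (found: number, operator: Operator, expected: number): boolean => {
    if (operator === '>') { return found > expected; }
    if (operator === '>=') { return found >= expected; }
    if (operator === '<') { return found < expected; }
    return found <= expected;
};

// EX: does '<= 75%' pass when 6 of 10 window activities matched?
// const { operator, value, isPercent } = parseComparison('<= 75%');
// compare(isPercent ? (6 / 10) * 100 : 6, operator, value); // => true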
"ActivityWindowCriteria": {
"additionalProperties": false,
"description": "Multiple properties that may be used to define what range of Activity to retrieve.\n\nMay specify one, or both properties along with the `satisfyOn` property, to affect the retrieval behavior.",
@@ -90,7 +156,7 @@
"properties": {
"aggregateOn": {
"default": "undefined",
"description": "If `domains` is not specified this list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)\n* If `link` is included then aggregate author's submission history which is external links but not media\n\nIf nothing is specified or list is empty (default) all domains are aggregated",
"description": "This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)\n* If `link` is included then aggregate author's submission history which is external links but not media\n\nIf nothing is specified or list is empty (default) all domains are aggregated",
"examples": [
[
]
@@ -118,7 +184,7 @@
[
]
],
"description": "A list of domains whose Activities will be tested against `threshold`.\n\nIf this is present then `aggregateOn` is ignored.\n\nThe values are tested as partial strings so you do not need to include full URLs, just the part that matters.\n\nEX `[\"youtube\"]` will match submissions with the domain `https://youtube.com/c/aChannel`\nEX `[\"youtube.com/c/bChannel\"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`\n\nIf you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`\n\n**If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**\n\nIf nothing is specified or list is empty (default) aggregate using `aggregateOn`",
"description": "A list of domains whose Activities will be tested against `threshold`.\n\nThe values are tested as partial strings so you do not need to include full URLs, just the part that matters.\n\nEX `[\"youtube\"]` will match submissions with the domain `https://youtube.com/c/aChannel`\nEX `[\"youtube.com/c/bChannel\"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`\n\nIf you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`\n\n**If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**\n\nIf nothing is specified or list is empty (default) aggregate using `aggregateOn`",
"items": {
"type": "string"
},
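A rough illustration of the partial-string matching described above (the comparison shown is an assumption for clarity, not the rule's verbatim implementation):

// A configured domain value matches when it appears anywhere in the submission's domain/URL
const domainMatches = (configValue: string, submissionDomain: string): boolean =>
    submissionDomain.toLowerCase().includes(configValue.toLowerCase());

// domainMatches('youtube', 'https://youtube.com/c/aChannel')                => true
// domainMatches('youtube.com/c/bChannel', 'https://youtube.com/c/aChannel') => false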
@@ -515,6 +581,16 @@
"removed": {
"type": "boolean"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"score": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"spam": {
"type": "boolean"
},
@@ -588,23 +664,28 @@
"type": "object"
},
"HistoryCriteria": {
"description": "If both `submission` and `comment` are defined then criteria will only trigger if BOTH thresholds are met",
"description": "Criteria will only trigger if ALL present thresholds (comment, submission, total) are met",
"properties": {
"comment": {
"description": "A string containing a comparison operator and a value to compare comments against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 comments\n* EX `<= 75%` => comments are equal to or less than 75% of all Activities\n\nIf your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:\n\n* EX `> 100 OP` => greater than 100 comments as OP\n* EX `<= 25% as OP` => Comments as OP were less then or equal to 25% of **all Comments**",
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) comments against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 comments\n* EX `<= 75%` => comments are equal to or less than 75% of unfiltered Activities\n\nIf your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:\n\n* EX `> 100 OP` => greater than 100 filtered comments as OP\n* EX `<= 25% as OP` => **Filtered** comments as OP were less then or equal to 25% of **unfiltered Comments**",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"minActivityCount": {
"default": 5,
"description": "The minimum number of activities that must exist from the `window` results for this criteria to run",
"description": "The minimum number of **filtered** activities that must exist from the `window` results for this criteria to run",
"type": "number"
},
"name": {
"type": "string"
},
"submission": {
"description": "A string containing a comparison operator and a value to compare submissions against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 submissions\n* EX `<= 75%` => submissions are equal to or less than 75% of all Activities",
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) submissions against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 filtered submissions\n* EX `<= 75%` => filtered submissions are equal to or less than 75% of unfiltered Activities",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"total": {
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`) activities against\n\n**Note:** This is only useful if using `include` or `exclude` otherwise percent will always be 100% and total === activityTotal\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 filtered activities\n* EX `<= 75%` => filtered activities are equal to or less than 75% of all Activities",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
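As a hedged example of how these fields fit together (a hypothetical criteria object built only from the properties above, not taken from the project's documentation):

// Illustrative only -- triggers when at least half of the window is made up of filtered activities
const criteria = {
    name: 'mostly filtered history',
    minActivityCount: 10,      // need at least 10 filtered activities before evaluating
    total: '>= 50%',           // filtered activities are at least half of all window activities
    comment: '<= 25% OP'       // and filtered comments made as OP are no more than 25% of all comments
};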
@@ -673,27 +754,51 @@
"type": "array"
},
"exclude": {
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are **NOT** found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`\n\n**Note:** This affects **post-window retrieval** activities. So that:\n\n* `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering\n* all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**\n* -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"include": {
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are found in this list of Subreddits.\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`\n\n **Note:** This affects **post-window retrieval** activities. So that:\n\n* `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering\n* all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**\n* -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"itemIs": {
@@ -807,7 +912,7 @@
"thresholds": {
"description": "A list of subreddits/count criteria that may trigger this rule. ANY SubThreshold will trigger this rule.",
"items": {
"$ref": "#/definitions/SubThreshold"
"$ref": "#/definitions/ActivityThreshold"
},
"minItems": 1,
"type": "array"
@@ -844,6 +949,49 @@
],
"type": "object"
},
"RegExp": {
"properties": {
"dotAll": {
"type": "boolean"
},
"flags": {
"type": "string"
},
"global": {
"type": "boolean"
},
"ignoreCase": {
"type": "boolean"
},
"lastIndex": {
"type": "number"
},
"multiline": {
"type": "boolean"
},
"source": {
"type": "string"
},
"sticky": {
"type": "boolean"
},
"unicode": {
"type": "boolean"
}
},
"required": [
"dotAll",
"flags",
"global",
"ignoreCase",
"lastIndex",
"multiline",
"source",
"sticky",
"unicode"
],
"type": "object"
},
"RegexCriteria": {
"properties": {
"activityMatchThreshold": {
@@ -882,16 +1030,12 @@
"type": "string"
},
"regex": {
"description": "A valid Regular Expression to test content against\n\nDo not wrap expression in forward slashes\n\nEX For the expression `/reddit|FoxxMD/` use the value should be `reddit|FoxxMD`",
"description": "A valid Regular Expression to test content against\n\nIf no flags are specified then the **global** flag is used by default",
"examples": [
"reddit|FoxxMD"
"/reddit|FoxxMD/ig"
],
"type": "string"
},
"regexFlags": {
"description": "Regex flags to use",
"type": "string"
},
"testOn": {
"default": [
"title",
@@ -1053,15 +1197,27 @@
]
},
"exclude": {
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are **NOT** found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"gapAllowance": {
@@ -1069,15 +1225,27 @@
"type": "number"
},
"include": {
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"itemIs": {
@@ -1167,40 +1335,6 @@
],
"type": "object"
},
"SubThreshold": {
"additionalProperties": false,
"description": "At least one count property must be present. If both are present then either can trigger the rule",
"minProperties": 1,
"properties": {
"subreddits": {
"description": "A list of Subreddits (by name, case-insensitive) to look for.\n\nEX [\"mealtimevideos\",\"askscience\"]",
"examples": [
[
"mealtimevideos",
"askscience"
]
],
"items": {
"type": "string"
},
"minItems": 1,
"type": "array"
},
"threshold": {
"default": ">= 1",
"description": "A string containing a comparison operator and a value to compare recent activities against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 3` => greater than 3 activities found in the listed subreddits\n* EX `<= 75%` => number of Activities in the subreddits listed are equal to or less than 75% of all Activities\n\n**Note:** If you use percentage comparison here as well as `useSubmissionAsReference` then \"all Activities\" is only pertains to Activities that had the Link of the Submission, rather than all Activities from this window.",
"examples": [
">= 1"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
}
},
"required": [
"subreddits"
],
"type": "object"
},
"SubmissionState": {
"description": "Different attributes a `Submission` can be in. Only include a property if you want to check it.",
"examples": [
@@ -1244,6 +1378,16 @@
"removed": {
"type": "boolean"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"score": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"spam": {
"type": "boolean"
},
@@ -1260,6 +1404,44 @@
},
"type": "object"
},
"SubredditState": {
"description": "Different attributes a `Subreddit` can be in. Only include a property if you want to check it.",
"examples": [
{
"over18": true
}
],
"properties": {
"name": {
"anyOf": [
{
"$ref": "#/definitions/RegExp"
},
{
"type": "string"
}
],
"description": "The name the subreddit.\n\nCan be a normal string (will check case-insensitive) or a regular expression\n\nEX `[\"mealtimevideos\", \"/onlyfans*\\/i\"]`",
"examples": [
"mealtimevideos",
"/onlyfans*/i"
]
},
"over18": {
"description": "Is subreddit NSFW/over 18?\n\n**Note**: This is **mod-controlled flag** so it is up to the mods of the subreddit to correctly mark their subreddit as NSFW",
"type": "boolean"
},
"quarantine": {
"description": "Is subreddit quarantined?",
"type": "boolean"
},
"stateDescription": {
"description": "A friendly description of what this State is trying to parse",
"type": "string"
}
},
"type": "object"
},
"UserNoteCriteria": {
"properties": {
"count": {

View File

@@ -5,14 +5,16 @@ import {CommentCheck} from "../Check/CommentCheck";
import {
cacheStats,
createRetryHandler,
determineNewResults, formatNumber,
mergeArr, parseFromJsonOrYamlToObject, pollingInfo, sleep, totalFromMapStats,
determineNewResults, findLastIndex, formatNumber,
mergeArr, parseFromJsonOrYamlToObject, pollingInfo, resultsSummary, sleep, totalFromMapStats, triggeredIndicator,
} from "../util";
import {Poll} from "snoostorm";
import pEvent from "p-event";
import {RuleResult} from "../Rule";
import {ConfigBuilder, buildPollingOptions} from "../ConfigBuilder";
import {
ActionedEvent,
ActionResult,
DEFAULT_POLLING_INTERVAL,
DEFAULT_POLLING_LIMIT, Invokee,
ManagerOptions, ManagerStateChangeOption, PAUSED,
@@ -36,6 +38,7 @@ import {queue, QueueObject} from 'async';
import {JSONConfig} from "../JsonConfig";
import {CheckStructuredJson} from "../Check";
import NotificationManager from "../Notification/NotificationManager";
import action from "../Web/Server/routes/authenticated/user/action";
export interface RunningState {
state: RunState,
@@ -46,6 +49,7 @@ export interface runCheckOptions {
checkNames?: string[],
delayUntil?: number,
dryRun?: boolean,
refresh?: boolean,
}
export interface CheckTask {
@@ -94,6 +98,12 @@ export interface ManagerStats {
},
}
interface QueuedIdentifier {
id: string,
shouldRefresh: boolean
state: 'queued' | 'processing'
}
export class Manager {
subreddit: Subreddit;
client: Snoowrap;
@@ -117,6 +127,15 @@ export class Manager {
globalDryRun?: boolean;
emitter: EventEmitter = new EventEmitter();
queue: QueueObject<CheckTask>;
// firehose is used to ensure all activities from different polling streams are unique
// that is -- if the same activity is in both modqueue and unmoderated we don't want to process the activity twice or use stale data
//
// so all activities get queued to firehose, it keeps track of items by id (using queuedItemsMeta)
// and ensures that if any activities are ingested while they are ALSO currently queued or working then they are properly handled by either
// 1) if queued, do not re-queue but instead tell worker to refresh before processing
// 2) if currently processing then re-queue but also refresh before processing
firehose: QueueObject<CheckTask>;
queuedItemsMeta: QueuedIdentifier[] = [];
globalMaxWorkers: number;
subMaxWorkers?: number;
@@ -165,6 +184,7 @@ export class Manager {
rulesUniqueRollingAvg: number = 0;
actionsRun: Map<string, number> = new Map();
actionsRunSinceStart: Map<string, number> = new Map();
actionedEvents: ActionedEvent[] = [];
getStats = async (): Promise<ManagerStats> => {
const data: any = {
@@ -246,6 +266,7 @@ export class Manager {
this.queue = this.generateQueue(this.getMaxWorkers(this.globalMaxWorkers));
this.queue.pause();
this.firehose = this.generateFirehose();
this.eventsSampleInterval = setInterval((function(self) {
return function() {
@@ -306,6 +327,32 @@ export class Manager {
return maxWorkers;
}
protected generateFirehose() {
return queue(async (task: CheckTask, cb) => {
// items in queuedItemsMeta will be processing FIFO so earlier elements (by index) are older
//
// if we insert the same item again because it is currently being processed AND THEN we get the item AGAIN we only want to update the newest meta
// so search the array backwards to get the newest one only
const queuedItemIndex = findLastIndex(this.queuedItemsMeta, x => x.id === task.activity.id);
if(queuedItemIndex !== -1) {
const itemMeta = this.queuedItemsMeta[queuedItemIndex];
let msg = `Item ${itemMeta.id} is already ${itemMeta.state}.`;
if(itemMeta.state === 'queued') {
this.logger.debug(`${msg} Flagging to refresh data before processing.`);
this.queuedItemsMeta.splice(queuedItemIndex, 1, {...itemMeta, shouldRefresh: true});
} else {
this.logger.debug(`${msg} Re-queuing item but will also refresh data before processing.`);
this.queuedItemsMeta.push({id: task.activity.id, shouldRefresh: true, state: 'queued'});
this.queue.push(task);
}
} else {
this.queuedItemsMeta.push({id: task.activity.id, shouldRefresh: false, state: 'queued'});
this.queue.push(task);
}
}
, 1);
}
protected generateQueue(maxWorkers: number) {
if (maxWorkers > 1) {
this.logger.warn(`Setting max queue workers above 1 (specified: ${maxWorkers}) may have detrimental effects to log readability and api usage. Consult the documentation before using this advanced/experimental feature.`);
@@ -316,7 +363,16 @@ export class Manager {
this.logger.debug(`SOFT API LIMIT MODE: Delaying Event run by ${this.delayBy} seconds`);
await sleep(this.delayBy * 1000);
}
await this.runChecks(task.checkType, task.activity, task.options);
const queuedItemIndex = this.queuedItemsMeta.findIndex(x => x.id === task.activity.id);
try {
const itemMeta = this.queuedItemsMeta[queuedItemIndex];
this.queuedItemsMeta.splice(queuedItemIndex, 1, {...itemMeta, state: 'processing'});
await this.runChecks(task.checkType, task.activity, {...task.options, refresh: itemMeta.shouldRefresh});
} finally {
// always remove item meta regardless of success or failure since we are done with it meow
this.queuedItemsMeta.splice(queuedItemIndex, 1);
}
}
, maxWorkers);
q.error((err, task) => {
@@ -512,8 +568,11 @@ export class Manager {
checkNames = [],
delayUntil,
dryRun,
refresh = false,
} = options || {};
let wasRefreshed = false;
if (delayUntil !== undefined) {
const created = dayjs.unix(item.created_utc);
const diff = dayjs().diff(created, 's');
@@ -522,8 +581,16 @@ export class Manager {
await sleep(delayUntil - diff);
// @ts-ignore
item = await activity.refresh();
wasRefreshed = true;
}
}
// refresh signal from firehose if activity was ingested multiple times before processing or re-queued while processing
// want to make sure we have the most recent data
if(!wasRefreshed && refresh === true) {
this.logger.verbose('Refreshed data (probably due to signal from firehose)');
// @ts-ignore
item = await activity.refresh();
}
const startingApiLimit = this.client.ratelimitRemaining;
@@ -540,10 +607,23 @@ export class Manager {
let checksRun = 0;
let actionsRun = 0;
let totalRulesRun = 0;
let runActions: Action[] = [];
let runActions: ActionResult[] = [];
let actionedEvent: ActionedEvent = {
subreddit: this.subreddit.display_name_prefixed,
activity: {
peek: ePeek,
link: item.permalink
},
author: item.author.name,
timestamp: Date.now(),
check: '',
ruleSummary: '',
ruleResults: [],
actionResults: [],
}
let triggered = false;
try {
let triggered = false;
for (const check of checks) {
if (checkNames.length > 0 && !checkNames.map(x => x.toLowerCase()).some(x => x === check.name.toLowerCase())) {
this.logger.warn(`Check ${check.name} not in array of requested checks to run, skipping...`);
@@ -555,14 +635,22 @@ export class Manager {
}
checksRun++;
triggered = false;
let isFromCache = false;
let currentResults: RuleResult[] = [];
try {
const [checkTriggered, checkResults] = await check.runRules(item, allRuleResults);
await check.setCacheResult(item, checkTriggered);
const [checkTriggered, checkResults, fromCache = false] = await check.runRules(item, allRuleResults);
isFromCache = fromCache;
if(!fromCache) {
await check.setCacheResult(item, {result: checkTriggered, ruleResults: checkResults});
}
currentResults = checkResults;
totalRulesRun += checkResults.length;
allRuleResults = allRuleResults.concat(determineNewResults(allRuleResults, checkResults));
triggered = checkTriggered;
if(triggered && fromCache && !check.cacheUserResult.runActions) {
this.logger.info('Check was triggered but cache result options specified NOT to run actions...counting as check NOT triggered');
triggered = false;
}
} catch (e) {
if (e.logged !== true) {
this.logger.warn(`Running rules for Check ${check.name} failed due to uncaught exception`, e);
@@ -570,13 +658,20 @@ export class Manager {
}
if (triggered) {
actionedEvent.check = check.name;
actionedEvent.ruleResults = currentResults;
if(isFromCache) {
actionedEvent.ruleSummary = `Check result was found in cache: ${triggeredIndicator(true)}`;
} else {
actionedEvent.ruleSummary = resultsSummary(currentResults, check.condition);
}
this.checksTriggered.set(check.name, (this.checksTriggered.get(check.name) || 0) + 1);
this.checksTriggeredSinceStart.set(check.name, (this.checksTriggeredSinceStart.get(check.name) || 0) + 1);
runActions = await check.runActions(item, currentResults.filter(x => x.triggered), dryRun);
actionsRun = runActions.length;
if(check.notifyOnTrigger) {
const ar = runActions.map(x => x.getActionUniqueName()).join(', ');
const ar = runActions.map(x => x.name).join(', ');
this.notificationManager.handle('eventActioned', 'Check Triggered', `Check "${check.name}" was triggered on Event: \n\n ${ePeek} \n\n with the following actions run: ${ar}`);
}
break;
@@ -606,9 +701,13 @@ export class Manager {
this.rulesTriggeredSinceStartTotal += triggeredRulesTotal;
for (const a of runActions) {
const name = a.getActionUniqueName();
const name = a.name;
this.actionsRun.set(name, (this.actionsRun.get(name) || 0) + 1);
this.actionsRunSinceStart.set(name, (this.actionsRunSinceStart.get(name) || 0) + 1)
this.actionsRunSinceStart.set(name, (this.actionsRunSinceStart.get(name) || 0) + 1);
}
actionedEvent.actionResults = runActions;
if(triggered) {
await this.resources.addActionedEvent(actionedEvent);
}
this.logger.verbose(`Run Stats: Checks ${checksRun} | Rules => Total: ${totalRulesRun} Unique: ${allRuleResults.length} Cached: ${totalRulesRun - allRuleResults.length} Rolling Avg: ~${formatNumber(this.rulesUniqueRollingAvg)}/s | Actions ${actionsRun}`);
@@ -710,7 +809,7 @@ export class Manager {
checkType = 'Comment';
}
if (checkType !== undefined) {
this.queue.push({checkType, activity: item, options: {delayUntil}})
this.firehose.push({checkType, activity: item, options: {delayUntil}})
}
};
@@ -743,14 +842,19 @@ export class Manager {
} else if (!this.validConfigLoaded) {
this.logger.warn('Cannot start activity processing queue while manager has an invalid configuration');
} else {
if(this.queueState.state === STOPPED) {
// extra precaution to make sure queue meta is cleared before starting queue
this.queuedItemsMeta = [];
}
this.queue.resume();
this.firehose.resume();
this.logger.info(`Activity processing queue started RUNNING with ${this.queue.length()} queued activities`);
this.queueState = {
state: RUNNING,
causedBy
}
if(!suppressNotification) {
this.notificationManager.handle('runStateChanged', 'Queue Started', reason, causedBy)
this.notificationManager.handle('runStateChanged', 'Queue Started', reason, causedBy);
}
}
}
@@ -812,7 +916,9 @@ export class Manager {
this.logger.verbose(`Activity processing queue is stopping...waiting for ${this.queue.running()} activities to finish processing`);
}
this.logger.info(`Activity processing queue stopped by ${causedBy} and ${this.queue.length()} queued activities cleared (waited ${dayjs().diff(pauseWaitStart, 's')} seconds while activity processing finished)`);
this.firehose.kill();
this.queue.kill();
this.queuedItemsMeta = [];
}
this.queueState = {

View File

@@ -1,4 +1,4 @@
import Snoowrap, {RedditUser} from "snoowrap";
import Snoowrap, {RedditUser, Subreddit} from "snoowrap";
import objectHash from 'object-hash';
import {
activityIsDeleted, activityIsFiltered,
@@ -8,23 +8,23 @@ import {
getAuthorActivities,
testAuthorCriteria
} from "../Utils/SnoowrapUtils";
import Subreddit from 'snoowrap/dist/objects/Subreddit';
import winston, {Logger} from "winston";
import fetch from 'node-fetch';
import {
buildCacheOptionsFromProvider,
cacheStats, createCacheManager,
formatNumber,
asSubmission,
buildCacheOptionsFromProvider, buildCachePrefix,
cacheStats, comparisonTextOp, createCacheManager,
formatNumber, getActivityAuthorName, getActivitySubredditName, isStrongSubredditState,
mergeArr,
parseExternalUrl,
parseWikiContext
parseExternalUrl, parseGenericValueComparison,
parseWikiContext, toStrongSubredditState
} from "../util";
import LoggedError from "../Utils/LoggedError";
import {
BotInstanceConfig,
CacheOptions, CommentState,
Footer, OperatorConfig, ResourceStats, SubmissionState,
SubredditCacheConfig, TTLConfig, TypedActivityStates
Footer, OperatorConfig, ResourceStats, StrongCache, SubmissionState,
CacheConfig, TTLConfig, TypedActivityStates, UserResultCache, ActionedEvent, SubredditState, StrongSubredditState
} from "../Common/interfaces";
import UserNotes from "./UserNotes";
import Mustache from "mustache";
@@ -34,11 +34,12 @@ import {SPoll} from "./Streams";
import {Cache} from 'cache-manager';
import {Submission, Comment} from "snoowrap/dist/objects";
import {cacheTTLDefaults} from "../Common/defaults";
import {check} from "tcp-port-used";
export const DEFAULT_FOOTER = '\r\n*****\r\nThis action was performed by [a bot.]({{botLink}}) Mention a moderator or [send a modmail]({{modmailLink}}) if you have any ideas, questions, or concerns about this action.';
export interface SubredditResourceConfig extends Footer {
caching?: SubredditCacheConfig,
caching?: CacheConfig,
subreddit: Subreddit,
logger: Logger;
client: Snoowrap
@@ -52,19 +53,22 @@ interface SubredditResourceOptions extends Footer {
subreddit: Subreddit,
logger: Logger;
client: Snoowrap;
prefix?: string;
actionedEventsMax: number;
}
export interface SubredditResourceSetOptions extends SubredditCacheConfig, Footer {
export interface SubredditResourceSetOptions extends CacheConfig, Footer {
}
export class SubredditResources {
//enabled!: boolean;
protected useSubredditAuthorCache!: boolean;
protected authorTTL: number = cacheTTLDefaults.authorTTL;
protected wikiTTL: number = cacheTTLDefaults.wikiTTL;
protected submissionTTL: number = cacheTTLDefaults.submissionTTL;
protected commentTTL: number = cacheTTLDefaults.commentTTL;
protected filterCriteriaTTL: number = cacheTTLDefaults.filterCriteriaTTL;
protected authorTTL: number | false = cacheTTLDefaults.authorTTL;
protected subredditTTL: number | false = cacheTTLDefaults.subredditTTL;
protected wikiTTL: number | false = cacheTTLDefaults.wikiTTL;
protected submissionTTL: number | false = cacheTTLDefaults.submissionTTL;
protected commentTTL: number | false = cacheTTLDefaults.commentTTL;
protected filterCriteriaTTL: number | false = cacheTTLDefaults.filterCriteriaTTL;
name: string;
protected logger: Logger;
userNotes: UserNotes;
@@ -75,6 +79,8 @@ export class SubredditResources {
cacheType: string
cacheSettingsHash?: string;
pruneInterval?: any;
prefix?: string
actionedEventsMax: number;
stats: { cache: ResourceStats };
@@ -87,20 +93,30 @@ export class SubredditResources {
authorTTL,
wikiTTL,
filterCriteriaTTL,
submissionTTL,
commentTTL,
subredditTTL,
},
cache,
prefix,
cacheType,
actionedEventsMax,
cacheSettingsHash,
client,
} = options || {};
this.cacheSettingsHash = cacheSettingsHash;
this.cache = cache;
this.prefix = prefix;
this.client = client;
this.cacheType = cacheType;
this.authorTTL = authorTTL;
this.wikiTTL = wikiTTL;
this.filterCriteriaTTL = filterCriteriaTTL;
this.actionedEventsMax = actionedEventsMax;
this.authorTTL = authorTTL === true ? 0 : authorTTL;
this.submissionTTL = submissionTTL === true ? 0 : submissionTTL;
this.commentTTL = commentTTL === true ? 0 : commentTTL;
this.subredditTTL = subredditTTL === true ? 0 : subredditTTL;
this.wikiTTL = wikiTTL === true ? 0 : wikiTTL;
this.filterCriteriaTTL = filterCriteriaTTL === true ? 0 : filterCriteriaTTL;
this.subreddit = subreddit;
this.name = name;
if (logger === undefined) {
@@ -122,7 +138,7 @@ export class SubredditResources {
this.userNotes = new UserNotes(userNotesTTL, this.subreddit, this.logger, this.cache, cacheUseCB)
if(this.cacheType === 'memory' && this.cacheSettingsHash !== 'default') {
const min = Math.min(...([wikiTTL, authorTTL, userNotesTTL].filter(x => x !== 0)));
const min = Math.min(...([this.wikiTTL, this.authorTTL, this.submissionTTL, this.commentTTL, this.filterCriteriaTTL].filter(x => typeof x === 'number' && x !== 0) as number[]));
if(min > 0) {
// set default prune interval
this.pruneInterval = setInterval(() => {
@@ -137,6 +153,9 @@ export class SubredditResources {
async getCacheKeyCount() {
if (this.cache.store.keys !== undefined) {
if(this.cacheType === 'redis') {
return (await this.cache.store.keys(`${this.prefix}*`)).length;
}
return (await this.cache.store.keys()).length;
}
return 0;
@@ -200,16 +219,26 @@ export class SubredditResources {
this.logger = logger.child({labels: ['Resource Cache']}, mergeArr);
}
async getActionedEvents(): Promise<ActionedEvent[]> {
return await this.cache.wrap(`actionedEvents-${this.subreddit.display_name}`, () => []);
}
async addActionedEvent(ae: ActionedEvent) {
const events = await this.cache.wrap(`actionedEvents-${this.subreddit.display_name}`, () => []) as ActionedEvent[];
events.unshift(ae);
await this.cache.set(`actionedEvents-${this.subreddit.display_name}`, events.slice(0, this.actionedEventsMax), {ttl: 0});
}
async getActivity(item: Submission | Comment) {
try {
let hash = '';
if (item instanceof Submission && this.submissionTTL > 0) {
if (this.submissionTTL !== false && asSubmission(item)) {
hash = `sub-${item.name}`;
await this.stats.cache.submission.identifierRequestCount.set(hash, (await this.stats.cache.submission.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.submission.requestTimestamps.push(Date.now());
this.stats.cache.submission.requests++;
const cachedSubmission = await this.cache.get(hash);
if (cachedSubmission !== undefined) {
if (cachedSubmission !== undefined && cachedSubmission !== null) {
this.logger.debug(`Cache Hit: Submission ${item.name}`);
return cachedSubmission;
}
@@ -218,13 +247,13 @@ export class SubredditResources {
this.stats.cache.submission.miss++;
await this.cache.set(hash, submission, {ttl: this.submissionTTL});
return submission;
} else if (this.commentTTL > 0) {
} else if (this.commentTTL !== false) {
hash = `comm-${item.name}`;
await this.stats.cache.comment.identifierRequestCount.set(hash, (await this.stats.cache.comment.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.comment.requestTimestamps.push(Date.now());
this.stats.cache.comment.requests++;
const cachedComment = await this.cache.get(hash);
if (cachedComment !== undefined) {
if (cachedComment !== undefined && cachedComment !== null) {
this.logger.debug(`Cache Hit: Comment ${item.name}`);
return cachedComment;
}
@@ -243,30 +272,72 @@ export class SubredditResources {
}
}
async getAuthorActivities(user: RedditUser, options: AuthorTypedActivitiesOptions): Promise<Array<Submission | Comment>> {
if (this.authorTTL > 0) {
const userName = user.name;
const hashObj: any = {...options, userName};
if (this.useSubredditAuthorCache) {
hashObj.subreddit = this.name;
// @ts-ignore
async getSubreddit(item: Submission | Comment) {
try {
let hash = '';
if (this.subredditTTL !== false) {
hash = `sub-${getActivitySubredditName(item)}`;
await this.stats.cache.subreddit.identifierRequestCount.set(hash, (await this.stats.cache.subreddit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.subreddit.requestTimestamps.push(Date.now());
this.stats.cache.subreddit.requests++;
const cachedSubreddit = await this.cache.get(hash);
if (cachedSubreddit !== undefined && cachedSubreddit !== null) {
this.logger.debug(`Cache Hit: Subreddit ${item.subreddit.display_name}`);
// @ts-ignore
return cachedSubreddit as Subreddit;
}
// @ts-ignore
const subreddit = await this.client.getSubreddit(getActivitySubredditName(item)).fetch() as Subreddit;
this.stats.cache.subreddit.miss++;
// @ts-ignore
await this.cache.set(hash, subreddit, {ttl: this.subredditTTL});
// @ts-ignore
return subreddit as Subreddit;
} else {
// @ts-ignore
let subreddit = await this.client.getSubreddit(getActivitySubredditName(item));
return subreddit as Subreddit;
}
const hash = objectHash.sha1({...options, userName});
} catch (err) {
this.logger.error('Error while trying to fetch a cached activity', err);
throw err.logged;
}
}
async getAuthorActivities(user: RedditUser, options: AuthorTypedActivitiesOptions): Promise<Array<Submission | Comment>> {
const userName = getActivityAuthorName(user);
if (this.authorTTL !== false) {
const hashObj: any = options;
if (this.useSubredditAuthorCache) {
hashObj.subreddit = this.subreddit;
}
const hash = `authorActivities-${userName}-${options.type || 'overview'}-${objectHash.sha1(hashObj)}`;
this.stats.cache.author.requests++;
await this.stats.cache.author.identifierRequestCount.set(user.name, (await this.stats.cache.author.identifierRequestCount.wrap(user.name, () => 0) as number) + 1);
await this.stats.cache.author.identifierRequestCount.set(userName, (await this.stats.cache.author.identifierRequestCount.wrap(userName, () => 0) as number) + 1);
this.stats.cache.author.requestTimestamps.push(Date.now());
let miss = false;
const cacheVal = await this.cache.wrap(hash, async () => {
miss = true;
if(typeof user === 'string') {
// @ts-ignore
user = await this.client.getUser(userName);
}
return await getAuthorActivities(user, options);
}, {ttl: this.authorTTL});
if (!miss) {
this.logger.debug(`Cache Hit: ${userName} (${options.type || 'overview'})`);
this.logger.debug(`Cache Hit: ${userName} (Hash ${hash})`);
} else {
this.stats.cache.author.miss++;
}
return cacheVal as Array<Submission | Comment>;
}
if(typeof user === 'string') {
// @ts-ignore
user = await this.client.getUser(userName);
}
return await getAuthorActivities(user, options);
}
@@ -298,14 +369,14 @@ export class SubredditResources {
}
// try to get cached value first
let hash = `${subreddit.display_name}-${cacheKey}`;
if (this.wikiTTL > 0) {
let hash = `${subreddit.display_name}-content-${cacheKey}`;
if (this.wikiTTL !== false) {
await this.stats.cache.content.identifierRequestCount.set(cacheKey, (await this.stats.cache.content.identifierRequestCount.wrap(cacheKey, () => 0) as number) + 1);
this.stats.cache.content.requestTimestamps.push(Date.now());
this.stats.cache.content.requests++;
const cachedContent = await this.cache.get(hash);
if (cachedContent !== undefined) {
this.logger.debug(`Cache Hit: ${cacheKey}`);
if (cachedContent !== undefined && cachedContent !== null) {
this.logger.debug(`Content Cache Hit: ${cacheKey}`);
return cachedContent as string;
} else {
this.stats.cache.content.miss++;
@@ -349,17 +420,63 @@ export class SubredditResources {
}
}
if (this.wikiTTL > 0) {
if (this.wikiTTL !== false) {
this.cache.set(hash, wikiContent, {ttl: this.wikiTTL});
}
return wikiContent;
}
async testSubredditCriteria(item: (Comment | Submission), state: SubredditState | StrongSubredditState) {
if(Object.keys(state).length === 0) {
return true;
}
// optimize for name-only criteria checks
// -- we don't need to store cache results for this since we know subreddit name is always available from item (no request required)
const critCount = Object.entries(state).filter(([key, val]) => {
return val !== undefined && !['name','stateDescription'].includes(key);
}).length;
if(critCount === 0) {
const subName = getActivitySubredditName(item);
return await this.isSubreddit({display_name: subName} as Subreddit, state, this.logger);
}
if (this.filterCriteriaTTL !== false) {
try {
const hash = `subredditCrit-${getActivitySubredditName(item)}-${objectHash.sha1(state)}`;
await this.stats.cache.subredditCrit.identifierRequestCount.set(hash, (await this.stats.cache.subredditCrit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.subredditCrit.requestTimestamps.push(Date.now());
this.stats.cache.subredditCrit.requests++;
const cachedItem = await this.cache.get(hash);
if (cachedItem !== undefined && cachedItem !== null) {
this.logger.debug(`Cache Hit: Subreddit Check on ${getActivitySubredditName(item)} (Hash ${hash})`);
return cachedItem as boolean;
}
const itemResult = await this.isSubreddit(await this.getSubreddit(item), state, this.logger);
this.stats.cache.subredditCrit.miss++;
await this.cache.set(hash, itemResult, {ttl: this.filterCriteriaTTL});
return itemResult;
} catch (err) {
if (err.logged !== true) {
this.logger.error('Error occurred while testing subreddit criteria', err);
}
throw err;
}
}
return await this.isSubreddit(await this.getSubreddit(item), state, this.logger);
}
async testAuthorCriteria(item: (Comment | Submission), authorOpts: AuthorCriteria, include = true) {
if (this.filterCriteriaTTL > 0) {
const hashObj = {itemId: item.id, ...authorOpts, include};
const hash = `authorCrit-${objectHash.sha1(hashObj)}`;
if (this.filterCriteriaTTL !== false) {
// in the criteria check we only actually use the `item` to get the author flair
// which will be the same for the entire subreddit
//
// so we can create a hash only using subreddit-author-criteria
// and ignore the actual item
const hashObj = {...authorOpts, include};
const userName = getActivityAuthorName(item.author);
const hash = `authorCrit-${this.subreddit.display_name}-${userName}-${objectHash.sha1(hashObj)}`;
await this.stats.cache.authorCrit.identifierRequestCount.set(hash, (await this.stats.cache.authorCrit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.authorCrit.requestTimestamps.push(Date.now());
this.stats.cache.authorCrit.requests++;
@@ -367,9 +484,9 @@ export class SubredditResources {
const cachedAuthorTest = await this.cache.wrap(hash, async () => {
miss = true;
return await testAuthorCriteria(item, authorOpts, include, this.userNotes);
}, {ttl: this.authorTTL});
}, {ttl: this.filterCriteriaTTL});
if (!miss) {
this.logger.debug(`Cache Hit: Author Check on ${item.id}`);
this.logger.debug(`Cache Hit: Author Check on ${userName} (Hash ${hash})`);
} else {
this.stats.cache.authorCrit.miss++;
}
@@ -379,10 +496,10 @@ export class SubredditResources {
return await testAuthorCriteria(item, authorOpts, include, this.userNotes);
}
async testItemCriteria(i: (Comment | Submission), s: TypedActivityStates) {
if (this.filterCriteriaTTL > 0) {
async testItemCriteria(i: (Comment | Submission), activityStates: TypedActivityStates) {
if (this.filterCriteriaTTL !== false) {
let item = i;
let states = s;
let states = activityStates;
// optimize for submission only checks on comment item
if (item instanceof Comment && states.length === 1 && Object.keys(states[0]).length === 1 && (states[0] as CommentState).submissionState !== undefined) {
// @ts-ignore
@@ -392,19 +509,18 @@ export class SubredditResources {
states = (states[0] as CommentState).submissionState as SubmissionState[];
}
try {
const hashObj = {itemId: item.name, ...states};
const hash = `itemCrit-${objectHash.sha1(hashObj)}`;
const hash = `itemCrit-${item.name}-${objectHash.sha1(states)}`;
await this.stats.cache.itemCrit.identifierRequestCount.set(hash, (await this.stats.cache.itemCrit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.itemCrit.requestTimestamps.push(Date.now());
this.stats.cache.itemCrit.requests++;
const cachedItem = await this.cache.get(hash);
if (cachedItem !== undefined) {
this.logger.debug(`Cache Hit: Item Check on ${item.name}`);
if (cachedItem !== undefined && cachedItem !== null) {
this.logger.debug(`Cache Hit: Item Check on ${item.name} (Hash ${hash})`);
return cachedItem as boolean;
}
const itemResult = await this.isItem(item, states, this.logger);
this.stats.cache.itemCrit.miss++;
const res = await this.cache.set(hash, itemResult, {ttl: this.filterCriteriaTTL});
await this.cache.set(hash, itemResult, {ttl: this.filterCriteriaTTL});
return itemResult;
} catch (err) {
if (err.logged !== true) {
@@ -414,7 +530,50 @@ export class SubredditResources {
}
}
return await this.isItem(i, s, this.logger);
return await this.isItem(i, activityStates, this.logger);
}
async isSubreddit (subreddit: Subreddit, stateCriteria: SubredditState | StrongSubredditState, logger: Logger) {
delete stateCriteria.stateDescription;
if (Object.keys(stateCriteria).length === 0) {
return true;
}
const crit = isStrongSubredditState(stateCriteria) ? stateCriteria : toStrongSubredditState(stateCriteria, {defaultFlags: 'i'});
const log = logger.child({leaf: 'Subreddit Check'}, mergeArr);
return await (async () => {
for (const k of Object.keys(crit)) {
// @ts-ignore
if (crit[k] !== undefined) {
switch (k) {
case 'name':
const nameReg = crit[k] as RegExp;
if(!nameReg.test(subreddit.display_name)) {
return false;
}
break;
default:
// @ts-ignore
if (crit[k] !== undefined) {
// @ts-ignore
if (crit[k] !== subreddit[k]) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${subreddit[k]}`)
return false
}
} else {
log.warn(`Tried to test for Subreddit property '${k}' but it did not exist`);
}
break;
}
}
}
log.debug(`Passed: ${JSON.stringify(stateCriteria)}`);
return true;
})() as boolean;
}
async isItem (item: Submission | Comment, stateCriteria: TypedActivityStates, logger: Logger) {
@@ -446,6 +605,22 @@ export class SubredditResources {
return false;
}
break;
case 'score':
const scoreCompare = parseGenericValueComparison(crit[k] as string);
if(!comparisonTextOp(item.score, scoreCompare.operator, scoreCompare.value)) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${item.score}`)
return false
}
break;
case 'reports':
const reportCompare = parseGenericValueComparison(crit[k] as string);
if(!comparisonTextOp(item.num_reports, reportCompare.operator, reportCompare.value)) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${item.num_reports}`)
return false
}
break;
case 'removed':
const removed = activityIsRemoved(item);
if (removed !== crit['removed']) {
@@ -514,36 +689,26 @@ export class SubredditResources {
return false
}
async getCommentCheckCacheResult(item: Comment, checkConfig: object): Promise<boolean | undefined> {
const criteria = {
author: item.author.name,
submission: item.link_id,
...checkConfig
}
const hash = objectHash.sha1(criteria);
async getCommentCheckCacheResult(item: Comment, checkConfig: object): Promise<UserResultCache | undefined> {
const userName = getActivityAuthorName(item.author);
const hash = `commentUserResult-${userName}-${item.link_id}-${objectHash.sha1(checkConfig)}`;
this.stats.cache.commentCheck.requests++;
const result = await this.cache.get(hash) as boolean | undefined;
let result = await this.cache.get(hash) as UserResultCache | undefined | null;
if(result === null) {
result = undefined;
}
if(result === undefined) {
this.stats.cache.commentCheck.miss++;
}
this.logger.debug(`Cache Hit: Comment Check for ${item.author.name} in Submission ${item.link_id}`);
this.logger.debug(`Cache Hit: Comment Check for ${userName} in Submission ${item.link_id} (Hash ${hash})`);
return result;
}
async setCommentCheckCacheResult(item: Comment, checkConfig: object, result: boolean, ttl: number) {
const criteria = {
author: item.author.name,
submission: item.link_id,
...checkConfig
}
const hash = objectHash.sha1(criteria);
// don't set if result is already cached
if(undefined !== await this.cache.get(hash)) {
this.logger.debug(`Check result already cached for User ${item.author.name} on Submission ${item.link_id}`);
} else {
await this.cache.set(hash, result, { ttl });
this.logger.debug(`Cached check result '${result}' for User ${item.author.name} on Submission ${item.link_id} for ${ttl} seconds`);
}
async setCommentCheckCacheResult(item: Comment, checkConfig: object, result: UserResultCache, ttl: number) {
const userName = getActivityAuthorName(item.author);
const hash = `commentUserResult-${userName}-${item.link_id}-${objectHash.sha1(checkConfig)}`
await this.cache.set(hash, result, { ttl });
this.logger.debug(`Cached check result '${result.result}' for User ${userName} on Submission ${item.link_id} for ${ttl} seconds (Hash ${hash})`);
}
async generateFooter(item: Submission | Comment, actionFooter?: false | string) {
@@ -566,9 +731,12 @@ export class BotResourcesManager {
enabled: boolean = true;
modStreams: Map<string, SPoll<Snoowrap.Submission | Snoowrap.Comment>> = new Map();
defaultCache: Cache;
defaultCacheConfig: StrongCache
cacheType: string = 'none';
cacheHash: string;
ttlDefaults: Required<TTLConfig>;
actionedEventsMaxDefault?: number;
actionedEventsDefault: number;
pruneInterval: any;
constructor(config: BotInstanceConfig) {
@@ -579,19 +747,29 @@ export class BotResourcesManager {
wikiTTL,
commentTTL,
submissionTTL,
subredditTTL,
filterCriteriaTTL,
provider,
actionedEventsMax,
actionedEventsDefault,
},
name,
credentials,
caching,
} = config;
this.cacheHash = objectHash.sha1(caching);
this.ttlDefaults = {authorTTL, userNotesTTL, wikiTTL, commentTTL, submissionTTL, filterCriteriaTTL};
caching.provider.prefix = buildCachePrefix([caching.provider.prefix, 'SHARED']);
const {actionedEventsMax: eMax, actionedEventsDefault: eDef, ...relevantCacheSettings} = caching;
this.cacheHash = objectHash.sha1(relevantCacheSettings);
this.defaultCacheConfig = caching;
this.ttlDefaults = {authorTTL, userNotesTTL, wikiTTL, commentTTL, submissionTTL, filterCriteriaTTL, subredditTTL};
const options = provider;
this.cacheType = options.store;
this.actionedEventsMaxDefault = actionedEventsMax;
this.actionedEventsDefault = actionedEventsDefault;
this.defaultCache = createCacheManager(options);
if (this.cacheType === 'memory') {
const min = Math.min(...([this.ttlDefaults.wikiTTL, this.ttlDefaults.authorTTL, this.ttlDefaults.userNotesTTL].filter(x => x !== 0)));
const min = Math.min(...([this.ttlDefaults.wikiTTL, this.ttlDefaults.authorTTL, this.ttlDefaults.userNotesTTL].filter(x => typeof x === 'number' && x !== 0) as number[]));
if (min > 0) {
// set default prune interval
this.pruneInterval = setInterval(() => {
@@ -622,11 +800,13 @@ export class BotResourcesManager {
cacheType: this.cacheType,
cacheSettingsHash: hash,
ttl: this.ttlDefaults,
prefix: this.defaultCacheConfig.provider.prefix,
actionedEventsMax: this.actionedEventsMaxDefault !== undefined ? Math.min(this.actionedEventsDefault, this.actionedEventsMaxDefault) : this.actionedEventsDefault,
...init,
};
if(caching !== undefined) {
const {provider = 'memory', ...rest} = caching;
const {provider = this.defaultCacheConfig.provider, actionedEventsMax = this.actionedEventsDefault, ...rest} = caching;
let cacheConfig = {
provider: buildCacheOptionsFromProvider(provider),
ttl: {
@@ -638,10 +818,16 @@ export class BotResourcesManager {
// only need to create a private cache if their settings are actually different from the default
if(hash !== this.cacheHash) {
const {provider: trueProvider, ...trueRest} = cacheConfig;
const defaultPrefix = trueProvider.prefix;
const subPrefix = defaultPrefix === this.defaultCacheConfig.provider.prefix ? buildCachePrefix([(defaultPrefix !== undefined ? defaultPrefix.replace('SHARED', '') : defaultPrefix), subName]) : trueProvider.prefix;
trueProvider.prefix = subPrefix;
const eventsMax = this.actionedEventsMaxDefault !== undefined ? Math.min(actionedEventsMax, this.actionedEventsMaxDefault) : actionedEventsMax;
opts = {
cache: createCacheManager(trueProvider),
actionedEventsMax: eventsMax,
cacheType: trueProvider.store,
cacheSettingsHash: hash,
prefix: subPrefix,
...init,
...trueRest,
};
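To make the prefix derivation concrete (values below are hypothetical), a subreddit whose cache settings differ from the shared defaults gets its own prefix by stripping 'SHARED' from the default prefix and appending the subreddit name:
// hypothetical values showing only the derivation used above
const defaultPrefix = 'ROOT:SHARED:'; // shared prefix built in the constructor
const subName = 'mealtimevideos';
const subPrefix = buildCachePrefix([defaultPrefix.replace('SHARED', ''), subName]);
// => 'ROOT:mealtimevideos:'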

View File

@@ -1,6 +1,13 @@
import dayjs, {Dayjs} from "dayjs";
import {Comment, RedditUser, WikiPage} from "snoowrap";
import {COMMENT_URL_ID, deflateUserNotes, inflateUserNotes, parseLinkIdentifier, SUBMISSION_URL_ID} from "../util";
import {
COMMENT_URL_ID,
deflateUserNotes, getActivityAuthorName,
inflateUserNotes,
isScopeError,
parseLinkIdentifier,
SUBMISSION_URL_ID
} from "../util";
import Subreddit from "snoowrap/dist/objects/Subreddit";
import {Logger} from "winston";
import LoggedError from "../Utils/LoggedError";
@@ -48,7 +55,7 @@ export interface RawNote {
export type UserNotesConstants = Pick<any, "users" | "warnings">;
export class UserNotes {
notesTTL: number;
notesTTL: number | false;
subreddit: Subreddit;
wiki: WikiPage;
moderators?: RedditUser[];
@@ -63,8 +70,8 @@ export class UserNotes {
debounceCB: any;
batchCount: number = 0;
constructor(ttl: number, subreddit: Subreddit, logger: Logger, cache: Cache, cacheCB: Function) {
this.notesTTL = ttl;
constructor(ttl: number | boolean, subreddit: Subreddit, logger: Logger, cache: Cache, cacheCB: Function) {
this.notesTTL = ttl === true ? 0 : ttl;
this.subreddit = subreddit;
this.logger = logger;
this.wiki = subreddit.getWikiPage('usernotes');
@@ -74,10 +81,11 @@ export class UserNotes {
}
async getUserNotes(user: RedditUser): Promise<UserNote[]> {
const userName = getActivityAuthorName(user);
let notes: UserNote[] | undefined = [];
if (this.users !== undefined) {
notes = this.users.get(user.name);
notes = this.users.get(userName);
if (notes !== undefined) {
this.logger.debug('Returned cached notes');
return notes;
@@ -85,7 +93,7 @@ export class UserNotes {
}
const payload = await this.retrieveData();
const rawNotes = payload.blob[user.name];
const rawNotes = payload.blob[userName];
if (rawNotes !== undefined) {
if (this.moderators === undefined) {
this.moderators = await this.subreddit.getModerators();
@@ -94,7 +102,7 @@ export class UserNotes {
// sort in ascending order by time
notes.sort((a, b) => a.time.isBefore(b.time) ? -1 : 1);
if (this.notesTTL > 0 && this.cache !== undefined) {
this.users.set(user.name, notes);
this.users.set(userName, notes);
}
return notes;
} else {
@@ -105,6 +113,7 @@ export class UserNotes {
async addUserNote(item: (Submission|Comment), type: string | number, text: string = ''): Promise<UserNote>
{
const payload = await this.retrieveData();
const userName = getActivityAuthorName(item.author);
// idgaf
// @ts-ignore
@@ -120,16 +129,16 @@ export class UserNotes {
}
const newNote = new UserNote(dayjs(), text, mod, type, `https://reddit.com${item.permalink}`);
if(payload.blob[item.author.name] === undefined) {
payload.blob[item.author.name] = {ns: []};
if(payload.blob[userName] === undefined) {
payload.blob[userName] = {ns: []};
}
payload.blob[item.author.name].ns.push(newNote.toRaw(payload.constants));
payload.blob[userName].ns.push(newNote.toRaw(payload.constants));
await this.saveData(payload);
if(this.notesTTL > 0) {
const currNotes = this.users.get(item.author.name) || [];
const currNotes = this.users.get(userName) || [];
currNotes.push(newNote);
this.users.set(item.author.name, currNotes);
this.users.set(userName, currNotes);
}
return newNote;
}
@@ -144,7 +153,7 @@ export class UserNotes {
let cacheMiss;
if (this.notesTTL > 0) {
const cachedPayload = await this.cache.get(this.identifier);
if (cachedPayload !== undefined) {
if (cachedPayload !== undefined && cachedPayload !== null) {
this.cacheCB(false);
return cachedPayload as unknown as RawUserNotesPayload;
}
@@ -153,14 +162,15 @@ export class UserNotes {
}
try {
if(cacheMiss && this.debounceCB !== undefined) {
// timeout is still delayed. its our wiki data and we want it now! cm cacheworth 877 cache now
this.logger.debug(`Detected missed cache on usernotes retrieval while batch (${this.batchCount}) save is in progress, executing save immediately before retrieving new notes...`);
clearTimeout(this.saveDebounce);
await this.debounceCB();
this.debounceCB = undefined;
this.saveDebounce = undefined;
}
// DISABLED for now because I think it's causing issues
// if(cacheMiss && this.debounceCB !== undefined) {
// // timeout is still delayed. its our wiki data and we want it now! cm cacheworth 877 cache now
// this.logger.debug(`Detected missed cache on usernotes retrieval while batch (${this.batchCount}) save is in progress, executing save immediately before retrieving new notes...`);
// clearTimeout(this.saveDebounce);
// await this.debounceCB();
// this.debounceCB = undefined;
// this.saveDebounce = undefined;
// }
// @ts-ignore
this.wiki = await this.subreddit.getWikiPage('usernotes').fetch();
const wikiContent = this.wiki.content_md;
@@ -169,7 +179,7 @@ export class UserNotes {
userNotes.blob = inflateUserNotes(userNotes.blob);
if (this.notesTTL > 0) {
if (this.notesTTL !== false) {
await this.cache.set(`${this.subreddit.display_name}-usernotes`, userNotes, {ttl: this.notesTTL});
this.users = new Map();
}
@@ -187,30 +197,36 @@ export class UserNotes {
const blob = deflateUserNotes(payload.blob);
const wikiPayload = {text: JSON.stringify({...payload, blob}), reason: 'ContextBot edited usernotes'};
try {
if (this.notesTTL > 0) {
if (this.notesTTL !== false) {
// DISABLED for now because if it fails it throws an uncaught rejection
// and need to figure out how to handle this other than just logging (want to interrupt action flow too?)
//
// debounce usernote save by 5 seconds -- effectively batch usernote saves
//
// so that if we are processing a ton of checks that write user notes we aren't saving the wiki page on every call
// since we also have everything in cache (most likely...)
//
// TODO might want to increase timeout to 10 seconds
if(this.saveDebounce !== undefined) {
clearTimeout(this.saveDebounce);
}
this.debounceCB = (async function () {
const p = wikiPayload;
// @ts-ignore
const self = this as UserNotes;
// @ts-ignore
self.wiki = await self.subreddit.getWikiPage('usernotes').edit(p);
self.logger.debug(`Batch saved ${self.batchCount} usernotes`);
self.debounceCB = undefined;
self.saveDebounce = undefined;
self.batchCount = 0;
}).bind(this);
this.saveDebounce = setTimeout(this.debounceCB,5000);
this.batchCount++;
this.logger.debug(`Saving Usernotes has been debounced for 5 seconds (${this.batchCount} batched)`)
// if(this.saveDebounce !== undefined) {
// clearTimeout(this.saveDebounce);
// }
// this.debounceCB = (async function () {
// const p = wikiPayload;
// // @ts-ignore
// const self = this as UserNotes;
// // @ts-ignore
// self.wiki = await self.subreddit.getWikiPage('usernotes').edit(p);
// self.logger.debug(`Batch saved ${self.batchCount} usernotes`);
// self.debounceCB = undefined;
// self.saveDebounce = undefined;
// self.batchCount = 0;
// }).bind(this);
// this.saveDebounce = setTimeout(this.debounceCB,5000);
// this.batchCount++;
// this.logger.debug(`Saving Usernotes has been debounced for 5 seconds (${this.batchCount} batched)`)
// @ts-ignore
await this.subreddit.getWikiPage('usernotes').edit(wikiPayload);
await this.cache.set(this.identifier, payload, {ttl: this.notesTTL});
this.users = new Map();
} else {
@@ -220,7 +236,13 @@ export class UserNotes {
return payload as RawUserNotesPayload;
} catch (err) {
const msg = `Could not edit usernotes. Make sure at least one moderator has used toolbox and usernotes before and that this account has editing permissions`;
let msg = 'Could not edit usernotes.';
// Make sure at least one moderator has used toolbox and usernotes before and that this account has editing permissions
if(isScopeError(err)) {
msg = `${msg} The bot account did not have sufficient OAUTH scope to perform this action. You must re-authenticate the bot and ensure it has 'wikiedit' permissions.`
} else {
msg = `${msg} Make sure at least one moderator has used toolbox, created a usernote, and that this account has editing permissions for the wiki page.`;
}
this.logger.error(msg, err);
throw new LoggedError(msg);
}

View File

@@ -13,10 +13,16 @@ import {
TypedActivityStates
} from "../Common/interfaces";
import {
compareDurationValue, comparisonTextOp,
compareDurationValue,
comparisonTextOp,
isActivityWindowCriteria,
normalizeName, parseDuration,
parseDurationComparison, parseGenericValueComparison, parseGenericValueOrPercentComparison, parseSubredditName,
normalizeName,
parseDuration,
parseDurationComparison,
parseGenericValueComparison,
parseGenericValueOrPercentComparison,
parseRuleResultsToMarkdownSummary,
parseSubredditName,
truncateStringToLength
} from "../util";
import UserNotes from "../Subreddit/UserNotes";
@@ -305,7 +311,7 @@ export const renderContent = async (template: string, data: (Submission | Commen
};
}, {});
const view = {item: templateData, rules: normalizedRuleResults};
const view = {item: templateData, ruleSummary: parseRuleResultsToMarkdownSummary(ruleResults), rules: normalizedRuleResults};
const rendered = Mustache.render(template, view) as string;
return he.decode(rendered);
}
@@ -504,7 +510,8 @@ export const itemContentPeek = async (item: (Comment | Submission), peekLength =
peek = `${truncatePeek(item.title)} by ${author} https://reddit.com${item.permalink}`;
} else if (item instanceof Comment) {
content = truncatePeek(item.body);
// replace newlines with spaces to make peek more compact
content = truncatePeek(item.body.replaceAll('\n', ' '));
peek = `${truncatePeek(content)} by ${author} in https://reddit.com${item.permalink}`;
}
@@ -606,6 +613,9 @@ export const getAttributionIdentifier = (sub: Submission, useParentMediaDomain =
if (displayDomain === '') {
displayDomain = domain;
}
if(domainIdents.length === 0 && domain !== '') {
domainIdents.push(domain);
}
return {display: displayDomain, domain, aliases: domainIdents, provider, mediaType};
}

View File

@@ -8,12 +8,13 @@ import passport from 'passport';
import {Strategy as CustomStrategy} from 'passport-custom';
import {OperatorConfig, BotConnection, LogInfo} from "../../Common/interfaces";
import {
buildCachePrefix,
createCacheManager, filterLogBySubreddit,
formatLogLineToHtml,
intersect, isLogLineMinLevel,
LogEntry, parseFromJsonOrYamlToObject, parseInstanceLogInfoName, parseInstanceLogName,
parseSubredditLogName, permissions,
randomId, sleep
randomId, sleep, triggeredIndicator
} from "../../util";
import {Cache} from "cache-manager";
import session, {Session, SessionData} from "express-session";
@@ -42,6 +43,7 @@ import {booleanMiddle} from "../Common/middleware";
import {BotInstance, CMInstance} from "../interfaces";
import { URL } from "url";
import {MESSAGE} from "triple-beam";
import Autolinker from "autolinker";
const emitter = new EventEmitter();
@@ -119,9 +121,16 @@ const webClient = async (options: OperatorConfig) => {
},
web: {
port,
caching,
caching: {
prefix
},
invites: {
maxAge: invitesMaxAge,
},
session: {
provider,
secret,
maxAge: sessionMaxAge,
},
maxLogs,
clients,
@@ -134,8 +143,6 @@ const webClient = async (options: OperatorConfig) => {
},
} = options;
const connectedUsers: ConnectUserObj = {};
const webOps = operators.map(x => x.toLowerCase());
const logger = getLogger({defaultLabel: 'Web', ...options.logging}, 'Web');
@@ -159,11 +166,15 @@ const webClient = async (options: OperatorConfig) => {
throw new SimpleError(`Specified port for web interface (${port}) is in use or not available. Cannot start web server.`);
}
if (provider.store === 'none') {
logger.warn(`Cannot use 'none' for session store or else no one can use the interface...falling back to 'memory'`);
provider.store = 'memory';
if (caching.store === 'none') {
logger.warn(`Cannot use 'none' for web caching or else no one can use the interface...falling back to 'memory'`);
caching.store = 'memory';
}
//const webCache = createCacheManager(provider) as Cache;
//const webCachePrefix = buildCachePrefix([prefix, 'web']);
const webCache = createCacheManager({...caching, prefix: buildCachePrefix([prefix, 'web'])}) as Cache;
//const previousSessions = await webCache.get
const connectedUsers: ConnectUserObj = {};
//<editor-fold desc=Session and Auth>
/*
@@ -217,9 +228,9 @@ const webClient = async (options: OperatorConfig) => {
const sessionObj = session({
cookie: {
maxAge: provider.ttl,
maxAge: sessionMaxAge * 1000,
},
store: new CacheManagerStore(createCacheManager(provider) as Cache),
store: new CacheManagerStore(webCache, {prefix: 'sess:'}),
resave: false,
saveUninitialized: false,
secret,
@@ -279,7 +290,7 @@ const webClient = async (options: OperatorConfig) => {
return res.render('error', {error: errContent});
}
// @ts-ignore
const invite = invites.get(req.session.inviteId) as inviteData;
const invite = await webCache.get(`invite:${req.session.inviteId}`) as InviteData;
const client = await Snoowrap.fromAuthCode({
userAgent: `web:contextBot:web`,
clientId: invite.clientId,
@@ -290,7 +301,7 @@ const webClient = async (options: OperatorConfig) => {
// @ts-ignore
const user = await client.getMe();
// @ts-ignore
invites.delete(req.session.inviteId);
await webCache.del(`invite:${req.session.inviteId}`);
let data: any = {
accessToken: client.accessToken,
refreshToken: client.refreshToken,
@@ -346,7 +357,7 @@ const webClient = async (options: OperatorConfig) => {
});
let token = randomId();
interface inviteData {
interface InviteData {
permissions: string[],
subreddit?: string,
instance?: string,
@@ -355,7 +366,6 @@ const webClient = async (options: OperatorConfig) => {
redirectUri: string
creator: string
}
const invites: Map<string, inviteData> = new Map();
const helperAuthed = async (req: express.Request, res: express.Response, next: Function) => {
@@ -386,14 +396,14 @@ const webClient = async (options: OperatorConfig) => {
});
});
app.getAsync('/auth/invite', (req, res) => {
app.getAsync('/auth/invite', async (req, res) => {
const {invite: inviteId} = req.query;
if(inviteId === undefined) {
return res.render('error', {error: '`invite` param is missing from URL'});
}
const invite = invites.get(inviteId as string);
if(invite === undefined) {
const invite = await webCache.get(`invite:${inviteId}`) as InviteData | undefined | null;
if(invite === undefined || invite === null) {
return res.render('error', {error: 'Invite with the given id does not exist'});
}
@@ -429,7 +439,7 @@ const webClient = async (options: OperatorConfig) => {
}
const inviteId = code || randomId();
invites.set(inviteId, {
await webCache.set(`invite:${inviteId}`, {
permissions,
clientId: (ci || clientId).trim(),
clientSecret: (ce || clientSecret).trim(),
@@ -437,7 +447,7 @@ const webClient = async (options: OperatorConfig) => {
instance,
subreddit,
creator: (req.user as Express.User).name,
});
}, {ttl: invitesMaxAge * 1000});
return res.send(inviteId);
});
@@ -446,8 +456,8 @@ const webClient = async (options: OperatorConfig) => {
if(inviteId === undefined) {
return res.render('error', {error: '`invite` param is missing from URL'});
}
const invite = invites.get(inviteId as string);
if(invite === undefined) {
const invite = await webCache.get(`invite:${inviteId}`) as InviteData | undefined | null;
if(invite === undefined || invite === null) {
return res.render('error', {error: 'Invite with the given id does not exist'});
}
@@ -643,9 +653,9 @@ const webClient = async (options: OperatorConfig) => {
req.session.level = 'verbose';
req.session.sort = 'descending';
req.session.save();
// @ts-ignore
connectedUsers[req.session.id] = {};
}
// @ts-ignore
connectedUsers[req.session.id] = {};
next();
}
@@ -822,6 +832,73 @@ const webClient = async (options: OperatorConfig) => {
return res.send(resp);
});
app.getAsync('/events', [ensureAuthenticatedApi, defaultSession, instanceWithPermissions, botWithPermissions, createUserToken], async (req: express.Request, res: express.Response) => {
const {subreddit} = req.query as any;
const resp = await got.get(`${(req.instance as CMInstance).normalUrl}/events`, {
headers: {
'Authorization': `Bearer ${req.token}`,
},
searchParams: {
subreddit,
bot: req.bot?.botName
}
}).json() as [any];
return res.render('events', {
data: resp.map((x) => {
const {timestamp, activity: {peek, link}, ruleResults = [], actionResults = [], ...rest} = x;
const time = dayjs(timestamp).local().format('YY-MM-DD HH:mm:ss z');
const formattedPeek = Autolinker.link(peek, {
email: false,
phone: false,
mention: false,
hashtag: false,
stripPrefix: false,
sanitizeHtml: true,
});
const formattedRuleResults = ruleResults.map((y: any) => {
const {triggered, result, ...restY} = y;
let t = triggeredIndicator(false);
if(triggered === null) {
t = 'Skipped';
} else if(triggered === true) {
t = triggeredIndicator(true);
}
return {
...restY,
triggered: t,
result: result || '-'
};
});
const formattedActionResults = actionResults.map((y: any) => {
const {run, runReason, success, result, dryRun, ...restA} = y;
let res = '';
if(!run) {
res = `Not Run - ${runReason === undefined ? '(No Reason)' : runReason}`;
} else {
res = `${triggeredIndicator(success)}${result !== undefined ? ` - ${result}` : ''}`;
}
return {
...restA,
dryRun: dryRun ? ' (DRYRUN)' : '',
result: res
};
});
return {
...rest,
timestamp: time,
activity: {
link,
peek: formattedPeek,
},
ruleResults: formattedRuleResults,
actionResults: formattedActionResults
}
}),
title: `${subreddit !== undefined ? `${subreddit} ` : ''}Actioned Events`
});
});
app.getAsync('/logs/settings/update',[ensureAuthenticated], async (req: express.Request, res: express.Response) => {
const e = req.query;
for (const [setting, val] of Object.entries(req.query)) {

View File

@@ -33,7 +33,7 @@ const managerStats: ManagerStats = {
rulesRunSinceStartTotal: 0,
rulesRunTotal: 0,
rulesTriggeredSinceStartTotal: 0,
rulesTriggeredTotal: 0
rulesTriggeredTotal: 0,
};
const botStats: BotStats = {
apiAvg: '-',

View File

@@ -31,3 +31,26 @@ export const botRoute = (required = true) => async (req: Request, res: Response,
}
return next();
}
export const subredditRoute = (required = true) => async (req: Request, res: Response, next: Function) => {
const bot = req.serverBot;
const {subreddit} = req.query as any;
if(subreddit === undefined && required === false) {
next();
} else {
const {name: userName, realManagers = [], isOperator} = req.user as Express.User;
if (!isOperator && !realManagers.includes(subreddit)) {
return res.status(400).send('Cannot access route for a subreddit you do not manage or that is not run by the bot')
}
const manager = bot.subManagers.find(x => x.displayLabel === subreddit);
if (manager === undefined) {
return res.status(400).send('Cannot access route for a subreddit you do not manage or that is not run by the bot')
}
req.manager = manager;
next();
}
}

View File

@@ -67,7 +67,7 @@ const action = async (req: express.Request, res: express.Response) => {
if (type === 'unmoderated') {
const activities = await manager.subreddit.getUnmoderated({limit: 100});
for (const a of activities.reverse()) {
await manager.queue.push({
await manager.firehose.push({
checkType: a instanceof Submission ? 'Submission' : 'Comment',
activity: a,
});
@@ -75,7 +75,7 @@ const action = async (req: express.Request, res: express.Response) => {
} else {
const activities = await manager.subreddit.getModqueue({limit: 100});
for (const a of activities.reverse()) {
await manager.queue.push({
await manager.firehose.push({
checkType: a instanceof Submission ? 'Submission' : 'Comment',
activity: a,
});

View File

@@ -1,31 +1,51 @@
import {Request, Response} from 'express';
import {authUserCheck, botRoute} from "../../../middleware";
import {authUserCheck, botRoute, subredditRoute} from "../../../middleware";
import Submission from "snoowrap/dist/objects/Submission";
import winston from 'winston';
import {COMMENT_URL_ID, parseLinkIdentifier, SUBMISSION_URL_ID} from "../../../../../util";
import {booleanMiddle} from "../../../../Common/middleware";
import {Manager} from "../../../../../Subreddit/Manager";
import {ActionedEvent} from "../../../../../Common/interfaces";
const commentReg = parseLinkIdentifier([COMMENT_URL_ID]);
const submissionReg = parseLinkIdentifier([SUBMISSION_URL_ID]);
const config = async (req: Request, res: Response) => {
const bot = req.serverBot;
const {subreddit} = req.query as any;
const {name: userName, realManagers = [], isOperator} = req.user as Express.User;
if (!isOperator && !realManagers.includes(subreddit)) {
return res.status(400).send('Cannot retrieve config for subreddit you do not manage or is not run by the bot')
}
const manager = bot.subManagers.find(x => x.displayLabel === subreddit);
if (manager === undefined) {
return res.status(400).send('Cannot retrieve config for subreddit you do not manage or is not run by the bot')
}
const manager = req.manager as Manager;
// @ts-ignore
const wiki = await manager.subreddit.getWikiPage(manager.wikiLocation).fetch();
return res.send(wiki.content_md);
};
export const configRoute = [authUserCheck(), botRoute(), config];
export const configRoute = [authUserCheck(), botRoute(), subredditRoute(), config];
const actionedEvents = async (req: Request, res: Response) => {
let managers: Manager[] = [];
const manager = req.manager as Manager | undefined;
if(manager !== undefined) {
managers.push(manager);
} else {
for(const manager of req.serverBot.subManagers) {
if((req.user?.realManagers as string[]).includes(manager.displayLabel)) {
managers.push(manager);
}
}
}
let events: ActionedEvent[] = [];
for(const m of managers) {
if(m.resources !== undefined) {
events = events.concat(await m.resources.getActionedEvents());
}
}
events.sort((a, b) => b.timestamp - a.timestamp);
return res.json(events);
};
export const actionedEventsRoute = [authUserCheck(), botRoute(), subredditRoute(false), actionedEvents];
const action = async (req: Request, res: Response) => {
const bot = req.serverBot;

View File

@@ -221,7 +221,8 @@ const status = () => {
}, cumRaw);
const cacheReq = subManagerData.reduce((acc, curr) => acc + curr.stats.cache.totalRequests, 0);
const cacheMiss = subManagerData.reduce((acc, curr) => acc + curr.stats.cache.totalMiss, 0);
const aManagerWithDefaultResources = bot.subManagers.find(x => x.resources !== undefined && x.resources.cacheSettingsHash === 'default');
const sharedSub = subManagerData.find(x => x.stats.cache.isShared);
const sharedCount = sharedSub !== undefined ? sharedSub.stats.cache.currentKeyCount : 0;
let allManagerData: any = {
name: 'All',
status: bot.running ? 'RUNNING' : 'NOT RUNNING',
@@ -243,7 +244,7 @@ const status = () => {
stats: {
...rest,
cache: {
currentKeyCount: aManagerWithDefaultResources !== undefined ? await aManagerWithDefaultResources.resources.getCacheKeyCount() : 'N/A',
currentKeyCount: sharedCount + subManagerData.reduce((acc, curr) => curr.stats.cache.isShared ? acc : acc + curr.stats.cache.currentKeyCount,0),
isShared: false,
totalRequests: cacheReq,
totalMiss: cacheMiss,

View File

@@ -22,7 +22,7 @@ import SimpleError from "../../Utils/SimpleError";
import {heartbeat} from "./routes/authenticated/applicationRoutes";
import logs from "./routes/authenticated/user/logs";
import status from './routes/authenticated/user/status';
import {actionRoute, configRoute} from "./routes/authenticated/user";
import {actionedEventsRoute, actionRoute, configRoute} from "./routes/authenticated/user";
import action from "./routes/authenticated/user/action";
import {authUserCheck, botRoute} from "./middleware";
import {opStats} from "../Common/util";
@@ -189,6 +189,8 @@ const rcbServer = async function (options: OperatorConfig) {
server.getAsync('/config', ...configRoute);
server.getAsync('/events', ...actionedEventsRoute);
server.getAsync('/action', ...action);
server.getAsync('/check', ...actionRoute);

View File

@@ -22,6 +22,7 @@
<div class="container mx-auto">
<div class="grid">
<div class="dark:text-white mb-3 pl-2">
Schema <a href="/config?schema=subreddit" id="subredditSchemaType">Subreddit</a> / <a href="/config?schema=operator" id="operatorSchemaType">Operator</a> |
<span class="has-tooltip">
<span style="z-index:999; margin-top: 30px;" class='tooltip rounded shadow-lg p-3 bg-gray-100 text-black space-y-2'>
<div>Copy + paste your configuration here to get:</div>
@@ -39,7 +40,6 @@
</ul>
<div>When done editing hit Ctrl+A (Command+A on macOS) to select all text, then copy + paste back into your wiki/file</div>
</span>
<span id="schemaType"></span> |
<span class="cursor-help">
How To Use
<span>
@@ -55,7 +55,7 @@
</span>
</span>
</span>
| <a id="schemaOpen" href="">Open With Operator Schema</a>
| <input id="configUrl" class="text-black placeholder-gray-500 rounded mx-2" style="min-width:400px;" placeholder="URL of a config to load"/> <a href="#" id="loadConfig">Load</a>
<div id="error" class="font-semibold"></div>
</div>
<div style="min-height: 80vh" id="editor"></div>
@@ -104,22 +104,22 @@
var searchParams = new URLSearchParams(window.location.search);
let schemaType;
let schemaFile;
if(searchParams.get('schema') === 'operator') {
schemaType = 'OperatorConfig.json';
schemaType = 'operator';
schemaFile = 'OperatorConfig.json';
preamble.push('// automatic validation of your OPERATOR configuration');
document.querySelector('#schemaTypeList').innerHTML = 'automatic validation of your OPERATOR configuration (yellow squiggly)';
document.querySelector('#schemaType').innerHTML = 'Operator Configuration';
document.querySelector('#schemaOpen').href = '/config?schema=subreddit';
document.querySelector('#schemaOpen').innerHTML = 'Open with Subreddit Schema';
document.querySelector('#operatorSchemaType').classList.add('font-bold', 'no-underline', 'pointer-events-none');
} else {
schemaType = 'App.json';
schemaType = 'subreddit';
schemaFile = 'App.json';
preamble.push('// automatic validation of your SUBREDDIT configuration');
document.querySelector('#schemaTypeList').innerHTML = 'automatic validation of your SUBREDDIT configuration (yellow squiggly)'
document.querySelector('#schemaType').innerHTML = 'Subreddit Configuration';
document.querySelector('#schemaOpen').href = '/config?schema=operator';
document.querySelector('#subredditSchemaType').classList.add('font-bold', 'no-underline', 'pointer-events-none');
}
const schemaUri = `${document.location.origin}/schemas/${schemaType}`;
const schemaUri = `${document.location.origin}/schemas/${schemaFile}`;
require(['vs/editor/editor.main'], function () {
const modelUri = monaco.Uri.parse("a://b/foo.json");
@@ -135,15 +135,44 @@
schema: schemaData
}]
});
if(searchParams.get('subreddit') !== null) {
fetch(`${document.location.origin}/config/content${document.location.search}`).then((resp) => {
var model = monaco.editor.createModel(preamble.join('\r\n'), "json", modelUri);
document.querySelector('#loadConfig').addEventListener('click', (e) => {
e.preventDefault();
const newUrl = document.querySelector('#configUrl').value;
fetch(newUrl).then((resp) => {
if(!resp.ok) {
resp.text().then(data => {
document.querySelector('#error').innerHTML = `Error occurred while fetching configuration => ${data}`
});
} else {
var sp = new URLSearchParams();
sp.append('schema', schemaType);
sp.append('url', newUrl);
history.pushState(null, '', `${window.location.pathname}?${sp.toString()}`);
resp.text().then(data => {
//model = monaco.editor.createModel(data, "json", modelUri);
model.setValue(data);
})
}
});
});
let dlUrl = searchParams.get('url');
if(dlUrl === null && searchParams.get('subreddit') !== null) {
dlUrl = `${document.location.origin}/config/content${document.location.search}`
}
if(dlUrl !== null) {
document.querySelector('#configUrl').value = dlUrl;
fetch(dlUrl).then((resp) => {
if(!resp.ok) {
resp.text().then(data => {
document.querySelector('#error').innerHTML = `Error occurred while fetching configuration => ${data}`
});
} else {
resp.text().then(data => {
var model = monaco.editor.createModel(data, "json", modelUri);
model.setValue(data);
//model = monaco.editor.createModel(data, "json", modelUri);
var editor = monaco.editor.create(document.getElementById('editor'), {
model,
theme: 'vs-dark',
@@ -154,9 +183,8 @@
editor;
})
}
})
});
} else {
var model = monaco.editor.createModel(preamble.join('\r\n'), "json", modelUri);
var editor = monaco.editor.create(document.getElementById('editor'), {
model,
theme: 'vs-dark',

View File

@@ -0,0 +1,100 @@
<html>
<head>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/tailwindcss/2.0.3/tailwind.min.css"
integrity="sha512-wl80ucxCRpLkfaCnbM88y4AxnutbGk327762eM9E/rRTvY/ZGAHWMZrYUq66VQBYMIYDFpDdJAOGSLyIPHZ2IQ=="
crossorigin="anonymous"/>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/tailwindcss/2.0.3/tailwind-dark.min.css"
integrity="sha512-WvyKyiVHgInX5UQt67447ExtRRZG/8GUijaq1MpqTNYp8wY4/EJOG5bI80sRp/5crDy4Z6bBUydZI2OFV3Vbtg=="
crossorigin="anonymous"/>
<script src="https://code.iconify.design/1/1.0.4/iconify.min.js"></script>
<link rel="stylesheet" href="/public/themeToggle.css">
<link rel="stylesheet" href="/public/app.css">
<title><%= title %></title>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width,initial-scale=1.0">
<!--icons from https://heroicons.com -->
<style>
.peek a {
display: none;
}
</style>
</head>
<body>
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
<%- include('partials/title') %>
<div class="container mx-auto">
<div class="grid">
<div class="bg-white dark:bg-gray-500 dark:text-white px-3 py-6 space-y-3">
<% if(data.length === 0) { %>
No events have been actioned yet!
<% } %>
<% data.forEach(function (eRes){ %>
<div class="shadow-lg">
<div class="space-x-4 px-4 p-2 leading-2 font-semibold bg-gray-300 dark:bg-gray-700 dark:text-white">
<div class="flex items-center justify-between">
<div>
<span class="peek"><%- eRes.activity.peek %></span><a target="_blank" href="https://reddit.com<%= eRes.activity.link%>">(Link)</a>
</div>
<div class="flex items-center flex-end">
<%= eRes.subreddit %> @ <%= eRes.timestamp %>
</div>
</div>
</div>
<div class="p-4 pl-6 pt-3 space-y-2">
<div><span class="font-semibold">Check:</span> <%= eRes.check %><span class="px-3">&#10132;</span><%= eRes.ruleSummary %></div>
<div>
<span class="font-semibold">Rules:</span>
<ul class="list-inside list-disc">
<% eRes.ruleResults.forEach(function (ruleResult) { %>
<li><%= ruleResult.name %> - <%= ruleResult.triggered%> - <%= ruleResult.result %></li>
<% }) %>
</ul>
</div>
<div><span class="font-semibold">Actions</span>
<ul class="list-inside list-disc">
<% eRes.actionResults.forEach(function (aRes) { %>
<li><%= aRes.name %><%= aRes.dryRun %> - <%= aRes.result %></li>
<% }) %>
</ul>
</div>
</div>
</div>
<% }) %>
</div>
</div>
</div>
<%- include('partials/footer') %>
</div>
<script>
document.querySelectorAll('.theme').forEach(el => {
el.addEventListener('click', e => {
e.preventDefault();
if (e.target.id === 'dark') {
document.body.classList.add('dark');
localStorage.setItem('ms-dark', 'yes');
} else {
document.body.classList.remove('dark');
localStorage.setItem('ms-dark', 'no');
}
document.querySelectorAll('.theme').forEach(el => {
el.classList.remove('font-bold', 'no-underline', 'pointer-events-none');
});
e.target.classList.add('font-bold', 'no-underline', 'pointer-events-none');
})
})
document.querySelector("#themeToggle").checked = localStorage.getItem('ms-dark') !== 'no';
document.querySelector("#themeToggle").onchange = (e) => {
if (e.target.checked === true) {
document.body.classList.add('dark');
localStorage.setItem('ms-dark', 'yes');
} else {
document.body.classList.remove('dark');
localStorage.setItem('ms-dark', 'no');
}
}
</script>
</body>
</html>

View File

@@ -335,14 +335,19 @@
<div data-subreddit="<%= data.name %>"
class="stats botStats reloadStats">
<label>Events</label>
<span><%= data.stats.eventsCheckedSinceStartTotal === undefined ? '-' : data.stats.eventsCheckedSinceStartTotal %></span>
<span>
<%= data.stats.eventsCheckedSinceStartTotal === undefined ? '-' : data.stats.eventsCheckedSinceStartTotal %>
</span>
<label>Checks</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.checksTriggeredSinceStartTotal %></span> Triggered / <span><%= data.stats.checksRunSinceStartTotal %></span> Run
</span>
<span><%= data.stats.checksTriggeredSinceStartTotal %></span> T / <span><%= data.stats.checksRunSinceStartTotal %></span> R
<% if (data.name !== 'All') { %>
<a target="_blank" href="/events?instance=<%= instanceId %>&bot=<%= bot.system.name %>&subreddit=<%= data.name %>" class="underline" style="text-decoration-style: dotted"><%= data.stats.checksTriggeredSinceStartTotal %> T</a>
<% } else { %>
<%= data.stats.checksTriggeredSinceStartTotal %> T
<% } %>/ <span><%= data.stats.checksRunSinceStartTotal %></span> R
</span>
<label>Rules</label>
@@ -360,14 +365,15 @@
<% } %>
<div data-subreddit="<%= data.name %>" class="stats botStats allStats">
<label>Events</label>
<span><%= data.stats.eventsCheckedTotal %></span>
<span>
<%= data.stats.eventsCheckedTotal %>
</span>
<label>Checks</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.checksTriggeredTotal %></span> Triggered / <span><%= data.stats.checksRunTotal %></span> Run
</span>
<span class="cursor-help"><span><%= data.stats.checksTriggeredTotal %></span> T / <span><%= data.stats.checksRunTotal %></span> R</span>
<span><%= data.stats.checksTriggeredTotal %> T / <span><%= data.stats.checksRunTotal %></span> R</span>
</span>
<label>Rules</label>
@@ -379,7 +385,11 @@
</span>
<label>Actions</label>
<span><%= data.stats.actionsRunTotal %> Run</span>
<% if (data.name !== 'All') { %>
<a target="_blank" href="/events?instance=<%= instanceId %>&bot=<%= bot.system.name %>&subreddit=<%= data.name %>" class="underline" style="text-decoration-style: dotted"><%= data.stats.actionsRunTotal %> Run</a>
<% } else { %>
<a target="_blank" href="/events?instance=<%= instanceId %>&bot=<%= bot.system.name %>" class="underline" style="text-decoration-style: dotted"><%= data.stats.actionsRunTotal %> Run</a>
<% } %>
</div>
</div>
<div>
@@ -632,7 +642,7 @@
const url = urlInput.value;
const dryRun = dryRunCheck.checked ? 1 : 0;
const fetchUrl = `/api/check?instanceId=<%= instanceId %>%bot=${bot}&url=${url}&dryRun=${dryRun}&subreddit=${subreddit}`;
const fetchUrl = `/api/check?instance=<%= instanceId %>&bot=${bot}&url=${url}&dryRun=${dryRun}&subreddit=${subreddit}`;
fetch(fetchUrl);
urlInput.value = '';

View File

@@ -1,6 +1,7 @@
import {App} from "../../../App";
import Bot from "../../../Bot";
import {BotInstance, CMInstance} from "../../interfaces";
import {Manager} from "../../../Subreddit/Manager";
declare global {
declare namespace Express {
@@ -10,6 +11,7 @@ declare global {
instance?: CMInstance,
bot?: BotInstance,
serverBot: Bot,
manager?: Manager,
}
interface User {
name: string

View File

@@ -2,6 +2,8 @@ import winston from 'winston';
import 'winston-daily-rotate-file';
import dayjs from 'dayjs';
import utc from 'dayjs/plugin/utc.js';
import advancedFormat from 'dayjs/plugin/advancedFormat';
import tz from 'dayjs/plugin/timezone';
import dduration from 'dayjs/plugin/duration.js';
import relTime from 'dayjs/plugin/relativeTime.js';
import sameafter from 'dayjs/plugin/isSameOrAfter.js';
@@ -20,7 +22,7 @@ import {App} from "./App";
import apiServer from './Web/Server/server';
import clientServer from './Web/Client';
import Submission from "snoowrap/dist/objects/Submission";
import {COMMENT_URL_ID, parseLinkIdentifier, SUBMISSION_URL_ID} from "./util";
import {COMMENT_URL_ID, isScopeError, parseLinkIdentifier, SUBMISSION_URL_ID} from "./util";
import LoggedError from "./Utils/LoggedError";
import {buildOperatorConfigWithDefaults, parseOperatorConfigFromSources} from "./ConfigBuilder";
import {getLogger} from "./Utils/loggerFactory";
@@ -31,6 +33,8 @@ dayjs.extend(dduration);
dayjs.extend(relTime);
dayjs.extend(sameafter);
dayjs.extend(samebefore);
dayjs.extend(tz);
dayjs.extend(advancedFormat);
const commentReg = parseLinkIdentifier([COMMENT_URL_ID]);
const submissionReg = parseLinkIdentifier([SUBMISSION_URL_ID]);
@@ -182,7 +186,7 @@ const program = new Command();
for(const manager of b.subManagers) {
const activities = await manager.subreddit.getUnmoderated();
for (const a of activities.reverse()) {
manager.queue.push({
manager.firehose.push({
checkType: a instanceof Submission ? 'Submission' : 'Comment',
activity: a,
options: {checkNames: checks}
@@ -197,11 +201,8 @@ const program = new Command();
} catch (err) {
if (!err.logged && !(err instanceof LoggedError)) {
const logger = winston.loggers.get('app');
if (err.name === 'StatusCodeError' && err.response !== undefined) {
const authHeader = err.response.headers['www-authenticate'];
if (authHeader !== undefined && authHeader.includes('insufficient_scope')) {
logger.error('Reddit responded with a 403 insufficient_scope, did you choose the correct scopes?');
}
if(isScopeError(err)) {
logger.error('Reddit responded with a 403 insufficient_scope which means the bot is lacking necessary OAUTH scopes to perform general actions.');
}
logger.error(err);
}

View File

@@ -1,7 +1,7 @@
import winston, {Logger} from "winston";
import jsonStringify from 'safe-stable-stringify';
import dayjs, {Dayjs, OpUnitType} from 'dayjs';
import {isRuleSetResult, RulePremise, RuleResult, RuleSetResult} from "./Rule";
import {FormattedRuleResult, isRuleSetResult, RulePremise, RuleResult, RuleSetResult} from "./Rule";
import deepEqual from "fast-deep-equal";
import {Duration} from 'dayjs/plugin/duration.js';
import Ajv from "ajv";
@@ -13,8 +13,8 @@ import {
ActivityWindowCriteria, CacheOptions, CacheProvider,
DurationComparison,
GenericComparison, LogInfo, NamedGroup,
PollingOptionsStrong, RegExResult, ResourceStats,
StringOperator
PollingOptionsStrong, RedditEntity, RedditEntityType, RegExResult, ResourceStats,
StringOperator, StrongSubredditState, SubredditState
} from "./Common/interfaces";
import JSON5 from "json5";
import yaml, {JSON_SCHEMA} from "js-yaml";
@@ -28,6 +28,10 @@ import crypto from "crypto";
import Autolinker from 'autolinker';
import {create as createMemoryStore} from './Utils/memoryStore';
import {MESSAGE} from "triple-beam";
import {RedditUser} from "snoowrap/dist/objects";
import reRegExp from '@stdlib/regexp-regexp';
const ReReg = reRegExp();
const {format} = winston;
const {combine, printf, timestamp, label, splat, errors} = format;
@@ -569,6 +573,24 @@ export const parseSubredditName = (val:string): string => {
return matches[1] as string;
}
export const REDDIT_ENTITY_REGEX: RegExp = /^\s*(?<entityType>\/[ru]\/|[ru]\/)*(?<name>\w+)*\s*$/;
export const REDDIT_ENTITY_REGEX_URL = 'https://regexr.com/65r9b';
export const parseRedditEntity = (val:string): RedditEntity => {
const matches = val.match(REDDIT_ENTITY_REGEX);
if (matches === null) {
throw new InvalidRegexError(REDDIT_ENTITY_REGEX, val, REDDIT_ENTITY_REGEX_URL)
}
const groups = matches.groups as any;
let eType: RedditEntityType = 'user';
if(groups.entityType !== undefined && typeof groups.entityType === 'string' && groups.entityType.includes('r')) {
eType = 'subreddit';
}
return {
name: groups.name,
type: eType,
}
}
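A few examples of what parseRedditEntity produces, based on the regex above:
parseRedditEntity('/u/FoxxMD');        // => { name: 'FoxxMD', type: 'user' }
parseRedditEntity('r/mealtimevideos'); // => { name: 'mealtimevideos', type: 'subreddit' }
parseRedditEntity('FoxxMD');           // => { name: 'FoxxMD', type: 'user' } (no prefix defaults to user)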
const WIKI_REGEX: RegExp = /^\s*wiki:(?<url>[^|]+)\|*(?<subreddit>[^\s]*)\s*$/;
const WIKI_REGEX_URL = 'https://regexr.com/61bq1';
const URL_REGEX: RegExp = /^\s*url:(?<url>[^\s]+)\s*$/;
@@ -824,12 +846,22 @@ export const isRedditMedia = (act: Submission): boolean => {
}
export const isExternalUrlSubmission = (act: Comment | Submission): boolean => {
return act instanceof Submission && !act.is_self && !isRedditMedia(act);
return asSubmission(act) && !act.is_self && !isRedditMedia(act);
}
export const parseRegex = (r: string | RegExp, val: string, flags?: string): RegExResult => {
export const parseStringToRegex = (val: string, defaultFlags?: string): RegExp | undefined => {
const result = ReReg.exec(val);
if (result === null) {
return undefined;
}
// index 0 => full string
// index 1 => regex without flags and forward slashes
// index 2 => flags
const flags = result[2] === '' ? (defaultFlags || '') : result[2];
return new RegExp(result[1], flags);
}
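Illustrative behavior, assuming the @stdlib regex only matches values enclosed in forward slashes:
parseStringToRegex('/reddit\\.com/i');  // => /reddit\.com/i
parseStringToRegex('/foo/', 'ig');      // => /foo/gi (default flags applied when the value has none)
parseStringToRegex('mealtimevideos');   // => undefined (not enclosed in forward slashes)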
const reg = r instanceof RegExp ? r : new RegExp(r, flags);
export const parseRegex = (reg: RegExp, val: string): RegExResult => {
if(reg.global) {
const g = Array.from(val.matchAll(reg));
@@ -855,12 +887,59 @@ export const parseRegex = (r: string | RegExp, val: string, flags?: string): Reg
}
}
export async function readJson(path: string, opts: any) {
export const isStrongSubredditState = (value: SubredditState | StrongSubredditState) => {
return value.name === undefined || value.name instanceof RegExp;
}
export const asStrongSubredditState = (value: any): value is StrongSubredditState => {
return isStrongSubredditState(value);
}
export interface StrongSubredditStateOptions {
defaultFlags?: string
generateDescription?: boolean
}
export const toStrongSubredditState = (s: SubredditState, opts?: StrongSubredditStateOptions): StrongSubredditState => {
const {defaultFlags, generateDescription = false} = opts || {};
const {name: nameVal, stateDescription} = s;
let nameReg: RegExp | undefined;
if (nameVal !== undefined) {
if (!(nameVal instanceof RegExp)) {
nameReg = parseStringToRegex(nameVal, defaultFlags);
if (nameReg === undefined) {
nameReg = new RegExp(parseSubredditName(nameVal), defaultFlags);
}
} else {
nameReg = nameVal;
}
}
const strongState = {
...s,
name: nameReg
};
if (generateDescription && stateDescription === undefined) {
strongState.stateDescription = objectToStringSummary(strongState);
}
return strongState;
}
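A rough example of the conversion (the defaultFlags value here is illustrative):
// 'r/mealtimevideos' is not slash-enclosed so it falls back to parseSubredditName + defaultFlags
toStrongSubredditState({ name: 'r/mealtimevideos' }, { defaultFlags: 'i' });
// => { name: /mealtimevideos/i }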
export async function readConfigFile(path: string, opts: any) {
const {log, throwOnNotFound = true} = opts;
try {
await promises.access(path, constants.R_OK);
const data = await promises.readFile(path);
return JSON.parse(data as unknown as string);
const [configObj, jsonErr, yamlErr] = parseFromJsonOrYamlToObject(data as unknown as string);
if(configObj !== undefined) {
return configObj as object;
}
log.error(`Could not parse config file contents as JSON or YAML:`);
log.error(jsonErr);
log.error(yamlErr);
throw new SimpleError('Could not parse config file contents as JSON or YAML');
} catch (e) {
const {code} = e;
if (code === 'ENOENT') {
@@ -924,10 +1003,12 @@ export const cacheStats = (): ResourceStats => {
author: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
authorCrit: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
itemCrit: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
subredditCrit: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
content: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
userNotes: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
submission: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
comment: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
subreddit: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
commentCheck: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0}
};
}
@@ -947,7 +1028,7 @@ export const buildCacheOptionsFromProvider = (provider: CacheProvider | any): Ca
}
export const createCacheManager = (options: CacheOptions): Cache => {
const {store, max, ttl = 60, host = 'localhost', port, auth_pass, db} = options;
const {store, max, ttl = 60, host = 'localhost', port, auth_pass, db, ...rest} = options;
switch (store) {
case 'none':
return cacheManager.caching({store: 'none', max, ttl});
@@ -958,7 +1039,8 @@ export const createCacheManager = (options: CacheOptions): Cache => {
port,
auth_pass,
db,
ttl
ttl,
...rest,
});
case 'memory':
default:
@@ -984,3 +1066,91 @@ export const snooLogWrapper = (logger: Logger) => {
trace: (...args: any[]) => logger.debug(args.slice(0, 2).join(' '), [args.slice(2)]),
}
}
export const isScopeError = (err: any): boolean => {
if(typeof err === 'object' && err.name === 'StatusCodeError' && err.response !== undefined) {
const authHeader = err.response.headers['www-authenticate'];
return authHeader !== undefined && authHeader.includes('insufficient_scope');
}
return false;
}
/**
* Cached activities lose type information when deserialized, so we also need to check properties to see if the object has the shape of a Submission
* */
export const isSubmission = (value: any) => {
return value instanceof Submission || value.domain !== undefined;
}
export const asSubmission = (value: any): value is Submission => {
return isSubmission(value);
}
/**
* Serialized activities store subreddit and user properties as their string representations (instead of proxy)
* */
export const getActivitySubredditName = (activity: any): string => {
if(typeof activity.subreddit === 'string') {
return activity.subreddit;
}
return activity.subreddit.display_name;
}
/**
* Serialized activities store subreddit and user properties as their string representations (instead of proxy)
* */
export const getActivityAuthorName = (author: RedditUser | string): string => {
if(typeof author === 'string') {
return author;
}
return author.name;
}
export const buildCachePrefix = (parts: any[]): string => {
const prefix = parts.filter(x => typeof x === 'string' && x !== '').map(x => x.trim()).map(x => x.split(':')).flat().filter(x => x !== '').join(':')
if(prefix !== '') {
return `${prefix}:`;
}
return prefix;
}
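For example:
buildCachePrefix([undefined, 'web']);   // => 'web:'
buildCachePrefix(['ROOT:', 'SHARED']);  // => 'ROOT:SHARED:'
buildCachePrefix([]);                   // => ''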
export const objectToStringSummary = (obj: object): string => {
const parts = [];
for(const [key, val] of Object.entries(obj)) {
parts.push(`${key}: ${val}`);
}
return parts.join(' | ');
}
/**
* Returns the index of the last element in the array where predicate is true, and -1
* otherwise.
* @param array The source array to search in
* @param predicate find calls predicate once for each element of the array, in descending
* order, until it finds one where predicate returns true. If such an element is found,
* findLastIndex immediately returns that element index. Otherwise, findLastIndex returns -1.
*
* @see https://stackoverflow.com/a/53187807/1469797
*/
export function findLastIndex<T>(array: Array<T>, predicate: (value: T, index: number, obj: T[]) => boolean): number {
let l = array.length;
while (l--) {
if (predicate(array[l], l, array))
return l;
}
return -1;
}
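For example:
findLastIndex([1, 5, 3, 5, 2], x => x === 5); // => 3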
export const parseRuleResultsToMarkdownSummary = (ruleResults: RuleResult[]): string => {
const results = ruleResults.map((y: any) => {
const {triggered, result, name, ...restY} = y;
let t = triggeredIndicator(false);
if(triggered === null) {
t = 'Skipped';
} else if(triggered === true) {
t = triggeredIndicator(true);
}
return `* ${name} - ${t} - ${result || '-'}`;
});
return results.join('\r\n');
}
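An illustrative example of the produced markdown, assuming triggeredIndicator returns a check/cross style marker:
parseRuleResultsToMarkdownSummary([
{ name: 'attribution', triggered: true, result: '10% of 50 Activities' },
{ name: 'recentActivity', triggered: null, result: undefined },
] as unknown as RuleResult[]);
// =>
// * attribution - ✓ - 10% of 50 Activities
// * recentActivity - Skipped - -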