Compare commits


239 Commits
0.1 ... 0.5.0

Author SHA1 Message Date
FoxxMD
fb3047ca82 Remove error wording (may be normal shutdown) 2021-07-27 13:08:56 -04:00
FoxxMD
193ecfba2f Fix truthy condition for notifyOnTrigger 2021-07-27 12:56:53 -04:00
FoxxMD
ef3475e519 Add a blank notification manager for initial manager creation 2021-07-27 12:42:31 -04:00
FoxxMD
a003e18360 Add log statement when sending notification 2021-07-27 12:33:27 -04:00
FoxxMD
6b6124d76e Implement event hooks for notifications
* Implemented a generic notification manager with several event types
* Implemented discord notification provider
2021-07-27 12:24:36 -04:00
FoxxMD
e4f18e8f06 Fix cache call request rate when over 10 minutes 2021-07-26 18:45:53 -04:00
FoxxMD
24963ec333 More op doc fixes 2021-07-26 17:10:12 -04:00
FoxxMD
2ab6ee3715 Typo fix 2021-07-26 17:07:07 -04:00
FoxxMD
802884f686 Add operator configuration documentation 2021-07-26 17:06:02 -04:00
FoxxMD
67ed8ab4ee Add more caching info and tooltips for major components 2021-07-26 14:47:58 -04:00
FoxxMD
1e6d61ac31 Add 401 http status to retry handler
Seen this occasionally on polling; it's actually a reddit issue, not a client issue, so we should be able to retry on it
2021-07-26 12:20:10 -04:00
FoxxMD
7cda47183f Simplify cli options and improve web server checks
* Check for in-use web port before starting so we can display a useful error message
* Remove 'web' command and instead make it an optional argument for `run` (default to web)
* Update dockerfile to use run command
* Remove remaining commander option defaults since they are defined in config builder now
2021-07-26 12:18:51 -04:00
FoxxMD
a9edd4b998 Fix miss percent formatting 2021-07-26 09:55:03 -04:00
FoxxMD
9e1d5f1dd4 Ui polish
* Add more caching information
* Add more visual distinction for selected subreddit
* Add querystring based on shown subreddit so view persists between reload
2021-07-23 17:03:30 -04:00
FoxxMD
4617e06459 Fix missing await on usernote cache call 2021-07-23 16:44:21 -04:00
FoxxMD
b2b4988246 Consolidate all caching
* Use cache manager instance in UserNotes and callback to report stats
* Simplify sub resource config interface and setter
* Use cache manager for express session
* Add session cache provider to operator config
2021-07-23 14:48:33 -04:00
FoxxMD
e1c24133dd Use name instead of user id for banning 2021-07-23 13:26:53 -04:00
FoxxMD
12a4e0436e Simplify resource stats generation 2021-07-23 11:46:37 -04:00
FoxxMD
484931d8b5 Fix bool middleware running on wrong route 2021-07-23 11:10:53 -04:00
FoxxMD
abf2674f80 Detect noisy html errors from reddit 2021-07-22 21:37:43 -04:00
FoxxMD
1f3cfbeef9 Fix cache miss value comparison 2021-07-22 21:37:08 -04:00
FoxxMD
2b21885a55 Fix default cache stuff when subreddit manager isn't built 2021-07-22 17:59:45 -04:00
FoxxMD
232925e691 Maybe fix error on ban 2021-07-22 17:53:47 -04:00
FoxxMD
a91b9ab146 Use more robust caching implementation
* use node-cache-manager so operator has a choice of memory or redis
* BC update TTL values to be in seconds instead of milliseconds
* Count requests and misses for cache
* display cache stats in ui
2021-07-22 17:47:19 -04:00
FoxxMD
73c3052c69 Refactor how configuration is parsed
* Implement a weak/strong interface for operator (app) config
* Consolidate config parsing code into ConfigBuilder
* Implement parsing configuration from a json file
* Refactor configuration parsing so there is a clear hierarchy to where and how config is overridden
2021-07-22 15:17:41 -04:00
FoxxMD
4fbb3edf8b Lots of regex fixes 2021-07-21 16:50:49 -04:00
FoxxMD
c69d66c001 Initial implementation of Regex Rule 2021-07-21 15:47:26 -04:00
FoxxMD
9b53974152 Fix formatting for rolling average 2021-07-21 12:14:28 -04:00
FoxxMD
13d3ed2314 Improve repeat pattern detection for repeat activity rule
* Fuzzy match on all identifiers to enable detecting small variations in title/body
* For reddit media submissions (image/video) check title instead of url
* For submissions with external url also check title
2021-07-21 12:09:51 -04:00
FoxxMD
9d7505fa38 More UI improvements
* Move theme toggle to title to make room in the subreddit tabs when there are many, many subreddits
* Implement check permalink with subreddit context -- can run any permalink on a subreddit's config
2021-07-21 12:07:37 -04:00
FoxxMD
1b94316987 Fix log level for api nanny 2021-07-20 20:30:06 -04:00
FoxxMD
9316019b01 Implement api nanny mode to help with heavy api usage 2021-07-20 20:15:15 -04:00
FoxxMD
4642f67104 Implement loading operator arguments (main program) from file 2021-07-20 16:38:35 -04:00
FoxxMD
a78692d7a0 Even more polish 2021-07-20 13:11:37 -04:00
FoxxMD
3627a5f60a More UI polish 2021-07-20 12:48:32 -04:00
FoxxMD
6b04ea0a9d Compact stats/info 2021-07-20 00:57:47 -04:00
FoxxMD
f6217547ae Some ui cleanup 2021-07-20 00:23:48 -04:00
FoxxMD
f1b24eb4a2 Add unmoderated/modqueue check actions to ui 2021-07-19 23:16:27 -04:00
FoxxMD
c9bdae66dd Implement configurable shared modqueue for operator convenience
If the operator is running subreddits with lopsided traffic burdens then /r/mod may exceed the 100 activity limit with results coming entirely from subreddit X, while subs Y and Z have few activities. In this scenario Y and Z would never run since X takes up all the results. Therefore, default to individual modqueues and make the shared modqueue opt-in by the operator
2021-07-19 23:04:47 -04:00
FoxxMD
6ab162888b try-catch all the things 2021-07-19 22:37:50 -04:00
FoxxMD
dd202ac790 Add console to error/rejection transports 2021-07-19 22:37:39 -04:00
FoxxMD
eead88c9a7 Remove secondary client usage which was causing bug
For some reason using the client *again* here was causing the primary client usage to lose response data. I think it perhaps needs to be cloned, or a new instance created altogether, rather than trying to re-use it.
2021-07-19 22:37:22 -04:00
FoxxMD
f548be8060 Fix "All" actions type 2021-07-19 16:44:03 -04:00
FoxxMD
5df4fd5ccc Separate event polling and activity processing systems to improve robustness
* Use async/queue for handling activity processing on each manager
* Refactor polling to push activities into activity queue
* Refactor manager state to control activity processing queue and event polling independently
* Pause activity processing and wait until no work is being done to do config update
* Add way more logging for new systems
* Add basic ui controls/view for new systems
2021-07-19 16:32:41 -04:00
FoxxMD
b25001b7af Reduce false positives for repeat self/comment activities
* For self submissions use "title + body" to identify repeats instead of just body -- body could potentially be empty
* Add "minWordCount" with default of 1 so that blank comments are ignored
2021-07-18 21:30:44 -04:00
FoxxMD
8733717cda Remove submission title on comment item peek
Erroneously returning a proxy object instead of the actual submission. Don't really need the title since it's in the permalink, so remove it entirely to avoid another api call
2021-07-18 21:05:56 -04:00
FoxxMD
6167d45e49 Refactor configuration loading to be independent of manager initialization
* Makes it easier to deal with never-successful subs (from startup) and invalid configs during run
* Paves the way for adding managers in-situ
* Add validConfig property to manager to track this and in UI
* Track if user manually stopped manager so we don't try to restart on heartbeat
2021-07-16 16:57:25 -04:00
FoxxMD
694842226b Cover case where user declines oauth 2021-07-16 15:35:25 -04:00
FoxxMD
02ad661226 Cover case where outside user logs in 2021-07-16 15:18:38 -04:00
FoxxMD
3be62f5560 Use partials for rendering head/title 2021-07-16 15:18:01 -04:00
FoxxMD
eb84df5133 Set dark theme as default 2021-07-16 12:12:17 -04:00
FoxxMD
4b11e348ba Update some logging to be more subreddit specific 2021-07-16 12:08:19 -04:00
FoxxMD
9edacf29fa Refactor log tracking to handle limits per subreddit
Makes the 'limit' option in the UI more accurate -- keep track of $max number of logs per subreddit and then combine at render time
2021-07-16 11:50:34 -04:00
FoxxMD
56c13474d9 Cover edge case where logs don't exist for a subreddit 2021-07-16 10:15:47 -04:00
FoxxMD
66a4144b7b retry on all stream types 2021-07-16 10:11:17 -04:00
FoxxMD
16880775fb Increase max polling retries
If reddit has a general outage this is where we want to wait the longest before giving up entirely since polling is infrequent (relative to running checks)
2021-07-16 10:11:03 -04:00
FoxxMD
d69d0e972c End stream when error occurs
Stream should also stop polling. It's up to the retry handler and individual usage of stream in app/manager to restart it
2021-07-16 10:10:18 -04:00
FoxxMD
a9350c2828 Fix retry handler and broaden request conditions
* fix other retry compare statement
* check request for status code missing (timeout, socket timeout, address unavailable, etc.) or valid status code
* clarify wording in retry handler logging
2021-07-16 10:09:38 -04:00
FoxxMD
2fe06f21d9 Fix stats margin 2021-07-15 23:04:09 -04:00
FoxxMD
42d71a918f Implement URL checker 2021-07-15 22:58:20 -04:00
FoxxMD
0aa2b24f39 Implement actions 2021-07-15 20:22:27 -04:00
FoxxMD
4771efa32a Add status indicator to subreddit tab name 2021-07-15 20:01:07 -04:00
FoxxMD
1d9f4f32b8 Implement logout and reduce footprint of light dark toggle 2021-07-15 19:54:12 -04:00
FoxxMD
d84e6f1905 Implement oauth helper ui 2021-07-15 19:34:01 -04:00
FoxxMD
ae19d1c9a1 Fix log check 2021-07-15 17:10:31 -04:00
FoxxMD
f9c7cf433e Switch to web as default run command and provide fallback
* Use 'web' as run command for dockerfile
* If on 'web' and there is no redirect uri, fall back to cli (doesn't break compatibility this way)
2021-07-15 15:44:47 -04:00
FoxxMD
2917233728 More startup logging and better logging on invalid/missing credentials 2021-07-15 15:31:56 -04:00
FoxxMD
6dfb5823ba Add web ui port as configurable 2021-07-15 15:02:41 -04:00
FoxxMD
14e7275f64 Implement operator-specific UI components
* Add api limit, reset, and heartbeat interval to All overview when operator is viewing
* Stream op stats on log emit
* Add env/arg for setting "Operated by"
2021-07-15 14:57:05 -04:00
FoxxMD
1fbe6b708d More layout improvements 2021-07-15 13:37:05 -04:00
FoxxMD
495213bba9 Improvements to web ui
* Implement stats tracking per manager
* Render stats per sub and overall in ui
* Clean up ui layout
2021-07-15 12:54:32 -04:00
FoxxMD
15e031d448 Implement web ui
* Refactor program options to allow running as web
* Implement authentication using reddit oauth
* Use session in memory to store identification and accessible subreddits
* Implement socket.io with shared session to enable streaming logs
* Implement log streaming with per-subreddit views
2021-07-15 01:18:21 -04:00
FoxxMD
6994bbe770 Remove erroneous delayUntil default 2021-07-13 14:32:46 -04:00
FoxxMD
a3c923bda6 Fix passing dryrun arg 2021-07-13 10:31:29 -04:00
FoxxMD
a40c4c5e58 Fix erroneous debug change for heartbeat interval 2021-07-12 13:45:19 -04:00
FoxxMD
be9dcdee1c Refactor polling streams to reduce usage and improve error catching
* Re-implement all polling streams with extended poll class for catching errors
* Increase default polling interval to 30 seconds and limit to 50
* When using default interval/limit for mod polling use a shared stream between all managers and poll to /r/mod -- reduces api calls for polling mod streams to 1 regardless of how many subreddits are managed
2021-07-12 13:44:07 -04:00
FoxxMD
07b34caffb Add proxy options and handle polling errors
* Implement proxied snoowrap requests
* Extend snoostorm class so timeout/errors during polling can be caught
2021-07-12 10:01:53 -04:00
FoxxMD
c5a3404242 Remove erroneous debugging addition 2021-07-08 14:13:02 -04:00
FoxxMD
1e03b38f0a Add polling info on options build 2021-07-07 17:19:56 -04:00
FoxxMD
f64be77e70 Change log level of processing delay statement 2021-07-07 17:02:00 -04:00
FoxxMD
a3da77874b Ignore doc updates for docker 2021-07-07 16:42:06 -04:00
FoxxMD
a9f740c9fa Fix example links 2021-07-07 16:40:45 -04:00
FoxxMD
00e6346cdb Fix wording 2021-07-07 16:38:55 -04:00
FoxxMD
951359ac39 Add tuning documentation for repeat activity 2021-07-07 16:10:24 -04:00
FoxxMD
15824e5d0f Implement delay before processing
Using criteria for the Activity being N seconds old allows for a delay, with the possibility of immediate processing to avoid an api call

Closes #23
2021-07-07 15:23:59 -04:00
FoxxMD
e7c794ec85 Fix activities example 2021-07-07 13:16:04 -04:00
FoxxMD
70e426de7e Fix dayjs link 2021-07-07 13:14:27 -04:00
FoxxMD
cc2518d086 Refactor documentation and ENHANCE 2021-07-07 13:06:51 -04:00
FoxxMD
5517c75d4c Update examples 2021-07-07 09:44:12 -04:00
FoxxMD
8e2fee6d50 Fix heartbeat interval 2021-07-06 16:52:45 -04:00
FoxxMD
ed8be6dda2 Refactor app/manager building for in-situ updates
* Separate manager instantiation from configuration flow so config can be reloaded
* Move wiki page parsing into manager for better encapsulation
* Check for wiki revision date on heartbeat and on checks if older than one minute
* Catch config parsing issues and retry on next heartbeat
2021-07-06 16:28:18 -04:00
FoxxMD
00e38b5560 Use correct media property for anchor parsing 2021-07-06 12:59:46 -04:00
FoxxMD
9cac11f436 Implement author parsing for audio/podcast related media
Parse spotify and anchor.fm media sources
2021-07-06 11:33:00 -04:00
FoxxMD
f591c3a05a Implement more powerful content parsing options
* Can get wiki pages from other subreddits
* Can fetch from an external url
2021-07-06 10:27:30 -04:00
FoxxMD
39fad91c7f Fix missing author criteria 2021-07-05 17:20:35 -04:00
FoxxMD
529b8fc03e Further improvements for subreddit name parsing
* Allow whitespace on either side of the regex value to parse since it's automatically trimmed when getting the capture group
* Implement sub name parsing everywhere subreddits can be specified and update documentation to remove prefix restrictions
2021-07-05 16:06:15 -04:00
FoxxMD
54eef5620d Update interfaces and documentation for new filters and item states 2021-07-05 15:39:08 -04:00
FoxxMD
99537fbebb Fix missing filtering behavior on repeat and add remove check
* Add missing include/exclude behavior for counting repeat submissions
* Add parameter to enable user to specify if removed activities should be included
2021-07-05 15:38:49 -04:00
FoxxMD
4c3f9ee082 Fix remove check on remove action 2021-07-05 15:37:37 -04:00
FoxxMD
5b028b6a45 Fix some item state checks and implement subreddit filtering on window
* Fix how removed check is performed since there are different behaviors for submission/comment
* Add filtered and deleted states for item check
* Add subreddit filters (include/exclude) on window criteria
2021-07-05 15:37:19 -04:00
FoxxMD
859bcf9213 Implement subreddit name parser to allow more lax input
Use regex to extract subreddit name regardless of prefix
2021-07-05 15:34:59 -04:00
FoxxMD
e790f7c260 Fix issue when activities retrieved for attribution rule 2021-06-25 15:20:31 -04:00
FoxxMD
20358294ce Fix domain ident aliases when not aggregating parent domain 2021-06-25 10:29:19 -04:00
FoxxMD
e0f18dc0a2 Add typescript dep 2021-06-25 10:28:39 -04:00
FoxxMD
9a788a8323 Add file logging for uncaught rejection/exceptions 2021-06-23 16:05:58 -04:00
FoxxMD
bed9a9682a Add item is key in log on found 2021-06-23 15:17:45 -04:00
FoxxMD
d39ce13209 Big improvements for Attribution Rule
* Move most qualifier properties into criteria
* Allow filtering on specific domains and consolidate "use sub as reference" into a special string to use here
* Above also enables using rule on Comment
* Enable more submission type filtering options and allow multiple options (link, media, self -- or a combination of any)
* Provide count and percent ranges in results
* Provide domains and friendly titles in results
2021-06-23 15:09:56 -04:00
FoxxMD
4bd25e53b0 Better formatting for logging errors
Don't erase the original message if there is one
2021-06-23 14:56:20 -04:00
FoxxMD
ac87d5acfa Check if Activity is deleted before running checks 2021-06-23 14:55:45 -04:00
FoxxMD
0f541f1961 Try to fix attribution undefined issue on criteria results array having undefined index 2021-06-22 16:26:30 -04:00
FoxxMD
db2be949b4 Refactor polling to enable polling from multiple sources
* New polling sources: unmoderated and modqueue
* Can now poll from many sources
2021-06-22 14:47:20 -04:00
FoxxMD
8c6b18cf4d Add optional subreddit nickname to help make logs more readable 2021-06-22 13:00:53 -04:00
FoxxMD
add4204304 Make check labels more concise
* Shorten prefix to 'CHK'
* Truncate check names to 25 characters
2021-06-22 13:00:35 -04:00
FoxxMD
927d4ef07e Refactor recent activity to provide useful information on fail 2021-06-22 12:42:43 -04:00
FoxxMD
b8c12009ee Refactor history rule to provide useful logging information on fail 2021-06-22 11:59:43 -04:00
FoxxMD
7f9b4ce6a0 Refactor repeat rule to provide useful logging information on fail 2021-06-22 11:29:07 -04:00
FoxxMD
ad8a668a08 Make pass/fail symbols constants 2021-06-22 11:28:37 -04:00
FoxxMD
84c5e97c92 Refactor Attribution so fail condition also logs useful information 2021-06-22 10:52:29 -04:00
FoxxMD
03b2cb36ab Change pass/fail symbols so they aren't emojis
Makes logging more uniform
2021-06-22 10:52:01 -04:00
FoxxMD
93bdb89115 Shorten action logging label 2021-06-22 10:51:29 -04:00
FoxxMD
702e2ccccf Shorten Rule labels 2021-06-22 10:47:51 -04:00
FoxxMD
631d67928d Shorten kind for some rules to make logging more readable 2021-06-22 10:47:13 -04:00
FoxxMD
eea04344c0 Check for error timeout code 2021-06-21 22:53:50 -04:00
FoxxMD
7f29ade87b Refactor results to make check complexity easier to visualize in log
* Differentiate rule results from rule set results
* Implement function to parse all results for check and print and/or PLUS triggered state for all rule/sets in check
2021-06-21 17:10:24 -04:00
FoxxMD
cced86381b Fix minimum items for subthreshold 2021-06-21 13:57:31 -04:00
FoxxMD
01c575f2b2 Rename history criteria operator name to be consistent 2021-06-21 13:34:27 -04:00
FoxxMD
f1d04d4718 Fix regex named group 2021-06-21 13:33:49 -04:00
FoxxMD
6ca65079b3 Add author and item checks to Actions
So that Actions can be skipped based on item/author criteria
2021-06-21 13:00:45 -04:00
FoxxMD
73236e44ad Add karma comparisons and verified check for author criteria 2021-06-21 12:02:33 -04:00
FoxxMD
4bef85e1e4 Refactor most thresholds to use comparison operators (like automod) 2021-06-21 11:40:04 -04:00
FoxxMD
532f6aa3d8 Fix recent activity default thresholds 2021-06-20 14:20:39 -04:00
FoxxMD
e1e5b26264 One more duration fix 2021-06-20 14:15:06 -04:00
FoxxMD
46a583e20a Fix regex constant usage for duration 2021-06-20 14:01:00 -04:00
FoxxMD
24064dfe03 Refactor how activity window is parsed to use dayjs shorthand string
* Parse dayjs shorthand IE "90 days" alongside the rest of the activity types
* Update schema with clearer information on how activity window can be formed
2021-06-18 17:36:43 -04:00
FoxxMD
ad91901cc2 Refactor history criteria to use string regex parsing (like automod) 2021-06-18 16:33:13 -04:00
FoxxMD
58c51e56b1 Test Author account age using string comparison (like automod) 2021-06-18 13:52:52 -04:00
FoxxMD
9850ccb8f3 Fix dryrun usage for comment action 2021-06-17 15:04:11 -04:00
FoxxMD
79b82dab0f Refactor footer to be configurable at subreddit and Action level 2021-06-17 14:43:22 -04:00
FoxxMD
9c059beb85 Add usernotes to content template render data and contextualize bot footer 2021-06-17 13:13:46 -04:00
FoxxMD
88be7d8836 Missed ban case in action factory 2021-06-16 22:42:46 -04:00
FoxxMD
20acc12460 Add footer to comment/ban content
Closes #5
2021-06-16 22:29:28 -04:00
FoxxMD
60c0569e21 Add missing actions
Closes #19
2021-06-16 22:04:44 -04:00
FoxxMD
879807390d Add YAML as configuration language to readme 2021-06-16 21:30:43 -04:00
FoxxMD
08413dbe16 Implement YAML parsing 2021-06-16 21:27:48 -04:00
FoxxMD
75cbde8b8b Improve logging levels and add end-run stats
* Increase some noisy log statements to verbose
* Display action summary on info level
* verbose -- Display run stats (checks, rules, actions) at end of event
* verbose -- Display reddit api stats (initial, current, used) at end of event
2021-06-16 13:31:37 -04:00
FoxxMD
3acf268313 Check notes length before trying to get current note 2021-06-16 12:30:28 -04:00
FoxxMD
97b9391f3b Remove debug statement derp 2021-06-16 12:21:19 -04:00
FoxxMD
f8ec0d7ee0 Fix and/or condition logic for checks and rulesets 2021-06-16 12:20:38 -04:00
FoxxMD
0002c1bc11 Allow rules to be optional, increase startup logging, and change default log level
* Allow rules to be optional in json -- if no rules are defined, actions are run immediately after the check passes author/item tests
* When logging is verbose, show much more detail about check stats, rules, and actions on startup
* Set verbose as the default log level. New users should have more information out of the box so they can understand how things work.
2021-06-16 11:38:45 -04:00
FoxxMD
a09f3fe4f1 Finally got action example working correctly 2021-06-16 10:39:27 -04:00
FoxxMD
daf66083d0 Schema documentation improvements 2021-06-16 10:33:36 -04:00
FoxxMD
7acd62d787 Refactor window criteria to actually work as described 2021-06-16 00:27:04 -04:00
FoxxMD
75889cc927 Add more error handling for reddit timeout issues 2021-06-15 17:05:14 -04:00
FoxxMD
db0440356c Refactor author usage to be more universal and change name to match item behavior
* (BC) rename authors to authorIs to match itemIs -- since they have the same behavior
* Add author filter to Check so it matches usage of itemIs
2021-06-15 16:12:06 -04:00
FoxxMD
016952128c Update documentation for Toolbox User Notes 2021-06-15 15:31:59 -04:00
FoxxMD
884966b8d3 Remove more debugging statements
ugh
2021-06-15 14:33:47 -04:00
FoxxMD
0ad7c66e9d Fix error display on config error 2021-06-15 14:26:42 -04:00
FoxxMD
c075e5fb24 Remove debug statements 2021-06-15 14:11:07 -04:00
FoxxMD
a3de885620 Add some action checks 2021-06-15 14:04:06 -04:00
FoxxMD
e29d19ada8 Implement itemIs test for Checks and Rules 2021-06-15 13:54:54 -04:00
FoxxMD
c52e1d5e1d Implement toolbox usernote action 2021-06-15 12:09:26 -04:00
FoxxMD
257563a3b8 Implement toolbox usernote author filter and rule criteria 2021-06-15 10:39:16 -04:00
FoxxMD
7761372091 Implement toolbox usernote read/write 2021-06-14 22:45:48 -04:00
FoxxMD
eb62e39975 Add unmoderated run command 2021-06-14 10:26:05 -04:00
FoxxMD
bdd72dc28e Add more schema examples 2021-06-12 00:54:23 -04:00
FoxxMD
e7b5a9bb60 Change repeat activity behavior when useSubmissionAsReference=true but not a link
Return false result instead of throwing an error since this is probably the expected behavior
2021-06-12 00:02:32 -04:00
FoxxMD
699f2577e5 Fix return value of author filter 2021-06-11 23:59:57 -04:00
FoxxMD
a22096a667 Fix snoowrap logger code
Accidentally not instantiating if debug not true
2021-06-11 23:43:52 -04:00
FoxxMD
a6e72dc79d Add rule ordering and api caching to advanced concepts 2021-06-11 19:35:25 -04:00
FoxxMD
962e44bf57 Fix some more links wtf 2021-06-11 19:20:16 -04:00
FoxxMD
2189d92725 Update recent activity examples descriptions 2021-06-11 19:17:09 -04:00
FoxxMD
14711efeb3 Use better link for example 2021-06-11 19:13:48 -04:00
FoxxMD
774b41c2a3 Add readmes for all sections 2021-06-11 19:12:11 -04:00
FoxxMD
4928b8b57a Add examples 2021-06-11 18:30:30 -04:00
FoxxMD
4b39794e2f Normalize named rule value 2021-06-11 18:29:19 -04:00
FoxxMD
c0ede0561c Add general percentages to history result data 2021-06-11 16:40:51 -04:00
FoxxMD
d7cea1f705 Add totalCount threshold property for recent activity rule 2021-06-11 16:00:17 -04:00
FoxxMD
3e29d7eb9f Remove noisy poll logging
Duh
2021-06-11 14:17:10 -04:00
FoxxMD
48ea60f886 Move heartbeat to app-level config 2021-06-11 14:15:54 -04:00
FoxxMD
1897d96a8f Add dryrun setting to help with testing
Can be configured at action, check, subreddit, or app level
2021-06-11 12:38:01 -04:00
FoxxMD
1279975a8a Add verbose log statements to log rule results and comment/report action contents 2021-06-11 11:56:55 -04:00
FoxxMD
7d0f7e8714 Decode content in render util instead of in each action 2021-06-11 11:44:36 -04:00
FoxxMD
f6b3f02e05 unescape content before report/comment 2021-06-11 10:36:37 -04:00
FoxxMD
1f439dc290 Re-add report length enforcement
Reddit returns 200 for longer content but then never actually makes the report
2021-06-11 10:28:51 -04:00
FoxxMD
0a299308fb Make history summary more succinct 2021-06-11 10:28:29 -04:00
FoxxMD
a84b39cc5a Wrap snoowrap logger function so arguments can be passed correctly to winston 2021-06-10 15:57:04 -04:00
FoxxMD
f64c6e0df5 Fix default for boolean option 2021-06-10 15:56:41 -04:00
FoxxMD
250313b6a8 Add separator between check name and description 2021-06-10 14:27:27 -04:00
FoxxMD
e4be9ed4e6 Display api limit on startup 2021-06-10 14:27:02 -04:00
FoxxMD
788af401b3 Support JSON5 for parsing configuration 2021-06-10 13:20:11 -04:00
FoxxMD
6bc74b383f Implement resource caching
User-configurable global/subreddit-level caching for author activities, wiki pages, and author checks
2021-06-10 13:13:57 -04:00
FoxxMD
51825a594d Handle errors from check iteration 2021-06-10 10:18:26 -04:00
FoxxMD
186d9ac4b7 Implement History rule 2021-06-09 17:09:33 -04:00
FoxxMD
ac02fdabfd Log link to invalid wiki page on error 2021-06-09 14:56:16 -04:00
FoxxMD
0eeb204371 Update ajv and fix schema generation issues
* Update ajv to fix #ref warning
* Consolidate ajv instantiation so config is always the same
* Remove propOrder flag for schema generation since it's out of spec
2021-06-09 14:39:23 -04:00
FoxxMD
64a97ee048 Logging improvements
* Insert activity identifier into logging labels after subreddit using dynamic labels
* Simplify logger creation (don't need shuffle using improvements from above)
* Add logging to Actions
* Make check logging clearer and more succinct
* Log more information on startup
2021-06-09 13:36:32 -04:00
FoxxMD
318a1d3326 Set heroku url to default branch 2021-06-08 16:16:58 -04:00
FoxxMD
08db50426b Show check details and summary by default (info level) 2021-06-08 16:15:36 -04:00
FoxxMD
77f7a0167c Wiki value typo 2021-06-08 16:10:44 -04:00
FoxxMD
23a9f9d652 Remove potentially problematic heroku env 2021-06-08 16:09:00 -04:00
FoxxMD
72ed72ce4a Add heroku quick deploy button 2021-06-08 16:07:53 -04:00
FoxxMD
8cea19c7f2 Remove default env vars 2021-06-08 16:01:18 -04:00
FoxxMD
8eeaac2d53 Update heroku app file 2021-06-08 15:41:57 -04:00
FoxxMD
3cf838ba9f Create heroku app file 2021-06-08 15:39:14 -04:00
FoxxMD
16f3c2268b Create heroku file 2021-06-08 15:31:34 -04:00
FoxxMD
3be20b910d Fix missing return on activity filter 2021-06-08 14:03:49 -04:00
FoxxMD
78aed4321a Add support for reddit permalink when running check command 2021-06-08 13:55:03 -04:00
FoxxMD
0fe2fa8934 Add submission from comments convenience method 2021-06-08 13:46:45 -04:00
FoxxMD
37ba1dc1bf Fix default value when reference submission has no repeats 2021-06-08 13:46:08 -04:00
FoxxMD
5905c910b0 Implement name references for actions and rules
Action/Rule objects can now be referenced by name from anywhere in the configuration
2021-06-08 12:40:00 -04:00
FoxxMD
d239d3c6cc Update dockerfile to use default run command 2021-06-08 00:52:23 -04:00
FoxxMD
16d0eebac6 Some small fixes for attribution 2021-06-08 00:35:34 -04:00
FoxxMD
1a393944c0 Refactor AttributionRule to be more robust and handle multiple window/thresholds
It's more useful to be able to check thresholds for multiple windows to get a more holistic idea of attribution percents
2021-06-08 00:32:25 -04:00
FoxxMD
9f270010b7 Add trace to winston log levels so it can be used with snoowrap 2021-06-08 00:31:31 -04:00
FoxxMD
2548cff367 Friendly print schema validation errors 2021-06-08 00:30:57 -04:00
FoxxMD
c7acda46a0 Implement AttributionRule 2021-06-07 17:45:08 -04:00
FoxxMD
530675179b Refactor activity window
* Truncate items to window length when too many retrieved
* Correctly compare dates
2021-06-07 17:44:20 -04:00
FoxxMD
7960423678 Fix missing bold format character 2021-06-07 14:09:11 -04:00
FoxxMD
4ddb0f0963 Update readme with new cli syntax 2021-06-07 13:58:16 -04:00
FoxxMD
8a54ce15cd Refactor RepeatSubmission rule into RepeatActivity to allow more flexibility in use
* Refactor item repeat logic completely to simplify allow scenarios
* Can check for repeat comments now
* Add more context data and markdown content to display all repeats
2021-06-07 13:39:50 -04:00
FoxxMD
01161c3493 Implement specific activity checking through cli
* Refactor application input to use commander for extensibility
* Add all args/env as options for easy readout on command line
* Add 'check' command to allow running checks against a specific activity
* BC: must specify 'run' to run regular manager/unattended operation
2021-06-07 13:38:37 -04:00
FoxxMD
9970156a3d Fix leaf detection when undefined 2021-06-07 13:36:11 -04:00
FoxxMD
b437156d99 Clean up logging 2021-06-04 16:20:25 -04:00
FoxxMD
de3a279dc3 More verbose debug logging of checks added to subreddit manager on load 2021-06-04 15:07:30 -04:00
FoxxMD
86a6a75119 Add missing domain prefix to permalink 2021-06-04 15:03:32 -04:00
FoxxMD
9634b59b3a Add some temporal convenience logging
* heartbeat logging with configurable interval
* configurable api limit warning
2021-06-04 14:58:13 -04:00
FoxxMD
37f7c99155 Add snoodebug arg/env to control snoowrap debug output independently 2021-06-04 14:53:59 -04:00
FoxxMD
a99ab9a64a Add permalink to peek content to make logs more convenient 2021-06-04 14:53:29 -04:00
FoxxMD
51fb942d34 Fix collapse formatting 2021-06-04 14:04:26 -04:00
FoxxMD
2433610c7f Add documentation on templating 2021-06-04 13:59:44 -04:00
FoxxMD
473e4b7684 Update readme
* Add wikiConfig env
* Collapse for config example
* Config example showcases templating
2021-06-04 13:20:36 -04:00
FoxxMD
020da4b5fe Refactor action parameters to use RuleResults for templating when rendering content 2021-06-04 13:12:18 -04:00
FoxxMD
08c085e3a9 Specify name on rule/check/action must conform to pattern
alphanumeric with spaces, underscore, dashes -- so we can use them to normalize rule results for templating
2021-06-04 13:11:56 -04:00
FoxxMD
c9c42e68f8 Pass manager options back from json build function 2021-06-04 13:10:52 -04:00
FoxxMD
53983475b6 Add more data to RuleResults for use in templating 2021-06-04 13:10:08 -04:00
FoxxMD
1883039391 Content convenience methods
* Use rule result data to make templating more betterer
* Convenience method for extracting item title/content
2021-06-04 13:09:38 -04:00
FoxxMD
574195475f Fix truncate function
Should be removing length of truncate string as well
2021-06-04 13:08:22 -04:00
FoxxMD
cb02345960 Add wikiLocation arg/env for specifying config location 2021-06-04 13:07:48 -04:00
FoxxMD
fc20ee9561 Truncate report content length to 100 characters to fit reddit spec 2021-06-04 10:17:18 -04:00
101 changed files with 19885 additions and 1737 deletions


@@ -4,3 +4,4 @@ Dockerfile
.gitignore
.git
src/logs
/docs


@@ -5,7 +5,9 @@
<excludeFolder url="file://$MODULE_DIR$/temp" />
<excludeFolder url="file://$MODULE_DIR$/.tmp" />
<excludeFolder url="file://$MODULE_DIR$/tmp" />
<excludeFolder url="file://$MODULE_DIR$/src/logs" />
</content>
<content url="file://$MODULE_DIR$/node_modules" />
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>


@@ -24,4 +24,8 @@ RUN mkdir -p $log_dir
VOLUME $log_dir
ENV LOG_DIR=$log_dir
CMD [ "node", "src/index.js" ]
ARG webPort=8085
ENV PORT=$webPort
EXPOSE $PORT
CMD [ "node", "src/index.js", "run" ]

191
README.md

@@ -19,22 +19,27 @@ Some feature highlights:
* Simple rule-action behavior can be combined to create any level of complexity in behavior
* One instance can handle managing many subreddits (as many as it has moderator permissions in!)
* Per-subreddit configuration is handled by JSON stored in the subreddit wiki
* Any text-based actions (comment, submission, message, etc...) can be configured via a wiki page or raw text in JSON
* Any text-based actions (comment, submission, message, usernotes, etc...) can be configured via a wiki page or raw text in JSON
* All text-based actions support [mustache](https://mustache.github.io) templating
* History-based rules support multiple "valid window" types -- [ISO 8601 Durations](https://en.wikipedia.org/wiki/ISO_8601#Durations), [Day.js Durations](https://day.js.org/docs/en/durations/creating), and submission/comment count limits.
* All rules support skipping behavior based on author criteria -- name, css flair/text, and moderator status
* Docker container support *(coming soon...)*
* Checks/Rules support skipping behavior based on:
* author criteria (name, css flair/text, moderator status, and [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes))
* Activity state (removed, locked, distinguished, etc.)
* Rules and Actions support named references so you write rules/actions once and reference them anywhere
* User-configurable global/subreddit-level API caching with optional redis-backend
* Support for [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) as criteria or Actions (writing notes)
* Docker container support
## Table of Contents
# Table of Contents
* [How It Works](#how-it-works)
* [Installation](#installation)
* [Configuration](#configuration)
* [Configuration And Docs](#configuration)
* [Usage](#usage)
### How It Works
Context Bot's configuration is made up of an array of **Checks**. Each **Check** consists of :
Context Bot's configuration is made up of a list of **Checks**. Each **Check** consists of :
#### Kind
@@ -42,7 +47,7 @@ Is this check for a submission or comment?
#### Rules
A list of **Rule** objects to run against the activity. If **any** Rule object is triggered by the activity then the Check runs its **Actions**
A list of **Rule** objects to run against the activity. Triggered Rules can cause the whole Check to trigger and run its **Actions**
#### Actions
@@ -78,88 +83,129 @@ Adding [**environmental variables**](#usage) to your `docker run` command will p
docker run -e "CLIENT_ID=myId" ... foxxmd/reddit-context-bot
```
### [Heroku Quick Deploy](https://heroku.com/about)
[![Deploy](https://www.herokucdn.com/deploy/button.svg)](https://dashboard.heroku.com/new?template=https://github.com/FoxxMD/reddit-context-bot)
## Configuration
Context Bot's [configuration schema](/src/Schema/App.json) conforms to [JSON Schema](https://json-schema.org/) Draft 7.
[**Check the docs for in-depth explanations of all concepts and examples**](/docs)
Context Bot's configuration can be written in JSON, [JSON5](https://json5.org/) or YAML. It's [schema](/src/Schema/App.json) conforms to [JSON Schema Draft 7](https://json-schema.org/).
I suggest using [Atlassian JSON Schema Viewer](https://json-schema.app/start) ([direct link](https://json-schema.app/view/%23?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json)) so you can view all documentation while also interactively writing and validating your config! From there you can drill down into any object, see its requirements, view an example JSON document, and live-edit your configuration on the right-hand side.
### Example Config
### Action Templating
Below is a configuration fulfilling the example given at the start of this readme:
Actions that can submit text (Report, Comment) will have their `content` values run through a [Mustache Template](https://mustache.github.io/). This means you can insert data generated by Rules into your text before the Action is performed.
```json
See here for a [cheatsheet](https://gist.github.com/FoxxMD/d365707cf99fdb526a504b8b833a5b78) and [here](https://www.tsmean.com/articles/mustache/the-ultimate-mustache-tutorial/) for a more thorough tutorial.
All Actions with `content` have access to this data:
```json5
{
"checks": [
{
"name": "repeatSpam",
"kind": "submission",
"rules": [
{
"kind": "repeatSubmission",
"gapAllowance": 2,
"threshold": 10
}
],
"actions": [
{
"kind": "remove"
},
{
"kind": "comment",
"content": "Thank you for your submission but we do not allow mass crossposting. Your submission has been removed",
"distingish": true
}
]
item: {
kind: 'string', // the type of item (comment/submission)
author: 'string', // name of the item author (reddit user)
permalink: 'string', // a url to the item
url: 'string', // if the item is a Submission then its URL (external for link type submission, reddit link for self-posts)
title: 'string', // if the item is a Submission, then the title of the Submission,
botLink: 'string' // a link to the bot's FAQ
},
{
"name": "selfPromoActivity",
"kind": "submission",
"rules": [
{
"kind": "recentActivity",
"thresholds": [
{
"subreddits": [
"YouTubeSubscribeBoost",
"AdvertiseYourVideos"
]
}
]
}
],
"actions": [
{
"kind": "report",
"content": "CB: self-promo"
},
{
"kind": "comment",
"content": "wiki:botconfig/contextbot/reportSelfPromo",
"distingish": true
}
]
rules: {
// contains all rules that were run and are accessible using the name, lowercased, with all spaces/dashes/underscores removed
}
]
}
```
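For example, a Comment Action's `content` can reference that data directly. A minimal sketch using only the `item` properties listed above (the exact Action options are defined in the schema):
```json5
{
  "kind": "comment",
  "content": "Hey {{item.author}}, your {{item.kind}} was flagged for review. Original link: {{item.permalink}} | Bot FAQ: {{item.botLink}}"
}
```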
The properties of `rules` are accessible using the rule name, lower-cased, with all spaces/dashes/underscores removed. If no name is given, `kind` is used as the `name`. Example:
```
"rules": [
{
"name": "My Custom-Recent Activity Rule", // mycustomrecentactivityrule
"kind": "recentActivity"
},
{
// name = repeatactivity
"kind": "repeatActivity",
}
]
```
**To see what data is available for individual Rules [consult the schema](#configuration) for each Rule.**
#### Quick Templating Tutorial
<details>
As a quick example for how you will most likely be using templating -- wrapping a variable in curly brackets, `{{variable}}`, will cause the variable value to be rendered instead of the brackets:
```
myVariable = 50;
myOtherVariable = "a text fragment"
template = "This is my template, the variable is {{myVariable}}, my other variable is {{myOtherVariable}}, and that's it!";
console.log(Mustache.render(template, {myVariable, myOtherVariable}));
// will render...
"This is my template, the variable is 50, my other variable is a text fragment, and that's it!";
```
**Note: When accessing an object or its properties you must use dot notation**
```
const item = {
aProperty: 'something',
anotherObject: {
bProperty: 'something else'
}
}
const content = "My content will render the property {{item.aProperty}} like this, and another nested property {{item.anotherObject.bProperty}} like this."
```
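Putting the pieces together, here is a minimal, runnable sketch (assuming the [mustache](https://www.npmjs.com/package/mustache) npm package) showing the object above rendered with dot notation:
```
import Mustache from 'mustache';

const item = {
    aProperty: 'something',
    anotherObject: {
        bProperty: 'something else'
    }
};

const content = "My content will render the property {{item.aProperty}} like this, and another nested property {{item.anotherObject.bProperty}} like this.";

// the object must be passed under the same key used in the template ("item")
console.log(Mustache.render(content, { item }));
// => "My content will render the property something like this, and another nested property something else like this."
```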
</details>
## Usage
`npm run start [list,of,subreddits] [...--options]`
```
Usage: index [options] [command]
CLI options take precedence over environmental variables
Options:
-h, --help display help for command
| CLI | Environmental Variable | Required | Description |
|------------------|------------------------|----------|----------------------------------------------------------------------------------------------------------------------------------|
| [First Argument] | | No | Comma-delimited list of subreddits to run on if you don't want to run on all the account has access to. |
| --clientId | CLIENT_ID | **Yes** | Your reddit application client id |
| --clientSecret | CLIENT_SECRET | **Yes** | Your reddit application client secret |
| --accessToken | ACCESS_TOKEN | **Yes** | A valid access token retrieved from completing the oauth flow for a user with your application. |
| --refreshToken | REFRESH_TOKEN | **Yes** | A valid refresh token retrieved from completing the oauth flow for a user with your application. |
| --logDir | LOG_DIR | No | The absolute path to where logs should be stored. use `false` to turn off log files. Defaults to `CWD/logs` |
| --logLevel | LOG_LEVEL | No | The minimum level to log at. Uses [Winston Log Levels](https://github.com/winstonjs/winston#logging-levels). Defaults to `info` |
Commands:
run [options] [interface] Monitor new activities from configured subreddits.
check [options] <activityIdentifier> [type] Run check(s) on a specific activity
unmoderated [options] <subreddits...> Run checks on all unmoderated activity in the modqueue
help [command] display help for command
Options:
-c, --operatorConfig <path> An absolute path to a JSON file to load all parameters from (default: process.env.OPERATOR_CONFIG)
-i, --clientId <id> Client ID for your Reddit application (default: process.env.CLIENT_ID)
-e, --clientSecret <secret> Client Secret for your Reddit application (default: process.env.CLIENT_SECRET)
-a, --accessToken <token> Access token retrieved from authenticating an account with your Reddit Application (default: process.env.ACCESS_TOKEN)
-r, --refreshToken <token> Refresh token retrieved from authenticating an account with your Reddit Application (default: process.env.REFRESH_TOKEN)
-u, --redirectUri <uri> Redirect URI for your Reddit application (default: process.env.REDIRECT_URI)
-t, --sessionSecret <secret> Secret used to encrypt session id/data (default: process.env.SESSION_SECRET || a random string)
-s, --subreddits <list...> List of subreddits to run on. Bot will run on all subs it has access to if not defined (default: process.env.SUBREDDITS)
-d, --logDir [dir] Absolute path to directory to store rotated logs in. Leaving undefined disables rotating logs (default: process.env.LOG_DIR)
-l, --logLevel <level> Minimum level to log at (default: process.env.LOG_LEVEL || verbose)
-w, --wikiConfig <path> Relative url to contextbot wiki page EX https://reddit.com/r/subreddit/wiki/<path> (default: process.env.WIKI_CONFIG || 'botconfig/contextbot')
--snooDebug Set Snoowrap to debug. If undefined will be on if logLevel='debug' (default: process.env.SNOO_DEBUG)
--authorTTL <ms> Set the TTL (ms) for the Author Activities shared cache (default: process.env.AUTHOR_TTL || 60000)
--heartbeat <s> Interval, in seconds, between heartbeat checks. (default: process.env.HEARTBEAT || 300)
--softLimit <limit> When API limit remaining (600/10min) is lower than this subreddits will have SLOW MODE enabled (default: process.env.SOFT_LIMIT || 250)
--hardLimit <limit> When API limit remaining (600/10min) is lower than this all subreddit polling will be paused until api limit reset (default: process.env.SOFT_LIMIT || 250)
--dryRun Set all subreddits in dry run mode, overriding configurations (default: process.env.DRYRUN || false)
--proxy <proxyEndpoint> Proxy Snoowrap requests through this endpoint (default: process.env.PROXY)
--operator <name> Username of the reddit user operating this application, used for displaying OP level info/actions in UI (default: process.env.OPERATOR)
--operatorDisplay <name> An optional name to display who is operating this application in the UI (default: process.env.OPERATOR_DISPLAY || Anonymous)
-p, --port <port> Port for web server to listen on (default: process.env.PORT || 8085)
-q, --shareMod If enabled then all subreddits using the default settings to poll "unmoderated" or "modqueue" will retrieve results from a shared request to /r/mod (default: process.env.SHARE_MOD || false)
-h, --help display help for command
```
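As a concrete (hypothetical) example, using the `run` command and options listed above with placeholder values:
```
npm run start -- run --clientId myId --clientSecret mySecret --accessToken myAccessToken --refreshToken myRefreshToken --subreddits mySubreddit
```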
### Logging
### Reddit App??
@@ -192,7 +238,8 @@ Visit https://not-an-aardvark.github.io/reddit-oauth-helper/
* report
* submit
* wikiread
* Click **Generate tokens*, you will get a popup asking you to approve access (or login) -- **the account you approve access with is the account that Bot will control.**
* wikiedit (if you are using Toolbox User Notes)
* Click **Generate tokens**, you will get a popup asking you to approve access (or login) -- **the account you approve access with is the account that Bot will control.**
* After approving, an **Access Token** and **Refresh Token** will be shown at the bottom of the page. Write these down.
You should now have all the information you need to start the bot.

33
app.json Normal file

@@ -0,0 +1,33 @@
{
"name": "Reddit Context Bot",
"description": "An event-based, reddit moderation bot built on top of snoowrap and written in typescript",
"repository": "https://github.com/FoxxMD/reddit-context-bot",
"stack": "container",
"env": {
"CLIENT_ID": {
"description": "Client ID for your Reddit application",
"value": "",
"required": true
},
"CLIENT_SECRET": {
"description": "Client Secret for your Reddit application",
"value": "",
"required": true
},
"REFRESH_TOKEN": {
"description": "Refresh token retrieved from authenticating an account with your Reddit Application",
"value": "",
"required": true
},
"ACCESS_TOKEN": {
"description": "Access token retrieved from authenticating an account with your Reddit Application",
"value": "",
"required": true
},
"WIKI_CONFIG": {
"description": "Relative url to contextbot wiki page EX https://reddit.com/r/subreddit/wiki/<path>",
"value": "botconfig/contextbot",
"required": false
}
}
}

211
docs/README.md Normal file

@@ -0,0 +1,211 @@
# Documentation
# Table of Contents
* [Getting Started](#getting-started)
* [How It Works](#how-it-works)
* [Concepts](#concepts)
* [Rule](#rule)
* [Examples](#available-rules)
* [Rule Set](#rule-set)
* [Examples](#rule-set-examples)
* [Action](#action)
* [Examples](#available-actions)
* [Filters](#filters)
* [Configuration](#configuration)
* [Common Resources](#common-resources)
* [Activities `window`](#activities-window)
* [Comparisons](#thresholds-and-comparisons)
* [Best Practices](#best-practices)
* [Subreddit-ready Configurations](#subreddit-ready-configurations)
* FAQ
## Getting Started
Review **at least** the **How It Works** and **Concepts** below and then head to the [**Getting Started documentation.**](/docs/gettingStarted.md)
## How It Works
Where possible Reddit Context Bot (RCB) uses the same terminology as, and emulates the behavior of, **automoderator**, so if you are familiar with that then much of this may seem familiar to you.
RCB's lifecycle looks like this:
#### 1) A new event in your subreddit is received by RCB
The events RCB watches for are configured by you. These can be new modqueue items, submissions, or comments.
#### 2) RCB sequentially processes each Check in your configuration
A **Check** is a set of:
* One or more **Rules** that define what conditions should **trigger** this Check
* One or more **Actions** that define what the bot should do once the Check is **triggered**
#### 3) Each Check is processed, *in order*, until a Check is triggered
Once a Check is **triggered** no more Checks will be processed. This means all subsequent Checks in your configuration (in the order you listed them) are basically skipped.
#### 4) All Actions from that Check are executed
After all Actions are executed RCB returns to waiting for the next Event.
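As a rough illustration (a minimal sketch based on the example shapes in the main README, not a complete or validated configuration), a single Check in a subreddit's config looks something like this:
```json5
{
  "checks": [
    {
      "name": "selfPromoReport",   // hypothetical check name
      "kind": "submission",        // which kind of Event this Check runs against
      "rules": [
        // the conditions that trigger the Check
        {
          "kind": "recentActivity",
          "thresholds": [
            {"subreddits": ["SomePromoSubreddit"]}   // hypothetical subreddit
          ]
        }
      ],
      "actions": [
        // what the bot does once the Check is triggered
        {"kind": "report", "content": "CB: possible self-promo"}
      ]
    }
  ]
}
```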
## Concepts
Core, high-level concepts regarding how RCB works.
### Checks
TODO
### Rule
A **Rule** is some set of **criteria** (conditions) that are tested against an Activity (comment/submission), a User, or a User's history. A Rule is considered **triggered** when the **criteria** for that rule are found to be **true** for whatever is being tested against.
There are generally three main properties for a Rule:
* **Criteria** -- The conditions/values you want to test for.
* **Activities Window** -- If applicable, the range of activities that the **criteria** will be tested against.
* **Rule-specific options** -- Any number of options that modify how the **criteria** are tested.
RCB has different **Rules** that can test against different types of behavior and aspects of a User, their history, and the Activity (submission/comment) being checked.
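As a sketch of how those three parts typically map onto configuration (property names vary by Rule, so consult the schema for specifics):
```json5
{
  "kind": "recentActivity",            // which Rule to run
  "window": "90 days",                 // Activities Window -- the range of the author's history to test
  "thresholds": [                      // Criteria -- what to look for within that window
    {"subreddits": ["SomeSubreddit"]}
  ]
  // any Rule-specific options would also go here
}
```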
#### Available Rules
Find detailed descriptions of all the Rules, with examples, below:
* [Attribution](/docs/examples/attribution)
* [Recent Activity](/docs/examples/recentActivity)
* [Repeat Activity](/docs/examples/repeatActivity)
* [History](/docs/examples/history)
* [Author](/docs/examples/author)
### Rule Set
A **Rule Set** is a "grouped" set of `Rules` with a **trigger condition** specified.
Rule Sets can be used interchangeably with other **Rules** and **Rule Sets** in the `rules` list of a **Check**.
They allow you to create more complex trigger behavior by combining multiple rules into one "parent rule".
It consists of:
* **condition** -- Under what condition should the Rule Set be considered triggered?
* `AND` -- ALL Rules in the Rule Set must **trigger** in order for the Rule Set to **trigger.**
* `OR` -- ANY Rule in the Rule Set that is **triggered** will trigger the whole Rule Set.
* **rules** -- The **Rules** for the Rule Set.
Example
```json5
{
"condition": "AND",
"rules": [
// all the rules go here
]
}
```
#### Rule Set Examples
* [**Detailed Example**](/docs/examples/advancedConcepts/ruleSets.json5)
### Action
An **Action** is some action the bot can take against the checked Activity (comment/submission) or Author of the Activity. RCB has Actions for most things a normal reddit user or moderator can do.
### Available Actions
* Remove (Comment/Submission)
* Flair (Submission)
* Ban (User)
* Approve (Comment/Submission)
* Comment (Reply to Comment/Submission)
* Lock (Comment/Submission)
* Report (Comment/Submission)
* [UserNote](/docs/examples/userNotes) (User, when /r/Toolbox is used)
For detailed explanation and options of what individual Actions can do [see the links in the `actions` property in the schema.](https://json-schema.app/view/%23/%23%2Fdefinitions%2FSubmissionCheckJson?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json)
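As a quick sketch (following the shape used in the main README example; see the schema link above for the authoritative list of options), the `actions` list of a Check might look like:
```json5
"actions": [
  {"kind": "remove"},                                                   // remove the comment/submission
  {"kind": "comment", "content": "Your post was removed because..."},   // reply with a (templated) comment
  {"kind": "report", "content": "CB: rule triggered"}                   // or report it for human review
]
```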
### Filters
TODO
## Configuration
* For **Operator/Bot maintainers** see **[Operation Configuration](/docs/operatorConfiguration.md)**
* For **Moderators** see the [App Schema](https://json-schema.app/view/%23?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) and [examples](/docs/examples)
## Common Resources
Technical information on recurring, common data/patterns used in RCB.
### Activities `window`
Most **Rules** must define the **range of Activities (submissions and/or comments)** that will be used to check the criteria of the Rule. This range is defined wherever you see a `window` property in configuration.
Refer to the [Activities Window](/docs/activitiesWindow.md) documentation for a technical explanation with examples.
### Thresholds and Comparisons
TODO
## Best Practices
### Named Rules
All **Rules** in a subreddit's configuration can be assigned a **name** that can then be referenced from any other Check.
Create general-use rules so they can be reused and de-clutter your configuration. Additionally RCB will automatically cache the result of a rule so there is a performance and api usage benefit to re-using Rules.
See [ruleNameReuse.json5](/docs/examples/advancedConcepts/ruleNameReuse.json5) for a detailed configuration with annotations.
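As a rough sketch of the idea (the authoritative reference syntax is in the annotated example linked above; this snippet assumes a Rule given a `name` in one Check can later be referenced by that name):
```json5
{
  "checks": [
    {
      "name": "removeSpam",
      "kind": "submission",
      "rules": [
        {"name": "Frequent Promo Subs", "kind": "recentActivity" /* ...criteria... */}
      ],
      "actions": [{"kind": "remove"}]
    },
    {
      "name": "reportSpam",
      "kind": "submission",
      // re-use the Rule defined above by name -- its cached result is re-used as well
      "rules": ["Frequent Promo Subs"],
      "actions": [{"kind": "report", "content": "CB: promo activity"}]
    }
  ]
}
```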
### Check Order
Checks are run in the order they appear in your configuration, so you should place the Checks with the highest requirements and most severe Actions at the top, and those with the lowest requirements and most moderate Actions at the bottom.
This way, if an Activity warrants a more serious reaction, that Check is triggered first rather than a lower-requirement Check with less severe Actions triggering and causing all subsequent Checks to be skipped.
* Attribution >50% AND Repeat Activity 8x AND Recent Activity in 2 subs => remove submission + ban
* Attribution >20% AND Repeat Activity 4x AND Recent Activity in 5 subs => remove submission + flair user restricted
* Attribution >20% AND Repeat Activity 2x => remove submission
* Attribution >20% AND History comments <30% => remove submission
* Attribution >15% => report
* Repeat Activity 2x => report
* Recent Activity in 3 subs => report
* Author not vetted => flair new user submission
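A skeleton of that ordering in the `checks` list (a sketch only -- rule criteria and most options omitted, action kinds illustrative):
```json5
"checks": [
  // most severe first
  {"name": "removeAndBan", "kind": "submission", "rules": [/* highest requirements */], "actions": [{"kind": "remove"}, {"kind": "ban"}]},
  // ...
  // least severe last
  {"name": "reportOnly", "kind": "submission", "rules": [/* lowest requirements */], "actions": [{"kind": "report", "content": "CB: needs review"}]}
]
```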
### Rule Order
The ordering of your Rules within a Check/RuleSet can have an impact on Check performance (speed) as well as API usage.
Consider these three rules:
* Rule A -- Recent Activity => 3 subreddits => last 15 submissions
* Rule B -- Repeat Activity => last 3 days
* Rule C -- Attribution => >10% => last 90 days or 300 submissions
The first two rules are lightweight in their requirements -- Rule A can be completed in 1 API call and Rule B can potentially be completed in 1 API call.
However, depending on how active the Author is, Rule C will take *at least* 3 API calls just to get all activities (Reddit limits results to 100 items per call).
If the Check uses the `AND` condition for its rules (the default) and either Rule A or Rule B fails, then Rule C never runs -- saving 3 API calls plus the time spent waiting for each to return.
**It is therefore advantageous to list your lightweight Rules first in each Check.**
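In configuration terms this just means ordering the `rules` list accordingly. A sketch using the three rules above (rule kind identifiers are illustrative and criteria are omitted):
```json5
"rules": [
  {"kind": "recentActivity" /* Rule A -- ~1 API call */},
  {"kind": "repeatActivity" /* Rule B -- potentially 1 API call */},
  {"kind": "attribution"    /* Rule C -- 3+ API calls, only reached if A and B trigger */}
]
```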
### API Caching
Context bot implements some basic caching functionality for **Author Activities** and wiki pages (on Comment/Report Actions).
**Author Activities** are cached for a subreddit-configurable amount of time (10 seconds by default). A cached activities set can be re-used if the **window on a Rule is identical to the window on another Rule**.
This means that when possible you should re-use window values.
IE if you want to check an Author's Activities for a time range, try to always use **7 Days**; for absolute counts, try to always use **50 Items**.
Re-use will result in fewer API calls and faster Check times.
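For example, two Rules (even in different Checks) that use an identical `window` value can share one cached Author Activities set. A sketch (where exactly `window` lives varies by Rule; other properties omitted):
```json5
// in one Check
{"kind": "repeatActivity", "window": "7 days"}

// in another Check -- same window value, so the cached activities can be re-used
{"kind": "recentActivity", "window": "7 days", "thresholds": [{"subreddits": ["SomeSubreddit"]}]}
```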
## Subreddit-ready Configurations
TODO
## FAQ
TODO

217
docs/activitiesWindow.md Normal file

@@ -0,0 +1,217 @@
# Activity Window
Most **Rules** have a `window` property somewhere within their configuration. This property defines the range of **Activities** (submission and/or comments) that should be retrieved for checking the criteria of the Rule.
As an example, if you want to run a **Recent Activity Rule** to check if a user has had activity in /r/mealtimevideos you also need to define what range of activities you want to look at from that user's history.
## `window` property overview (tldr)
The value of `window` can be any of these types:
* `number` count of activities
* `string` [duration](#duration-string-recommended) or [iso 8601](#an-iso-8601-duration-string)
* [duration `object`](#duration-object)
* [ActivityWindowCriteria `object`](#activitywindowcriteria)
Examples of all of the above
<details>
```
// count, last 100 activities
{
"window": 100
}
// duration string, last 10 days
{
"window": "10 days"
}
// duration object, last 2 months and 5 days
{
"window": {
"months": 2,
"days": 5,
}
}
// iso 8601 string, last 15 minutes
{
"window": "PT15M"
}
// ActivityWindowCriteria, last 100 activities or 6 weeks of activities (whichever is found first)
{
"window": {
"count": 100,
"duration": "6 weeks"
}
}
```
</details>
## Types of Ranges
There are two types of values that can be used when defining a range:
### Count
This is the **number** of activities you want to retrieve. It's straightforward -- if you want to look at the last 100 activities for a user you can use `100` as the value.
### Duration
A **duration of time** between which all activities will be retrieved. This is a **relative value** that calculates the actual range based on **the duration of time subtracted from when the rule is run.**
For example:
* Today is **July 15th**
* You define a duration of **10 days**
Then the range of activities to be retrieved will be between **July 5th and July 15th** (10 days).
#### Duration Values
The value used to define the duration can be **any of these three types**:
##### Duration String (recommended)
A string consisting of
* A [Dayjs unit of time](https://day.js.org/docs/en/durations/creating#list-of-all-available-units)
* The value of that unit of time
Examples:
* `9 days`
* `14 hours`
* `80 seconds`
You can ensure your string is valid by testing it [here.](https://regexr.com/61em3)
##### Duration Object
If you need to specify multiple units of time for your duration you can instead provide a [Dayjs duration **object**](https://day.js.org/docs/en/durations/creating#list-of-all-available-units) consisting of Dayjs unit-values.
Example
```json
{
"days": 4,
"hours": 6,
"minutes": 20
}
```
##### An ISO 8601 duration string
If you're a real nerd you can also use a [standard duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) string.
Examples
* `PT15M` (15 minutes)
Ensure your string is valid by testing it [here.](https://regexr.com/61em9)
## ActivityWindowCriteria
This is an object that lets you specify more granular conditions for your range.
The full object looks like this:
```json
{
"count": 100,
"duration": "10 days",
"satisfyOn": "any",
"subreddits": {
"include": ["mealtimevideos","pooptimevideos"],
"exclude": ["videos"]
}
}
```
### Specifying Range
You may use **one or both range properties.**
If both range properties are specified then the value of `satisfyOn` determines how the final range is resolved.
#### Using `"satisfyOn": "any"` (default)
If **any** then Activities will be retrieved until one of the range properties is met, **whichever occurs first.**
Example
```json
{
"count": 80,
"duration": "90 days",
"satisfyOn": "any"
}
```
Activities are retrieved in chunks of 100 (or `count`, whichever is smaller)
* If 90 days of activities returns only 40 activities => returns those 40 activities
* If 80 activities covers only 20 days of range => returns those 80 activities
#### Using `"satisfyOn": "all"`
If **all** then both ranges must be satisfied. Effectively, whichever range produces the most Activities will be the one that is used.
Example
```json
{
"count": 100,
"duration": "90 days",
"satisfyOn": "all"
}
```
Activities are retrieved in chunks of 100 (or `count`, whichever is smaller)
* If at 90 days of activities => 40 activities retrieved
  * continue retrieving results until 100 activities
  * so range is >90 days of activities
* If at 100 activities => 20 days of activities retrieved
  * continue retrieving results until 90 days of range
  * so results in >100 activities
### Filtering Activities
You may filter retrieved Activities using an array of subreddits.
**Note:** Activities are filtered **before** the range check is made, so you will always end up with the specified range (though it may require more API calls if many activities are filtered out)
#### Include
Use **include** to specify which subreddits should be included in the results
Example where only activities from /r/mealtimevideos and /r/modsupport will be returned
```json
{
"count": 100,
"duration": "90 days",
"satisfyOn": "any",
"subreddits": {
"include": ["mealtimevideos","modsupport"]
}
}
```
#### Exclude
Use **exclude** to specify which subreddits should NOT be in the results
Example where activities from /r/mealtimevideos and /r/modsupport will not be returned in results
```json
{
"count": 100,
"duration": "90 days",
"satisfyOn": "any",
"subreddits": {
"exclude": ["mealtimevideos","modsupport"]
}
}
```
**Note:** `exclude` will be ignored if `include` is also present.

docs/examples/README.md Normal file

@@ -0,0 +1,25 @@
# Examples
This directory contains examples of valid, ready-to-go configurations for Context Bot for the purpose of:
* showcasing what the bot can do
* providing best practices for writing your configuration
* providing generally useful configurations **that can be used immediately** or as a jumping-off point for your configuration
### Examples Overview
* Rules
* [Attribution](/docs/examples/attribution)
* [Recent Activity](/docs/examples/recentActivity)
* [Repeat Activity](/docs/examples/repeatActivity)
* [History](/docs/examples/history)
* [Author](/docs/examples/author)
* [Toolbox User Notes](/docs/examples/userNotes)
* [Advanced Concepts](/docs/examples/advancedConcepts)
* [Rule Sets](/docs/examples/advancedConcepts/ruleSets.json5)
* [Named Rules](/docs/examples/advancedConcepts/ruleNameReuse.json5)
* [Check Ordering](/docs/examples/advancedConcepts)
* Subreddit-ready examples
* Coming soon...


@@ -0,0 +1,56 @@
### Named Rules
See [ruleNameReuse.json5](/docs/examples/advancedConcepts/ruleNameReuse.json5)
### Check Order
Checks are run in the order they appear in your configuration, so you should place your highest requirement/most severe action Checks at the top and your lowest requirement/mildest action Checks at the bottom.
This way, if an Activity warrants a more serious response, that Check is triggered first, rather than a lower-requirement Check with less severe Actions triggering and causing all subsequent Checks to be skipped. A minimal sketch of this ordering follows the list below.
* Attribution >50% AND Repeat Activity 8x AND Recent Activity in 2 subs => remove submission + ban
* Attribution >20% AND Repeat Activity 4x AND Recent Activity in 5 subs => remove submission + flair user restricted
* Attribution >20% AND Repeat Activity 2x => remove submission
* Attribution >20% AND History comments <30% => remove submission
* Attribution >15% => report
* Repeat Activity 2x => report
* Recent Activity in 3 subs => report
* Author not vetted => flair new user submission
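Below is a minimal sketch of that ordering, assuming only the rule/action kinds shown in the examples in this directory (the names, thresholds, and ban reason are illustrative placeholders):
```json5
{
  "checks": [
    {
      // highest requirement / most severe actions first
      "name": "Heavy Self Promotion",
      "kind": "submission",
      "rules": [
        {
          "name": "attr50",
          "kind": "attribution",
          "criteria": [{"threshold": "> 50%", "window": "90 days"}]
        }
      ],
      "actions": [
        {"kind": "remove"},
        {"kind": "ban", "reason": "repeat spam"}
      ]
    },
    {
      // lowest requirement / mildest action last -- only reached if the Check above did not trigger
      "name": "Possible Self Promotion",
      "kind": "submission",
      "rules": [
        {
          "name": "attr15",
          "kind": "attribution",
          "criteria": [{"threshold": "> 15%", "window": "90 days"}]
        }
      ],
      "actions": [
        {"kind": "report", "content": "Possible self promotion"}
      ]
    }
  ]
}
```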
### Rule Sets
The `rules` array on a `Check` can contain both `Rule` objects and `RuleSet` objects.
A **Rule Set** is a "nested" set of `Rule` objects with a passing condition specified. These allow you to create more complex trigger behavior by combining multiple rules.
See **[ruleSets.json5](/docs/examples/advancedConcepts/ruleSets.json5)** for a complete example, and consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FRuleSetJson?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a full reference.
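As a rough sketch (reusing rule shapes from the other examples in this directory), a Rule Set sits alongside plain Rules inside a Check's `rules` array:
```json5
"rules": [
  // a plain Rule
  {
    "name": "attr10all",
    "kind": "attribution",
    "criteria": [{"threshold": "> 10%", "window": "90 days"}]
  },
  // a Rule Set -- triggers the Check only if its nested Rules satisfy the condition
  {
    "condition": "AND",
    "rules": [
      {
        "name": "freekarma",
        "kind": "recentActivity",
        "thresholds": [{"threshold": ">= 1", "subreddits": ["FreeKarma4U"]}],
        "window": "7 days"
      },
      {
        "name": "lowComm",
        "kind": "history",
        "criteria": [{"window": "90 days", "comment": "< 30%"}]
      }
    ]
  }
]
```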
### Rule Order
The ordering of your Rules within a Check/RuleSet can have an impact on Check performance (speed) as well as API usage.
Consider these three rules:
* Rule A -- Recent Activity => 3 subreddits => last 15 submissions
* Rule B -- Repeat Activity => last 3 days
* Rule C -- Attribution => >10% => last 90 days or 300 submissions
The first two rules are lightweight in their requirements -- Rule A can be completed in 1 API call and Rule B can potentially be completed in 1 API call.
However, depending on how active the Author is, Rule C will take *at least* 3 API calls just to get all activities (Reddit returns at most 100 items per call).
If the Check uses the `AND` condition for its rules (the default) then if either Rule A or Rule B fails, Rule C never runs. That is 3 API calls never made, plus the time that would have been spent waiting for each to return.
**It is therefore advantageous to list your lightweight Rules first in each Check.**
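A sketch of such a `rules` array with the lightweight Rules listed first (names and thresholds are illustrative, not taken from a real configuration):
```json5
"rules": [
  // Rule A -- cheap: one API call
  {
    "name": "recentSubs",
    "kind": "recentActivity",
    "lookAt": "submissions",
    "thresholds": [{"threshold": ">= 1", "subreddits": ["sub1", "sub2", "sub3"]}],
    "window": 15
  },
  // Rule B -- cheap: usually one API call
  {
    "name": "repeat3d",
    "kind": "repeatActivity",
    "threshold": ">= 5",
    "window": "3 days"
  },
  // Rule C -- expensive (90 days or 300 submissions of history) and, with the default AND
  // condition, it only runs if Rules A and B both triggered
  {
    "name": "attr10",
    "kind": "attribution",
    "criteria": [{"threshold": "> 10%", "window": {"count": 300, "duration": "90 days"}}]
  }
]
```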
### API Caching
Context bot implements some basic caching functionality for **Author Activities** and wiki pages (on Comment/Report Actions).
**Author Activities** are cached for a subreddit-configurable amount of time (10 seconds by default). A cached activities set can be re-used if the **window on a Rule is identical to the window on another Rule**.
This means that when possible you should re-use window values.
I.e. if you want to check an Author's Activities for a time range, try to always use **7 Days**, or always use **50 Items** for absolute counts.
Re-use will result in fewer API calls and faster Check times.
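For instance (a sketch), these two Rules share an identical `window`, so the Author Activities fetched for the first can be served from cache for the second:
```json5
"rules": [
  {
    "name": "freekarma",
    "kind": "recentActivity",
    "thresholds": [{"threshold": ">= 1", "subreddits": ["FreeKarma4U"]}],
    "window": "7 days"
  },
  {
    "name": "repeat7d",
    "kind": "repeatActivity",
    "threshold": ">= 5",
    // identical window value => cache hit instead of another round of API calls
    "window": "7 days"
  }
]
```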


@@ -0,0 +1,75 @@
{
"checks": [
{
"name": "Auto Remove SP Karma",
"description": "Remove submission because author has self-promo >10% and posted in karma subs recently",
"kind": "submission",
"rules": [
// named rules can be referenced at any point in the configuration (where they occur does not matter)
// and can be used in any Check
// Note: rules do not transfer between subreddit configurations
"freekarmasub",
{
"name": "attr10all",
"kind": "attribution",
"criteria": [
{
"threshold": "> 10%",
"window": "90 days"
},
{
"threshold": "> 10%",
"window": 100
}
],
}
],
"actions": [
{
"kind": "remove"
},
{
"kind": "comment",
"content": "Your submission was removed because you are over reddit's threshold for self-promotion and recently posted this content in a karma sub"
}
]
},
{
"name": "Free Karma On Submission Alert",
"description": "Check if author has posted this submission in 'freekarma' subreddits",
"kind": "submission",
"rules": [
{
// rules can be re-used throughout a configuration by referencing them by name
//
// Besides letters and numbers, the rule name may only contain spaces, hyphens, and underscores
// The value used to reference it has all of these removed and is lower-cased
//
// so to reference this rule use the value 'freekarmasub'
"name": "Free_Karma-SUB",
"kind": "recentActivity",
"lookAt": "submissions",
"useSubmissionAsReference":true,
"thresholds": [
{
"threshold": ">= 1",
"subreddits": [
"DeFreeKarma",
"FreeKarma4U",
"FreeKarma4You",
"upvote"
]
}
],
"window": "7 days"
}
],
"actions": [
{
"kind": "report",
"content": "Submission posted {{rules.freekarmasub.totalCount}} times in karma {{rules.freekarmasub.subCount}} subs over {{rules.freekarmasub.window}}: {{rules.freekarmasub.subSummary}}"
}
]
},
]
}


@@ -0,0 +1,84 @@
{
"checks": [
{
"name": "Self Promo All or low comment",
"description": "SP >10% of all activities or >10% of submissions with low comment engagement",
"kind": "submission",
"rules": [
{
// this attribution rule is looking at all activities
//
// we want this one rule to trigger the check because >10% of all activity (submissions AND comments) is a good requirement
"name": "attr10all",
"kind": "attribution",
"criteria": [
{
"threshold": "> 10%",
"window": "90 days"
},
{
"threshold": "> 10%",
"window": 100
}
],
},
{
// this is a **Rule Set**
//
// it is made up of "nested" rules with a pass condition (AND/OR)
// if the nested rules pass the condition then the Rule Set triggers the Check
//
// AND = all nested rules must trigger for the Rule Set to trigger
// OR = any one of the nested Rules triggering will trigger the Rule Set
"condition": "AND",
// in this check we use an Attribution >10% on ONLY submissions, which is a lower requirement than the above attribution rule
// and combine it with a History rule looking for low comment engagement
// to make a "higher" requirement Rule Set out of two low requirement Rules
"rules": [
{
"name": "attr20sub",
"kind": "attribution",
"criteria": [
{
"threshold": "> 10%",
"thresholdOn": "submissions",
"window": "90 days"
},
{
"threshold": "> 10%",
"thresholdOn": "submissions",
"window": 100
}
],
"lookAt": "media"
},
{
"name": "lowOrOpComm",
"kind": "history",
"criteriaJoin": "OR",
"criteria": [
{
"window": "90 days",
"comment": "< 50%"
},
{
"window": "90 days",
"comment": "> 40% OP"
}
]
}
]
}
],
"actions": [
{
"kind": "remove"
},
{
"kind": "comment",
"content": "Your submission was removed because you are over reddit's threshold for self-promotion or exhibit low comment engagement"
}
]
},
],
}


@@ -0,0 +1,14 @@
# Attribution
The **Attribution** rule will aggregate an Author's content Attribution (youtube channels, twitter, website domains, etc.) and can check on their totals or percentages of all Activities over a time period:
* Total # of attributions
* As percentage of all Activity or only Submissions
* Look at all domains or only media (youtube, vimeo, etc.)
* Include self posts (by reddit domain) or not
Consult the [schema](https://json-schema.app/view/%23/%23%2Fdefinitions%2FCheckJson/%23%2Fdefinitions%2FAttributionJSONConfig?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the rule's properties.
### Examples
* [Self Promotion as percentage of all Activities](/docs/examples/attribution/redditSelfPromoAll.json5) - Check if Author is submitting much more than they comment.
* [Self Promotion as percentage of Submissions](/docs/examples/attribution/redditSelfPromoSubmissionsOnly.json5) - Check if any of Author's aggregated submission origins are >10% of their submissions


@@ -0,0 +1,39 @@
{
"checks": [
{
"name": "Self Promo Activities",
"description": "Check if any of Author's aggregated submission origins are >10% of entire history",
// check will run on a new submission in your subreddit and look at the Author of that submission
"kind": "submission",
"rules": [
{
"name": "attr10all",
"kind": "attribution",
// criteria defaults to OR -- so either of these criteria will trigger the rule
"criteria": [
{
// threshold can be a percent or an absolute number
"threshold": "> 10%",
// The default is "all" -- calculate percentage of entire history (submissions & comments)
// "thresholdOn": "all",
// look at last 90 days of Author's activities (comments and submissions)
"window": "90 days"
},
{
"threshold": "> 10%",
// look at Author's last 100 activities (comments and submissions)
"window": 100
}
],
}
],
"actions": [
{
"kind": "report",
"content": "{{rules.attr10all.largestPercent}}% of {{rules.attr10all.activityTotal}} items over {{rules.attr10all.window}}"
}
]
}
]
}


@@ -0,0 +1,40 @@
{
"checks": [
{
"name": "Self Promo Submissions",
"description": "Check if any of Author's aggregated submission origins are >10% of their submissions",
// check will run on a new submission in your subreddit and look at the Author of that submission
"kind": "submission",
"rules": [
{
"name": "attr10sub",
"kind": "attribution",
// criteria defaults to OR -- so either of these criteria will trigger the rule
"criteria": [
{
// threshold can be a percent or an absolute number
"threshold": "> 10%",
// calculate percentage of submissions, rather than entire history (submissions & comments)
"thresholdOn": "submissions",
// look at last 90 days of Author's activities (comments and submissions)
"window": "90 days"
},
{
"threshold": "> 10%",
"thresholdOn": "submissions",
// look at Author's last 100 activities (comments and submissions)
"window": 100
}
],
}
],
"actions": [
{
"kind": "report",
"content": "{{rules.attr10sub.largestPercent}}% of {{rules.attr10sub.activityTotal}} items over {{rules.attr10sub.window}}"
}
]
}
]
}


@@ -0,0 +1,38 @@
# Author
## Rule
The **Author** rule triggers if any [AuthorCriteria](https://json-schema.app/view/%23%2Fdefinitions%2FAuthorCriteria?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) from a list are either **included** or **excluded**, depending on which property you put them in.
**AuthorCriteria** that can be checked:
* name (u/userName)
* author's subreddit flair text
* author's subreddit flair css
* author's subreddit mod status
* [Toolbox User Notes](/docs/examples/userNotes)
The Author **Rule** is best used in conjunction with other Rules to short-circuit a Check based on who the Author is. It is easier to use a Rule to do this than to write **author filters** for every Rule (and it makes Rules more re-usable).
Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FAuthorRuleJSONConfig?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the rule's properties.
### Examples
* Basic examples
* [Flair new user Submission](/docs/examples/author/flairNewUserSubmission.json5) - If the Author does not have the `vet` flair then flair the Submission with `New User`
* [Flair vetted user Submission](/docs/examples/author/flairNewUserSubmission.json5) - If the Author does have the `vet` flair then flair the Submission with `Vetted`
* Used with other Rules
* [Ignore vetted user](/docs/examples/author/flairNewUserSubmission.json5) - Short-circuit the Check if the Author has the `vet` flair
## Filter
All **Rules** and **Checks** have an optional `authorIs` property that takes an [AuthorOptions](https://json-schema.app/view/%23%2Fdefinitions%2FAuthorOptions?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) object.
**This property works the same as the Author Rule except that:**
* On **Rules** if all criteria fail the Rule is **skipped.**
* If a Rule is skipped **it does not fail or pass** and so does not affect the outcome of the Check.
* However, if all Rules on a Check are skipped the Check will fail.
* On **Checks** if all criteria fail the Check **fails**.
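A sketch of the filter on a Check (the check contents are illustrative; the `flairText` criteria shape matches the Author Rule examples below). If the Author has the `vet` flair the criteria fail and the Check fails, so its Actions never run:
```json5
{
  "name": "Non-vetted free karma check",
  "kind": "submission",
  "authorIs": {
    // criteria fail (and the Check fails) if the Author has the "vet" flair
    "exclude": [
      {"flairText": ["vet"]}
    ]
  },
  "rules": [
    {
      "name": "freekarma",
      "kind": "recentActivity",
      "thresholds": [{"threshold": ">= 1", "subreddits": ["FreeKarma4U"]}],
      "window": "7 days"
    }
  ],
  "actions": [
    {"kind": "report", "content": "Non-vetted user recently active in a free karma sub"}
  ]
}
```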
### Examples
* [Skip recent activity check based on author](/docs/examples/author/authorFilter.json5) - Skip a Recent Activity check for a set of subreddits if the Author of the Submission has any set of flairs.


@@ -0,0 +1,69 @@
{
"checks": [
{
"name": "Karma/Meme Sub Activity",
"description": "Report on karma sub activity or meme sub activity if user isn't a memelord",
// check will run on a new submission in your subreddit and look at the Author of that submission
"kind": "submission",
"rules": [
{
"name": "freekarma",
"kind": "recentActivity",
"lookAt": "submissions",
"thresholds": [
{
"threshold": ">= 1",
"subreddits": [
"DeFreeKarma",
"FreeKarma4U",
]
}
],
"window": "7 days"
},
{
"name": "noobmemer",
"kind": "recentActivity",
// authors filter will be checked before a rule is run. If anything passes then the Rule is skipped -- it is not failed or triggered.
// if *all* Rules for a Check are skipped due to authors filter then the Check will fail
"authorIs": {
// each property (include/exclude) can contain multiple AuthorCriteria
// if any AuthorCriteria passes its test the Rule is skipped
//
// for an AuthorCriteria to pass all properties present on it must pass
//
// if "include" is present it will always run and exclude will be skipped
// "include:" []
"exclude": [
// for this to pass the Author of the Submission must not have the flair "Supreme Memer" and have the name "user1" or "user2"
{
"flairText": ["Supreme Memer"],
"names": ["user1","user2"]
},
{
// for this to pass the Author of the Submission must not have the flair "Decent Memer"
"flairText": ["Decent Memer"]
}
]
},
"lookAt": "submissions",
"thresholds": [
{
"threshold": ">= 1",
"subreddits": [
"dankmemes",
]
}
],
"window": "7 days"
}
],
"actions": [
{
"kind": "report",
"content": "Author has posted in free karma sub, or in /r/dankmemes and does not have meme flair in this subreddit"
}
]
}
]
}


@@ -0,0 +1,29 @@
{
"checks": [
{
"name": "Flair New User Sub",
"description": "Flair submission as sketchy if user does not have vet flair",
// check will run on a new submission in your subreddit and look at the Author of that submission
"kind": "submission",
"rules": [
{
"name": "newflair",
"kind": "author",
// rule will trigger if Author does not have "vet" flair text
"exclude": [
{
"flairText": ["vet"]
}
]
}
],
"actions": [
{
"kind": "flair",
"text": "New User",
"css": "orange"
}
]
}
]
}


@@ -0,0 +1,29 @@
{
"checks": [
{
"name": "Flair Vetted User Submission",
"description": "Flair submission as Approved if user has vet flair",
// check will run on a new submission in your subreddit and look at the Author of that submission
"kind": "submission",
"rules": [
{
"name": "newflair",
"kind": "author",
// rule will trigger if Author has "vet" flair text
"include": [
{
"flairText": ["vet"]
}
]
}
],
"actions": [
{
"kind": "flair",
"text": "Vetted",
"css": "green"
}
]
}
]
}


@@ -0,0 +1,75 @@
{
"checks": [
{
"name": "non-vetted karma/meme activity",
"description": "Report if Author has SP and has recent karma/meme sub activity and isn't vetted",
// check will run on a new submission in your subreddit and look at the Author of that submission
"kind": "submission",
"rules": [
{
// The Author Rule is best used in conjunction with other Rules --
// instead of having to write an AuthorFilter for every Rule where you want to skip it based on Author criteria
// you can write one Author Rule and make it fail on the required criteria
// so that the check fails and Actions don't run
"name": "nonvet",
"kind": "author",
"exclude": [
{
"flairText": ["vet"]
}
]
},
{
"name": "attr10",
"kind": "attribution",
"criteria": [
{
"threshold": "> 10%",
"window": "90 days"
},
{
"threshold": "> 10%",
"window": 100
}
],
},
{
"name": "freekarma",
"kind": "recentActivity",
"lookAt": "submissions",
"thresholds": [
{
"threshold": ">= 1",
"subreddits": [
"DeFreeKarma",
"FreeKarma4U",
]
}
],
"window": "7 days"
},
{
"name": "memes",
"kind": "recentActivity",
"lookAt": "submissions",
"thresholds": [
{
"threshold": ">= 3",
"subreddits": [
"dankmemes",
]
}
],
"window": "7 days"
}
],
// will NOT run if the Author for this Submission has the flair "vet"
"actions": [
{
"kind": "report",
"content": "Author has posted in free karma or meme subs recently"
}
]
}
]
}


@@ -0,0 +1,13 @@
# History
The **History** rule can check an Author's submission/comment statistics over a time period:
* Submission total or percentage of All Activity
* Comment total or percentage of all Activity
* Comments made as OP (commented in their own Submission) total or percentage of all Comments
Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FHistoryJSONConfig?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the rule's properties.
### Examples
* [Low Comment Engagement](/docs/examples/history/lowEngagement.json5) - Check if Author is submitting much more than they comment.
* [OP Comment Engagement](/docs/examples/history/opOnlyEngagement.json5) - Check if Author is mostly engaging only in their own content


@@ -0,0 +1,30 @@
{
"checks": [
{
"name": "Low Comment Engagement",
"description": "Check if Author is submitting much more than they comment",
// check will run on a new submission in your subreddit and look at the Author of that submission
"kind": "submission",
"rules": [
{
"name": "lowComm",
"kind": "history",
"criteria": [
{
// look at last 90 days of Author's activities
"window": "90 days",
// trigger if less than 30% of their activities in this time period are comments
"comment": "< 30%"
},
]
}
],
"actions": [
{
"kind": "report",
"content": "Low engagement: comments were {{rules.lowcomm.commentPercent}} of {{rules.lowcomm.activityTotal}} over {{rules.lowcomm.window}}"
}
]
}
]
}


@@ -0,0 +1,30 @@
{
"checks": [
{
"name": "Engaging Own Content Only",
"description": "Check if Author is mostly engaging in their own content only",
// check will run on a new submission in your subreddit and look at the Author of that submission
"kind": "submission",
"rules": [
{
"name": "opOnly",
"kind": "history",
"criteria": [
{
// look at last 90 days of Author's activities
"window": "90 days",
// trigger if more than 60% of their activities in this time period are comments as OP
"comment": "> 60% OP"
},
]
}
],
"actions": [
{
"kind": "report",
"content": "Selfish OP: {{rules.oponly.opPercent}} of {{rules.oponly.commentTotal}} comments over {{rules.oponly.window}} are as OP"
}
]
}
]
}


@@ -0,0 +1,10 @@
# Recent Activity
The **Recent Activity** rule can check if an Author has made any Submissions/Comments in a list of defined Subreddits.
Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FRecentActivityRuleJSONConfig?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the rule's properties.
### Examples
* [Free Karma Subreddits](/docs/examples/recentActivity/freeKarma.json5) - Check if the Author has recently posted in any "free karma" subreddits
* [Submission in Free Karma Subreddits](/docs/examples/recentActivity/freeKarmaOnSubmission.json5) - Check if the Author has posted the Submission this check is running on in any "free karma" subreddits recently


@@ -0,0 +1,40 @@
{
"checks": [
{
"name": "Free Karma Alert",
"description": "Check if author has posted in 'freekarma' subreddits",
// check will run on a new submission in your subreddit and look at the Author of that submission
"kind": "submission",
"rules": [
{
"name": "freekarma",
"kind": "recentActivity",
"useSubmissionAsReference": false,
// when `lookAt` is not present this rule will look for submissions and comments
// lookAt: "submissions"
// lookAt: "comments"
"thresholds": [
{
// for all subreddits, if the number of activities (sub/comment) is equal to or greater than 1 then the rule is triggered
"threshold": ">= 1",
"subreddits": [
"DeFreeKarma",
"FreeKarma4U",
"FreeKarma4You",
"upvote"
]
}
],
// will look at all of the Author's activities in the last 7 days
"window": "7 days"
}
],
"actions": [
{
"kind": "report",
"content": "{{rules.freekarma.totalCount}} activities in karma {{rules.freekarma.subCount}} subs over {{rules.freekarma.window}}: {{rules.freekarma.subSummary}}"
}
]
}
]
}


@@ -0,0 +1,41 @@
{
"checks": [
{
"name": "Free Karma On Submission Alert",
"description": "Check if author has posted this submission in 'freekarma' subreddits",
// check will run on a new submission in your subreddit and look at the Author of that submission
"kind": "submission",
"rules": [
{
"name": "freekarmasub",
"kind": "recentActivity",
// rule will only look at Author's submissions in these subreddits
"lookAt": "submissions",
// rule will only look at Author's submissions in these subreddits that have the same content (link) as the submission this event was made on
// In simpler terms -- rule will only check to see if the same link the author just posted is also posted in these subreddits
"useSubmissionAsReference":true,
"thresholds": [
{
// for all subreddits, if the number of activities (sub/comment) is equal to or greater than 1 then the rule is triggered
"threshold": ">= 1",
"subreddits": [
"DeFreeKarma",
"FreeKarma4U",
"FreeKarma4You",
"upvote"
]
}
],
// look at all of the Author's submissions in the last 7 days
"window": "7 days"
}
],
"actions": [
{
"kind": "report",
"content": "Submission posted {{rules.freekarmasub.totalCount}} times in karma {{rules.freekarmasub.subCount}} subs over {{rules.freekarmasub.window}}: {{rules.freekarmasub.subSummary}}"
}
]
}
]
}


@@ -0,0 +1,49 @@
# Repeat Activity
The **Repeat Activity** rule will check for patterns of repetition in an Author's Submission/Comment history. Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FRepeatActivityJSONConfig?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the rule's properties.
## Tuning
The most critical properties for this Rule are **gapAllowance** and **lookAt**.
### `lookAt`
Determines which Activities from a User's history are checked when looking for repeats.
Can be either:
* `all` -- All of a user's submissions and comments are considered
* `submissions` -- Only a user's submissions are considered
Defaults to `all`
### `gapAllowance`
`gapAllowance` determines how many **non-repeat Activities** are "allowed" between "in a row" submissions. Up to `N` non-repeat activities will be thrown away during the count, which allows checking for patterns with a bit of "fuzziness".
By default `gapAllowance: 0` so all repeats must be truly consecutive.
___
Consider the following example in a user's history:
* crossposts 2 times
* 1 comment
* crossposts 2 times
* 2 comments
* crossposts 4 times
Your goal is to remove a submission if it has been crossposted **5 times.**
With defaults for lookAt and gapAllowance this rule **would not be triggered** because no set of consecutive submissions was repeated 5 times.
With only `lookAt: "submissions"` this rule **would trigger** because all the comments would be ignored resulting in 8 repeats.
With only `gapAllowance: 1` this rule **would not trigger** because the 2 comment non-repeat would break the "in a row" count.
With only `gapAllowance: 2` this rule **would trigger** because the 1 and 2 comment non-repeats would be thrown out, resulting in 8 repeats.
**Note:** `lookAt: "submissions"` should be used with caution because all comments are thrown away. This isn't indicative of real repeat behavior if the user is a heavy commenter. For this reason the default is `all`.
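A sketch of a Rule tuned for the scenario above (the `window` value is illustrative): it triggers once the same Submission has been crossposted 5+ times while tolerating up to 2 non-repeat activities between repeats.
```json5
{
  "name": "xpost5",
  "kind": "repeatActivity",
  "useSubmissionAsReference": true,
  // throw away up to 2 non-repeat activities between repeats,
  // so the 1 and 2 comment gaps in the example above are ignored
  "gapAllowance": 2,
  "threshold": ">= 5",
  "window": "7 days"
}
```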
## Examples
* [Crosspost Spamming](/docs/examples/repeatActivity/crosspostSpamming.json5) - Check if an Author is spamming their Submissions across multiple subreddits
* [Burst-posting](/docs/examples/repeatActivity/burstPosting.json5) - Check if Author is crossposting their Submissions in short bursts


@@ -0,0 +1,30 @@
{
"checks": [
{
"name": "Burstpost Spam",
"description": "Check if Author is crossposting in short bursts",
// check will run on a new submission in your subreddit and look at the Author of that submission
"kind": "submission",
"rules": [
{
"name": "burstpost",
"kind": "repeatActivity",
// will only look at Submissions in Author's history that contain the same content (link) as the Submission this check was initiated by
"useSubmissionAsReference": true,
// the number of non-repeat activities (submissions or comments) to ignore between repeat submissions
"gapAllowance": 3,
// if the Author has posted this Submission 6 times, ignoring 3 non-repeat activities between each repeat, then this rule will trigger
"threshold": ">= 6",
// look at all of the Author's submissions in the last 7 days
"window": "7 days"
}
],
"actions": [
{
"kind": "report",
"content": "Author has burst-posted this link {{rules.burstpost.largestRepeat}} times over {{rules.burstpost.window}}"
}
]
}
]
}


@@ -0,0 +1,28 @@
{
"checks": [
{
"name": "Crosspost Spam",
"description": "Check if Author is spamming Submissions across subreddits",
// check will run on a new submission in your subreddit and look at the Author of that submission
"kind": "submission",
"rules": [
{
"name": "xpostspam",
"kind": "repeatActivity",
// will only look at Submissions in Author's history that contain the same content (link) as the Submission this check was initiated by
"useSubmissionAsReference": true,
// if the Author has posted this Submission 5 times consecutively then this rule will trigger
"threshold": ">= 5",
// look at all of the Author's submissions in the last 7 days
"window": "7 days"
}
],
"actions": [
{
"kind": "report",
"content": "Author has posted this link {{rules.xpostspam.largestRepeat}} times over {{rules.xpostspam.window}}"
}
]
}
]
}


@@ -0,0 +1,26 @@
# [Toolbox](https://www.reddit.com/r/toolbox/wiki/docs) [User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes)
Context Bot supports reading and writing [User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) for the [Toolbox](https://www.reddit.com/r/toolbox/wiki/docs) extension.
**You must have Toolbox setup for your subreddit and at least one User Note created before you can use User Notes related features on Context Bot.**
[Click here for the Toolbox Quickstart Guide](https://www.reddit.com/r/toolbox/wiki/docs/quick_start)
## Filter
User Notes are an additional criterion on [AuthorCriteria](https://json-schema.app/view/%23%2Fdefinitions%2FAuthorCriteria?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) that can be used alongside other Author properties for both [filtering rules and in the AuthorRule.](/docs/examples/author/)
Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FUserNoteCriteria?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the **UserNoteCriteria** object that can be used in AuthorCriteria.
### Examples
* [Do not tag user with Good User note](/docs/examples/userNotes/usernoteFilter.json5)
## Action
A User Note can also be added to the Author of a Submission or Comment with the [UserNoteAction.](https://json-schema.app/view/%23%2Fdefinitions%2FUserNoteActionJson?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json)
### Examples
* [Add note on user doing self promotion](/docs/examples/userNotes/usernoteSP.json5)


@@ -0,0 +1,45 @@
{
"checks": [
{
"name": "Self Promo Activities",
"description": "Tag SP only if user does not have good contributor user note",
// check will run on a new submission in your subreddit and look at the Author of that submission
"kind": "submission",
"rules": [
{
"name": "attr10all",
"kind": "attribution",
"author": {
"exclude": [
{
// the key of the usernote type to look for https://github.com/toolbox-team/reddit-moderator-toolbox/wiki/Subreddit-Wikis%3A-usernotes#working-with-note-types
// rule will not run if current usernote on Author is of type 'gooduser'
"type": "gooduser"
}
]
},
"criteria": [
{
"threshold": "> 10%",
"window": "90 days"
},
{
"threshold": "> 10%",
"window": 100
}
],
}
],
"actions": [
{
"kind": "usernote",
// the key of usernote type
// https://github.com/toolbox-team/reddit-moderator-toolbox/wiki/Subreddit-Wikis%3A-usernotes#working-with-note-types
"type": "spamwarn",
// content is mustache templated as usual
"content": "Self Promotion: {{rules.attr10all.titlesDelim}} {{rules.attr10sub.largestPercent}}%"
}
]
}
]
}


@@ -0,0 +1,36 @@
{
"checks": [
{
"name": "Self Promo Activities",
"description": "Check if any of Author's aggregated submission origins are >10% of entire history",
// check will run on a new submission in your subreddit and look at the Author of that submission
"kind": "submission",
"rules": [
{
"name": "attr10all",
"kind": "attribution",
"criteria": [
{
"threshold": "> 10%",
"window": "90 days"
},
{
"threshold": "> 10%",
"window": 100
}
],
}
],
"actions": [
{
"kind": "usernote",
// the key of usernote type
// https://github.com/toolbox-team/reddit-moderator-toolbox/wiki/Subreddit-Wikis%3A-usernotes#working-with-note-types
"type": "spamwarn",
// content is mustache templated as usual
"content": "Self Promotion: {{rules.attr10all.titlesDelim}} {{rules.attr10sub.largestPercent}}%"
}
]
}
]
}

docs/gettingStarted.md Normal file

@@ -0,0 +1,19 @@
### Creating Your Configuration
#### Get the raw contents of the configuration
* In a new tab open the github page for the configuration you want ([example](/docs/examples/repeatActivity/crosspostSpamming.json5))
* Click the **Raw** button...keep this tab open and move on to the next step
#### Edit your wiki configuration
* Visit the wiki page of the subreddit you want the bot to moderate
* Using default bot settings this will be `https://old.reddit.com/r/YOURSUBREDDIT/wiki/botconfig/contextbot`
* If the page does not exist create it, otherwise click **Edit**
* Copy-paste the configuration into the wiki text box
* In the previous tab you opened (for the configuration) **Select All** (Ctrl+A), then **Copy**
* On the wiki page **Paste** into the text box
* Save the edited wiki page
* Ensure the wiki page visibility is restricted
* On the wiki page click **settings** (**Page settings** in new reddit)
* Check the box for **Only mods may edit and view** and then **save**


@@ -0,0 +1,335 @@
The **Operator** configuration refers to the configuration used to configure the actual application/bot. This is different
from the **Subreddit** configuration, which is defined in each Subreddit's wiki and determines the rules/actions for
activities the Bot runs on.
# Table of Contents
* [Minimum Required Configuration](#minimum-required-configuration)
* [Defining Configuration](#defining-configuration)
* [Examples](#example-configurations)
* [Minimum Config](#minimum-config)
* [Using Config Overrides](#using-config-overrides)
* [Cache Configuration](#cache-configuration)
# Minimum Required Configuration
The minimum required configuration variables to run the bot on subreddits are:
* clientId
* clientSecret
* refreshToken
* accessToken
However, only **clientId** and **clientSecret** are required to run the **oauth helper** mode used to generate the last two
configuration variables.
# Defining Configuration
RCB can be configured using **any or all** of the approaches below. **At each level ALL configuration values are
optional** but some are required depending on the mode of operation for the application.
Any values defined at a **lower-listed** level of configuration will override any values from a higher-listed
configuration.
* **ENV** -- Environment variables loaded from an [`.env`](https://github.com/toddbluhm/env-cmd) file (path may be
specified with `--file` cli argument)
* **ENV** -- Any already existing environment variables (exported on command line/terminal profile/etc.)
* **FILE** -- Values specified in a JSON configuration file using the structure shown below (TODO example json file)
* **ARG** -- Values specified as CLI arguments to the program (see [Usage](/README.md#usage)
or `node src/index.js run help` for details)
In the configuration below, if a variable is available at a level of configuration other than **FILE** it is noted with
that level's name (ENV/ARG). The value shown is the default.
**NOTE:** To load a JSON configuration (for **FILE**) use the `-c` cli argument EX: `node src/index.js -c /path/to/JSON/config.json`
```js
const config = {
operator: {
// Username of the reddit user operating this application, used for displaying OP level info/actions in UI
//
// ENV => OPERATOR
// ARG => --operator <name>
name: undefined,
// An optional name to display who is operating this application in the UI
//
// ENV => OPERATOR_DISPLAY
// ARG => --operator <name>
display: undefined,
},
// Values required to interact with Reddit's API
credentials: {
// Client ID for your Reddit application
//
// ENV => CLIENT_ID
// ARG => --clientId <id>
clientId: undefined,
// Client Secret for your Reddit application
//
// ENV => CLIENT_SECRET
// ARG => --clientSecret <secret>
clientSecret: undefined,
// Redirect URI for your Reddit application
//
// ENV => REDIRECT_URI
// ARG => --redirectUri <uri>
redirectUri: undefined,
// Access token retrieved from authenticating an account with your Reddit Application
//
// ENV => ACCESS_TOKEN
// ARG => --accessToken <token>
accessToken: undefined,
// Refresh token retrieved from authenticating an account with your Reddit Application
//
// ENV => REFRESH_TOKEN
// ARG => --refreshToken <token>
refreshToken: undefined
},
logging: {
// Minimum level to log at.
// Must be one of: error, warn, info, verbose, debug
//
// ENV => LOG_LEVEL
// ARG => --logLevel <level>
level: 'verbose',
// Absolute path to directory to store rotated logs in.
//
// Leaving undefined disables rotating logs
// Use ENV => true or ARG => --logDir to log to the current directory under /logs folder
//
// ENV => LOG_DIR
// ARG => --logDir [dir]
path: undefined,
},
snoowrap: {
// Proxy endpoint to make Snoowrap requests to
//
// ENV => PROXY
// ARG => --proxy <proxyEndpoint>
proxy: undefined,
// Set Snoowrap to log debug statements. If undefined will debug based on current log level
//
// ENV => SNOO_DEBUG
// ARG => --snooDebug
debug: false,
},
subreddits: {
// Names of subreddits for bot to run on
//
// If undefined the bot will run on all subreddits it is a moderator of
//
// ENV => SUBREDDITS (comma-separated)
// ARG => --subreddits <list...>
names: undefined,
// If true set all subreddits in dry run mode, overriding configurations
//
// ENV => DRYRUN
// ARG => --dryRun
dryRun: false,
// The default relative url to contextbot wiki page EX https://reddit.com/r/subreddit/wiki/<path>
//
// ENV => WIKI_CONFIG
// ARG => --wikiConfig <path>
wikiConfig: 'botconfig/contextbot',
// Interval, in seconds, to perform application heartbeat
//
// ENV => HEARTBEAT
// ARG => --heartbeat <sec>
heartbeatInterval: 300,
},
polling: {
// If set to true all subreddits polling unmoderated/modqueue with default polling settings will share a request to "r/mod"
// otherwise each subreddit will poll its own mod view
//
// ENV => SHARE_MOD
// ARG => --shareMod
sharedMod: false,
// Default interval, in seconds, to poll activity sources at
interval: 30,
},
web: {
// Whether the web server interface should be started
// In most cases this does not need to be specified as the application will automatically detect if it is possible to start it --
// use this to specify 'cli' if you encounter errors with port/address or are paranoid
//
// ENV => WEB
// ARG => 'node src/index.js run [interface]' -- interface can be 'web' or 'cli'
enabled: true,
// Set the port for the web interface
//
// ENV => PORT
// ARG => --port <number>
port: 8085,
session: {
// The cache provider for sessions
// can be 'memory', 'redis', or a custom config
provider: 'memory',
// The secret value used to encrypt session data
// If provider is persistent (redis) specifying a value here will ensure sessions are valid between application restarts
//
// If undefined a random string is generated
secret: undefined,
},
// The default log level to filter to in the web interface
// If not specified will be same as application log level
logLevel: undefined,
// Maximum number of log statements to keep in memory for each subreddit
maxLogs: 200,
},
caching: {
// The default maximum age of cached data for an Author's history
//
// ENV => AUTHOR_TTL
// ARG => --authorTTL <sec>
authorTTL: 60,
// The default maximum age of cached usernotes for a subreddit
userNotesTTL: 300,
// The default maximum age of cached content, retrieved from an external URL or subreddit wiki, used for comments/ban/footer
wikiTTL: 300,
// The cache provider used for caching reddit API responses and some internal results
// can be 'memory', 'redis', or a custom config
provider: 'memory'
},
api: {
// The number of API requests remaining at which "slow mode" should be enabled
//
// ENV => SOFT_LIMT
// ARG => --softLimit <limit>
softLimit: 250,
// The number of API requests remaining at which all subreddit event polling should be paused
//
// ENV => HARD_LIMIT
// ARG => --hardLimit <limit>
hardLimit: 50,
}
}
```
# Example Configurations
## Minimum Config
Below are examples of the minimum required config to run the application using all three config approaches independently.
Using **FILE**
<details>
```json
{
"credentials": {
"clientId": "f4b4df1c7b2",
"clientSecret": "34v5q1c56ub",
"refreshToken": "34_f1w1v4",
"accessToken": "p75_1c467b2"
}
}
```
</details>
Using **ENV** (`.env`)
<details>
```
CLIENT_ID=f4b4df1c7b2
CLIENT_SECRET=34v5q1c56ub
REFRESH_TOKEN=34_f1w1v4
ACCESS_TOKEN=p75_1c467b2
```
</details>
Using **ARG**
<details>
```
node src/index.js run --clientId=f4b4df1c7b2 --clientSecret=34v5q1c56ub --refreshToken=34_f1w1v4 --accessToken=p75_1c467b2
```
</details>
## Using Config Overrides
Using all three configs together:
**FILE**
<details>
```json
{
"credentials": {
"clientId": "f4b4df1c7b2",
"refreshToken": "34_f1w1v4",
"accessToken": "p75_1c467b2"
}
}
```
</details>
**ENV** (`.env`)
<details>
```
CLIENT_SECRET=34v5q1c56ub
SUBREDDITS=sub1,sub2,sub3
PORT=9008
LOG_LEVEL=DEBUG
```
</details>
**ARG**
<details>
```
node src/index.js run --subreddits=sub1
```
</details>
Produces these variables at runtime for the application:
```
clientId: f4b4df1c7b2
clientSecret: 34v5q1c56ub
refreshToken: 34_f1w1v4
accessToken: p75_1c467b2
subreddits: sub1
port: 9008
log level: debug
```
# Cache Configuration
RCB implements two caching backend **providers**. By default every `provider` setting uses `memory`:
* `memory` -- in-memory (non-persistent) backend
* `redis` -- [Redis](https://redis.io/) backend
Each `provider` object in configuration can be specified as:
* one of the above **strings** to use the **default settings** or
* an **object** with keys to override default settings
A caching object in the json configuration:
```json5
{
"provider": {
"store": "memory", // one of "memory" or "redis"
"ttl": 60, // the default max age of a key in seconds
"max": 500, // the maximum number of keys in the cache (for "memory" only)
// the below properties only apply to 'redis' provider
"host": 'localhost',
"port": 6379,
"auth_pass": null,
"db": 0,
}
}
```
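For example, a sketch of an operator JSON config that points both API response caching and web sessions at the same Redis instance (the host, port, and secret values are illustrative):
```json5
{
  "caching": {
    "authorTTL": 60,
    "provider": {
      "store": "redis",
      "ttl": 60,
      "host": "localhost",
      "port": 6379
    }
  },
  "web": {
    "session": {
      // string shorthand uses the provider's default settings
      "provider": "redis",
      // set a stable secret so sessions survive application restarts
      "secret": "someLongRandomString"
    }
  }
}
```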

heroku.yml Normal file

@@ -0,0 +1,3 @@
build:
docker:
worker: Dockerfile

package-lock.json generated

File diff suppressed because it is too large


@@ -8,12 +8,15 @@
"build": "tsc",
"start": "node server.js",
"guard": "ts-auto-guard src/JsonConfig.ts",
"schema": "npm run -s schema-app & npm run -s schema-ruleset & npm run -s schema-rule & npm run -s schema-action",
"schema-app": "typescript-json-schema tsconfig.json JSONConfig --out src/Schema/App.json --required --tsNodeRegister --refs --propOrder",
"schema-ruleset": "typescript-json-schema tsconfig.json RuleSetJSONConfig --out src/Schema/RuleSet.json --required --tsNodeRegister --refs --propOrder",
"schema-rule": "typescript-json-schema tsconfig.json RuleJSONConfig --out src/Schema/Rule.json --required --tsNodeRegister --refs --propOrder",
"schema-action": "typescript-json-schema tsconfig.json ActionJSONConfig --out src/Schema/Action.json --required --tsNodeRegister --refs --propOrder",
"schemaNotWorking": "./node_modules/.bin/ts-json-schema-generator -f tsconfig.json -p src/JsonConfig.ts -t JSONConfig --out src/Schema/vegaSchema.json"
"schema": "npm run -s schema-app & npm run -s schema-ruleset & npm run -s schema-rule & npm run -s schema-action & npm run -s schema-config",
"schema-app": "typescript-json-schema tsconfig.json JSONConfig --out src/Schema/App.json --required --tsNodeRegister --refs",
"schema-ruleset": "typescript-json-schema tsconfig.json RuleSetJson --out src/Schema/RuleSet.json --required --tsNodeRegister --refs",
"schema-rule": "typescript-json-schema tsconfig.json RuleJson --out src/Schema/Rule.json --required --tsNodeRegister --refs",
"schema-action": "typescript-json-schema tsconfig.json ActionJson --out src/Schema/Action.json --required --tsNodeRegister --refs",
"schema-config": "typescript-json-schema tsconfig.json OperatorJsonConfig --out src/Schema/OperatorConfig.json --required --tsNodeRegister --refs",
"schemaNotWorking": "./node_modules/.bin/ts-json-schema-generator -f tsconfig.json -p src/JsonConfig.ts -t JSONConfig --out src/Schema/vegaSchema.json",
"circular": "madge --circular --extensions ts src/index.ts",
"circular-graph": "madge --image graph.svg --circular --extensions ts src/index.ts"
},
"engines": {
"node": ">=15"
@@ -22,24 +25,61 @@
"author": "",
"license": "ISC",
"dependencies": {
"ajv": "^6.12.6",
"@awaitjs/express": "^0.8.0",
"ajv": "^7.2.4",
"async": "^3.2.0",
"body-parser": "^1.19.0",
"cache-manager": "^3.4.4",
"cache-manager-redis-store": "^2.0.0",
"commander": "^8.0.0",
"dayjs": "^1.10.5",
"deepmerge": "^4.2.2",
"ejs": "^3.1.6",
"env-cmd": "^10.1.0",
"es6-error": "^4.1.1",
"express": "^4.17.1",
"express-session": "^1.17.2",
"express-session-cache-manager": "^1.0.2",
"express-socket.io-session": "^1.3.5",
"fast-deep-equal": "^3.1.3",
"minimist": "^1.2.5",
"fuse.js": "^6.4.6",
"he": "^1.2.0",
"js-yaml": "^4.1.0",
"json5": "^2.2.0",
"mustache": "^4.2.0",
"node-fetch": "^2.6.1",
"object-hash": "^2.2.0",
"p-event": "^4.2.0",
"safe-stable-stringify": "^1.1.1",
"snoostorm": "^1.5.2",
"snoowrap": "^1.23.0",
"winston": "FoxxMD/winston#9639da027cd4f3b46b055b0193f240639ef53409",
"winston-daily-rotate-file": "^4.5.5"
"socket.io": "^4.1.3",
"tcp-port-used": "^1.0.2",
"typescript": "^4.3.4",
"webhook-discord": "^3.7.7",
"winston": "FoxxMD/winston#fbab8de969ecee578981c77846156c7f43b5f01e",
"winston-daily-rotate-file": "^4.5.5",
"zlib": "^1.0.5"
},
"devDependencies": {
"@tsconfig/node14": "^1.0.0",
"@types/async": "^3.2.7",
"@types/cache-manager": "^3.4.2",
"@types/cache-manager-redis-store": "^2.0.0",
"@types/express": "^4.17.13",
"@types/express-session": "^1.17.4",
"@types/express-socket.io-session": "^1.3.6",
"@types/he": "^1.1.1",
"@types/js-yaml": "^4.0.1",
"@types/lru-cache": "^5.1.1",
"@types/memory-cache": "^0.2.1",
"@types/minimist": "^1.2.1",
"@types/mustache": "^4.1.1",
"@types/node": "^15.6.1",
"@types/node-fetch": "^2.5.10",
"@types/object-hash": "^2.1.0",
"@types/pako": "^1.0.1",
"@types/tcp-port-used": "^1.0.0",
"ts-auto-guard": "*",
"ts-json-schema-generator": "^0.93.0",
"typescript-json-schema": "^0.50.1"


@@ -1,23 +1,33 @@
import {CommentAction, CommentActionJSONConfig} from "./CommentAction";
import {CommentAction, CommentActionJson} from "./CommentAction";
import LockAction from "./LockAction";
import {RemoveAction} from "./RemoveAction";
import {ReportAction, ReportActionJSONConfig} from "./ReportAction";
import {FlairAction, FlairActionJSONConfig} from "./SubmissionAction/FlairAction";
import Action, {ActionJSONConfig} from "./index";
import {ReportAction, ReportActionJson} from "./ReportAction";
import {FlairAction, FlairActionJson} from "./SubmissionAction/FlairAction";
import Action, {ActionJson} from "./index";
import {Logger} from "winston";
import {UserNoteAction, UserNoteActionJson} from "./UserNoteAction";
import ApproveAction, {ApproveActionConfig} from "./ApproveAction";
import BanAction, {BanActionJson} from "./BanAction";
export function actionFactory
(config: ActionJSONConfig): Action {
(config: ActionJson, logger: Logger, subredditName: string): Action {
switch (config.kind) {
case 'comment':
return new CommentAction(config as CommentActionJSONConfig);
return new CommentAction({...config as CommentActionJson, logger, subredditName});
case 'lock':
return new LockAction();
return new LockAction({...config, logger, subredditName});
case 'remove':
return new RemoveAction();
return new RemoveAction({...config, logger, subredditName});
case 'report':
return new ReportAction(config as ReportActionJSONConfig);
return new ReportAction({...config as ReportActionJson, logger, subredditName});
case 'flair':
return new FlairAction(config as FlairActionJSONConfig);
return new FlairAction({...config as FlairActionJson, logger, subredditName});
case 'approve':
return new ApproveAction({...config as ApproveActionConfig, logger, subredditName});
case 'usernote':
return new UserNoteAction({...config as UserNoteActionJson, logger, subredditName});
case 'ban':
return new BanAction({...config as BanActionJson, logger, subredditName});
default:
throw new Error('rule "kind" was not recognized.');
}


@@ -0,0 +1,36 @@
import {ActionJson, ActionConfig} from "./index";
import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../Rule";
export class ApproveAction extends Action {
getKind() {
return 'Approve';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
const dryRun = runtimeDryrun || this.dryRun;
// snoowrap typing issue, doesn't think 'approved'/'approve' exist on both Comment and Submission
// @ts-ignore
if (item.approved) {
this.logger.warn('Item is already approved');
}
if (!dryRun) {
// @ts-ignore
await item.approve();
}
}
}
export interface ApproveActionConfig extends ActionConfig {
}
/**
* Approve the Activity this Check is run on
* */
export interface ApproveActionJson extends ApproveActionConfig, ActionJson {
kind: 'approve'
}
export default ApproveAction;

src/Action/BanAction.ts Normal file

@@ -0,0 +1,110 @@
import {ActionJson, ActionConfig, ActionOptions} from "./index";
import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../Rule";
import {renderContent} from "../Utils/SnoowrapUtils";
import {Footer} from "../Common/interfaces";
export class BanAction extends Action {
message?: string;
reason?: string;
duration?: number;
note?: string;
footer?: false | string;
constructor(options: BanActionOptions) {
super(options);
const {
message,
reason,
duration,
note,
footer,
} = options;
this.footer = footer;
this.message = message;
this.reason = reason;
this.duration = duration;
this.note = note;
}
getKind() {
return 'Ban';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
const dryRun = runtimeDryrun || this.dryRun;
const content = this.message === undefined ? undefined : await this.resources.getContent(this.message, item.subreddit);
const renderedBody = content === undefined ? undefined : await renderContent(content, item, ruleResults, this.resources.userNotes);
const renderedContent = renderedBody === undefined ? undefined : `${renderedBody}${await this.resources.generateFooter(item, this.footer)}`;
let banPieces = [];
banPieces.push(`Message: ${renderedContent === undefined ? 'None' : `${renderedContent.length > 100 ? `\r\n${renderedContent}` : renderedContent}`}`);
banPieces.push(`Reason: ${this.reason || 'None'}`);
banPieces.push(`Note: ${this.note || 'None'}`);
const durText = this.duration === undefined ? 'permanently' : `for ${this.duration} days`;
this.logger.info(`Banning ${item.author.name} ${durText}${this.reason !== undefined ? ` (${this.reason})` : ''}`);
this.logger.verbose(`\r\n${banPieces.join('\r\n')}`);
if (!dryRun) {
// @ts-ignore
const fetchedSub = await item.subreddit.fetch();
const fetchedName = await item.author.name;
await fetchedSub.banUser({
name: fetchedName,
banMessage: renderedContent === undefined ? undefined : renderedContent,
banReason: this.reason,
banNote: this.note,
duration: this.duration
});
}
}
}
export interface BanActionConfig extends ActionConfig, Footer {
/**
* The message that is sent in the ban notification. `message` is interpreted as reddit-flavored Markdown.
*
* If value starts with `wiki:` then the proceeding value will be used to get a wiki page
*
* EX `wiki:botconfig/mybot` tries to get `https://reddit.com/r/mySubredditExample/wiki/botconfig/mybot`
*
* EX `this is plain text` => "this is plain text"
*
* EX `this is **bold** markdown text` => "this is **bold** markdown text"
*
* @examples ["This is the content of a comment/report/usernote", "this is **bold** markdown text", "wiki:botconfig/acomment" ]
* */
message?: string
/**
* Reason for ban.
* @maxLength 100
* @examples ["repeat spam"]
* */
reason?: string
/**
* Number of days to ban the Author. If not specified Author will be banned permanently.
* @minimum 1
* @maximum 999
* @examples [90]
* */
duration?: number
/**
* A mod note for this ban
* @maxLength 100
* @examples ["Sock puppet for u/AnotherUser"]
* */
note?: string
}
export interface BanActionOptions extends BanActionConfig, ActionOptions {
}
/**
* Ban the Author of the Activity this Check is run on
* */
export interface BanActionJson extends BanActionConfig, ActionJson {
kind: 'ban',
}
export default BanAction;


@@ -1,21 +1,16 @@
import Action, {ActionJSONConfig, ActionConfig, ActionOptions} from "./index";
import Snoowrap, {Comment} from "snoowrap";
import Action, {ActionJson, ActionOptions} from "./index";
import {Comment} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import dayjs, {Dayjs} from "dayjs";
import {renderContent} from "../Utils/SnoowrapUtils";
import {RichContent} from "../Common/interfaces";
export const WIKI_DESCRIM = 'wiki:';
import {Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
import {RuleResult} from "../Rule";
export class CommentAction extends Action {
content: string;
hasWiki: boolean;
wiki?: string;
wikiFetched?: Dayjs;
lock: boolean = false;
sticky: boolean = false;
distinguish: boolean = false;
name?: string = 'Comment';
footer?: false | string;
constructor(options: CommentActionOptions) {
super(options);
@@ -24,42 +19,53 @@ export class CommentAction extends Action {
lock = false,
sticky = false,
distinguish = false,
footer,
} = options;
this.hasWiki = content.trim().substring(0, WIKI_DESCRIM.length) === WIKI_DESCRIM;
this.footer = footer;
this.content = content;
if (this.hasWiki) {
this.wiki = this.content.trim().substring(WIKI_DESCRIM.length);
}
this.lock = lock;
this.sticky = sticky;
this.distinguish = distinguish;
}
async handle(item: Comment | Submission, client: Snoowrap): Promise<void> {
if (this.hasWiki && (this.wikiFetched === undefined || Math.abs(dayjs().diff(this.wikiFetched, 'minute')) > 5)) {
try {
const wiki = item.subreddit.getWikiPage(this.wiki as string);
this.content = await wiki.content_md;
this.wikiFetched = dayjs();
} catch (err) {
this.logger.error(err);
throw new Error(`Could not read wiki page. Please ensure the page '${this.wiki}' exists and is readable`);
getKind() {
return 'Comment';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
const dryRun = runtimeDryrun || this.dryRun;
const content = await this.resources.getContent(this.content, item.subreddit);
const body = await renderContent(content, item, ruleResults, this.resources.userNotes);
const footer = await this.resources.generateFooter(item, this.footer);
const renderedContent = `${body}${footer}`;
this.logger.verbose(`Contents:\r\n${renderedContent.length > 100 ? `\r\n${renderedContent}` : renderedContent}`);
if(item.archived) {
this.logger.warn('Cannot comment because Item is archived');
return;
}
let reply: Comment;
if(!dryRun) {
// @ts-ignore
reply = await item.reply(renderedContent);
}
if (this.lock) {
if (!dryRun) {
// snoowrap typing issue, thinks comments can't be locked
// @ts-ignore
await item.lock();
}
}
// @ts-ignore
const reply: Comment = await item.reply(renderContent(this.content, item));
if (this.lock && item instanceof Submission) {
// @ts-ignore
await item.lock();
}
if (this.distinguish) {
if (this.distinguish && !dryRun) {
// @ts-ignore
await reply.distinguish({sticky: this.sticky});
}
}
}
export interface CommentActionConfig extends RichContent {
export interface CommentActionConfig extends RequiredRichContent, Footer {
/**
* Lock the comment after creation?
* */
@@ -74,12 +80,12 @@ export interface CommentActionConfig extends RichContent {
distinguish?: boolean,
}
export interface CommentActionOptions extends CommentActionConfig,ActionOptions {
export interface CommentActionOptions extends CommentActionConfig, ActionOptions {
}
/**
* Reply to the Activity. For a submission the reply will be a top-level comment.
* */
export interface CommentActionJSONConfig extends CommentActionConfig, ActionJSONConfig {
export interface CommentActionJson extends CommentActionConfig, ActionJson {
kind: 'comment'
}


@@ -1,11 +1,22 @@
import {ActionJSONConfig, ActionConfig} from "./index";
import {ActionJson, ActionConfig} from "./index";
import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../Rule";
export class LockAction extends Action {
name?: string = 'Lock';
async handle(item: Comment|Submission, client: Snoowrap): Promise<void> {
if (item instanceof Submission) {
getKind() {
return 'Lock';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
const dryRun = runtimeDryrun || this.dryRun;
//snoowrap typing issue, thinks comments can't be locked
// @ts-ignore
if (item.locked) {
this.logger.warn('Item is already locked');
}
if (!dryRun) {
//snoowrap typing issue, thinks comments can't be locked
// @ts-ignore
await item.lock();
}
@@ -19,8 +30,8 @@ export interface LockActionConfig extends ActionConfig {
/**
* Lock the Activity
* */
export interface LockActionJSONConfig extends LockActionConfig, ActionJSONConfig {
export interface LockActionJson extends LockActionConfig, ActionJson {
kind: 'lock'
}
export default LockAction;

View File

@@ -1,12 +1,26 @@
import {ActionJSONConfig, ActionConfig} from "./index";
import {ActionJson, ActionConfig} from "./index";
import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../Rule";
import {activityIsRemoved} from "../Utils/SnoowrapUtils";
export class RemoveAction extends Action {
name?: string = 'Remove';
async handle(item: Comment|Submission, client: Snoowrap): Promise<void> {
getKind() {
return 'Remove';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
const dryRun = runtimeDryrun || this.dryRun;
// issue with snoowrap typings, doesn't think prop exists on Submission
// @ts-ignore
await item.remove();
if (activityIsRemoved(item)) {
this.logger.warn('Item is already removed');
return;
}
if (!dryRun) {
// @ts-ignore
await item.remove();
}
}
}
@@ -17,6 +31,6 @@ export interface RemoveActionConfig extends ActionConfig {
/**
* Remove the Activity
* */
export interface RemoveActionJSONConfig extends RemoveActionConfig, ActionJSONConfig {
export interface RemoveActionJson extends RemoveActionConfig, ActionJson {
kind: 'remove'
}

View File

@@ -1,27 +1,42 @@
import {ActionJSONConfig, ActionConfig, ActionOptions} from "./index";
import {ActionJson, ActionConfig, ActionOptions} from "./index";
import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {truncateStringToLength} from "../util";
import {renderContent} from "../Utils/SnoowrapUtils";
import {RuleResult} from "../Rule";
import {RichContent} from "../Common/interfaces";
// https://www.reddit.com/dev/api/oauth#POST_api_report
// denotes 100 characters maximum
const reportTrunc = truncateStringToLength(100);
// the 100 character limit actually only applies to VISIBLE text on OLD reddit... on old reddit the rest of the text is visible on hover. On new reddit the whole thing displays (up to at least 400 characters)
export class ReportAction extends Action {
content: string;
name?: string = 'Report';
constructor(options: ReportActionOptions) {
super(options);
this.content = options.content;
this.content = options.content || '';
}
async handle(item: Comment | Submission, client: Snoowrap): Promise<void> {
// @ts-ignore
await item.report({reason: this.content});
getKind() {
return 'Report';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
const dryRun = runtimeDryrun || this.dryRun;
const content = await this.resources.getContent(this.content, item.subreddit);
const renderedContent = await renderContent(content, item, ruleResults, this.resources.userNotes);
this.logger.verbose(`Contents:\r\n${renderedContent}`);
const truncatedContent = reportTrunc(renderedContent);
if(!dryRun) {
// @ts-ignore
await item.report({reason: truncatedContent});
}
}
}
export interface ReportActionConfig {
/**
* The text of the report
* */
content: string,
export interface ReportActionConfig extends RichContent {
}
export interface ReportActionOptions extends ReportActionConfig, ActionOptions {
@@ -30,6 +45,6 @@ export interface ReportActionOptions extends ReportActionConfig, ActionOptions {
/**
* Report the Activity
* */
export interface ReportActionJSONConfig extends ReportActionConfig, ActionJSONConfig {
export interface ReportActionJson extends ReportActionConfig, ActionJson {
kind: 'report'
}

View File

@@ -1,11 +1,11 @@
import {SubmissionActionConfig} from "./index";
import Action, {ActionJSONConfig} from "../index";
import Action, {ActionJson, ActionOptions} from "../index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../../Rule";
export class FlairAction extends Action {
text: string;
css: string;
name?: string = 'Flair';
constructor(options: FlairActionOptions) {
super(options);
@@ -16,10 +16,18 @@ export class FlairAction extends Action {
this.css = options.css || '';
}
async handle(item: Comment | Submission, client: Snoowrap): Promise<void> {
getKind() {
return 'Flair';
}
async process(item: Comment | Submission, ruleResults: RuleResult[]): Promise<void> {
if (item instanceof Submission) {
// @ts-ignore
await item.assignFlair({text: this.text, cssClass: this.css})
if(!this.dryRun) {
// @ts-ignore
await item.assignFlair({text: this.text, cssClass: this.css})
}
} else {
this.logger.warn('Cannot flair Comment');
}
}
}
@@ -28,7 +36,7 @@ export class FlairAction extends Action {
* @minProperties 1
* @additionalProperties false
* */
export interface FlairActionOptions extends SubmissionActionConfig {
export interface FlairActionConfig extends SubmissionActionConfig {
/**
* The text of the flair to apply
* */
@@ -39,9 +47,13 @@ export interface FlairActionOptions extends SubmissionActionConfig {
css?: string,
}
export interface FlairActionOptions extends FlairActionConfig,ActionOptions {
}
/**
* Flair the Submission
* */
export interface FlairActionJSONConfig extends FlairActionOptions, ActionJSONConfig {
export interface FlairActionJson extends FlairActionConfig, ActionJson {
kind: 'flair'
}

View File

@@ -0,0 +1,66 @@
import {ActionJson, ActionConfig, ActionOptions} from "./index";
import Action from "./index";
import {Comment} from "snoowrap";
import {renderContent} from "../Utils/SnoowrapUtils";
import {RuleResult} from "../Rule";
import {UserNote, UserNoteJson} from "../Subreddit/UserNotes";
import Submission from "snoowrap/dist/objects/Submission";
export class UserNoteAction extends Action {
content: string;
type: string;
allowDuplicate: boolean;
constructor(options: UserNoteActionOptions) {
super(options);
const {type, content = '', allowDuplicate = false} = options;
this.type = type;
this.content = content;
this.allowDuplicate = allowDuplicate;
}
getKind() {
return 'User Note';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
const dryRun = runtimeDryrun || this.dryRun;
const content = await this.resources.getContent(this.content, item.subreddit);
const renderedContent = await renderContent(content, item, ruleResults, this.resources.userNotes);
this.logger.verbose(`Note:\r\n(${this.type}) ${renderedContent}`);
if (!this.allowDuplicate) {
const notes = await this.resources.userNotes.getUserNotes(item.author);
const existingNote = notes.find((x) => x.link.includes(item.id));
if (existingNote) {
this.logger.info(`Will not add note because one already exists for this Activity (${existingNote.time.local().format()}) and allowDuplicate=false`);
return;
}
}
if (!dryRun) {
await this.resources.userNotes.addUserNote(item, this.type, renderedContent);
} else if (!await this.resources.userNotes.warningExists(this.type)) {
this.logger.warn(`UserNote type '${this.type}' does not exist. If you meant to use this please add it through Toolbox first.`);
}
}
}
export interface UserNoteActionConfig extends ActionConfig,UserNoteJson {
/**
* Add Note even if a Note already exists for this Activity
* @examples [false]
* @default false
* */
allowDuplicate?: boolean,
}
export interface UserNoteActionOptions extends UserNoteActionConfig, ActionOptions {
}
/**
* Add a Toolbox User Note to the Author of this Activity
* */
export interface UserNoteActionJson extends UserNoteActionConfig, ActionJson {
kind: 'usernote'
}

View File

@@ -1,50 +1,140 @@
import Snoowrap, {Comment, Submission} from "snoowrap";
import {Comment, Submission} from "snoowrap";
import {Logger} from "winston";
import {createLabelledLogger, loggerMetaShuffle} from "../util";
import {RuleResult} from "../Rule";
import ResourceManager, {SubredditResources} from "../Subreddit/SubredditResources";
import {ChecksActivityState, TypedActivityStates} from "../Common/interfaces";
import Author, {AuthorOptions} from "../Author/Author";
import {isItem} from "../Utils/SnoowrapUtils";
export abstract class Action {
name?: string;
logger: Logger;
resources: SubredditResources;
authorIs: AuthorOptions;
itemIs: TypedActivityStates;
dryRun: boolean;
constructor(options: ActionOptions = {}) {
constructor(options: ActionOptions) {
const {
name,
loggerPrefix = '',
name = this.getKind(),
logger,
subredditName,
dryRun = false,
authorIs: {
include = [],
exclude = [],
} = {},
itemIs = [],
} = options;
if (name !== undefined) {
this.name = name;
}
if (logger === undefined) {
const prefix = `${loggerPrefix}|${this.name}`;
this.logger = createLabelledLogger(prefix, prefix);
} else {
this.logger = logger.child(loggerMetaShuffle(logger, name || 'Action', undefined, {truncateLength: 100}));
this.name = name;
this.dryRun = dryRun;
this.resources = ResourceManager.get(subredditName) as SubredditResources;
this.logger = logger.child({labels: [`Action ${this.getActionUniqueName()}`]});
this.authorIs = {
exclude: exclude.map(x => new Author(x)),
include: include.map(x => new Author(x)),
}
this.itemIs = itemIs;
}
abstract handle(item: Comment | Submission, client: Snoowrap): Promise<void>;
abstract getKind(): string;
getActionUniqueName() {
return this.name === this.getKind() ? this.getKind() : `${this.getKind()} - ${this.name}`;
}
async handle(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
const dryRun = runtimeDryrun || this.dryRun;
let actionRun = false;
const [itemPass, crit] = isItem(item, this.itemIs, this.logger);
if (!itemPass) {
this.logger.verbose(`Activity did not pass 'itemIs' test, Action not run`);
return;
}
const authorRun = async () => {
if (this.authorIs.include !== undefined && this.authorIs.include.length > 0) {
for (const auth of this.authorIs.include) {
if (await this.resources.testAuthorCriteria(item, auth)) {
await this.process(item, ruleResults, runtimeDryrun);
return true;
}
}
this.logger.verbose('Inclusive author criteria not matched, Action not run');
return false;
}
if (!actionRun && this.authorIs.exclude !== undefined && this.authorIs.exclude.length > 0) {
for (const auth of this.authorIs.exclude) {
if (await this.resources.testAuthorCriteria(item, auth, false)) {
await this.process(item, ruleResults, runtimeDryrun);
return true;
}
}
this.logger.verbose('Exclusive author criteria not matched, Action not run');
return false;
}
return null;
}
const authorRunResults = await authorRun();
if (null === authorRunResults) {
await this.process(item, ruleResults, runtimeDryrun);
} else if (!authorRunResults) {
return;
}
this.logger.verbose(`${dryRun ? 'DRYRUN - ' : ''}Done`);
}
abstract process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void>;
}
export interface ActionOptions {
name?: string;
logger?: Logger,
loggerPrefix?: string,
export interface ActionOptions extends ActionConfig {
logger: Logger;
subredditName: string;
}
export interface ActionConfig {
export interface ActionConfig extends ChecksActivityState {
/**
* A friendly name for this Action
* An optional, but highly recommended, friendly name for this Action. If not present will default to `kind`.
*
* Can only contain letters, numbers, underscore, spaces, and dashes
*
* @pattern ^[a-zA-Z]([\w -]*[\w])?$
* @examples ["myDescriptiveAction"]
* */
name?: string;
/**
* If `true` the Action will not make the API request to Reddit to perform its action.
*
* @default false
* @examples [false, true]
* */
dryRun?: boolean;
/**
* If present then these Author criteria are checked before running the Action. If criteria fails then the Action is not run.
* */
authorIs?: AuthorOptions
/**
* A list of criteria to test the state of the `Activity` against before running the Action.
*
* If any set of criteria passes the Action will be run.
*
* */
itemIs?: TypedActivityStates
}
/** @see {isActionConfig} ts-auto-guard:type-guard */
export interface ActionJSONConfig extends ActionConfig {
export interface ActionJson extends ActionConfig {
/**
* The type of action that will be performed
*/
kind: 'comment' | 'lock' | 'remove' | 'report' | 'flair'
kind: 'comment' | 'lock' | 'remove' | 'report' | 'approve' | 'ban' | 'flair' | 'usernote'
}
export const isActionJson = (obj: object): obj is ActionJson => {
return (obj as ActionJson).kind !== undefined;
}
export default Action;
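A minimal sketch (not part of this diff) of how the ActionJson/ActionConfig options above might be combined in a single action entry; the wiki page path, the criteria values, and the import path are hypothetical:

import {CommentActionJson} from "./Action/CommentAction";

const warnReply: CommentActionJson = {
    kind: 'comment',
    name: 'spamWarningReply',                   // optional friendly name, defaults to kind
    dryRun: true,                               // log what would happen, make no API request
    content: 'wiki:replytemplates/spamWarning', // hypothetical wiki page in the current subreddit
    lock: true,
    distinguish: true,
    itemIs: [{removed: false}],                 // only run on Activities that are not removed
    authorIs: {exclude: [{isMod: true}]},       // skip when the author is a moderator
};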

462
src/App.ts Normal file
View File

@@ -0,0 +1,462 @@
import Snoowrap, {Subreddit} from "snoowrap";
import {Manager} from "./Subreddit/Manager";
import winston, {Logger} from "winston";
import {
argParseInt,
createRetryHandler, formatNumber,
labelledFormat, logLevels,
parseBool, parseDuration,
parseFromJsonOrYamlToObject,
parseSubredditName,
sleep
} from "./util";
import pEvent from "p-event";
import EventEmitter from "events";
import CacheManager from './Subreddit/SubredditResources';
import dayjs, {Dayjs} from "dayjs";
import LoggedError from "./Utils/LoggedError";
import {ProxiedSnoowrap, RequestTrackingSnoowrap} from "./Utils/SnoowrapClients";
import {ModQueueStream, UnmoderatedStream} from "./Subreddit/Streams";
import {getLogger} from "./Utils/loggerFactory";
import {DurationString, OperatorConfig, PAUSED, RUNNING, STOPPED, SYSTEM, USER} from "./Common/interfaces";
import { Duration } from "dayjs/plugin/duration";
const {transports} = winston;
const snooLogWrapper = (logger: Logger) => {
return {
warn: (...args: any[]) => logger.warn(args.slice(0, 2).join(' '), [args.slice(2)]),
debug: (...args: any[]) => logger.debug(args.slice(0, 2).join(' '), [args.slice(2)]),
info: (...args: any[]) => logger.info(args.slice(0, 2).join(' '), [args.slice(2)]),
trace: (...args: any[]) => logger.debug(args.slice(0, 2).join(' '), [args.slice(2)]),
}
}
export class App {
client: Snoowrap;
subreddits: string[];
subManagers: Manager[] = [];
logger: Logger;
wikiLocation: string;
dryRun?: true | undefined;
heartbeatInterval: number;
nextHeartbeat?: Dayjs;
heartBeating: boolean = false;
//apiLimitWarning: number;
softLimit: number | string = 250;
hardLimit: number | string = 50;
nannyMode?: 'soft' | 'hard';
nextExpiration!: Dayjs;
botName?: string;
startedAt: Dayjs = dayjs();
sharedModqueue: boolean = false;
apiSample: number[] = [];
interval: any;
apiRollingAvg: number = 0;
apiEstDepletion?: Duration;
depletedInSecs: number = 0;
constructor(config: OperatorConfig) {
const {
subreddits: {
names = [],
wikiConfig,
dryRun,
heartbeatInterval,
},
credentials: {
clientId,
clientSecret,
refreshToken,
accessToken,
},
snoowrap: {
proxy,
debug,
},
polling: {
sharedMod,
},
caching: {
authorTTL,
provider: {
store
}
},
api: {
softLimit,
hardLimit,
}
} = config;
CacheManager.setDefaultsFromConfig(config);
this.dryRun = parseBool(dryRun) === true ? true : undefined;
this.heartbeatInterval = heartbeatInterval;
//this.apiLimitWarning = argParseInt(apiLimitWarning);
this.softLimit = softLimit;
this.hardLimit = hardLimit;
this.wikiLocation = wikiConfig;
this.sharedModqueue = sharedMod;
this.logger = getLogger(config.logging);
if (this.dryRun) {
this.logger.info('Running in DRYRUN mode');
}
this.subreddits = names.map(parseSubredditName);
const creds = {
userAgent: `web:contextBot:dev`,
clientId,
clientSecret,
refreshToken,
accessToken,
};
const missingCreds = [];
for(const [k,v] of Object.entries(creds)) {
if(v === undefined || v === '' || v === null) {
missingCreds.push(k);
}
}
if(missingCreds.length > 0) {
this.logger.error('There are credentials missing that would prevent initializing the Reddit API Client and subsequently the rest of the application');
this.logger.error(`Missing credentials: ${missingCreds.join(', ')}`)
this.logger.info(`If this is a first-time setup use the 'web' command for a web-based guide to configuring your application`);
this.logger.info(`Or check the USAGE section of the readme for the correct naming of these arguments/environment variables`);
throw new LoggedError(`Missing credentials: ${missingCreds.join(', ')}`);
}
this.client = proxy === undefined ? new Snoowrap(creds) : new ProxiedSnoowrap({...creds, proxy});
this.client.config({
warnings: true,
maxRetryAttempts: 5,
debug,
logger: snooLogWrapper(this.logger.child({labels: ['Snoowrap']})),
continueAfterRatelimitError: true,
});
const retryHandler = createRetryHandler({maxRequestRetry: 8, maxOtherRetry: 1}, this.logger);
const modStreamErrorListener = (name: string) => async (err: any) => {
this.logger.error('Polling error occurred', err);
const shouldRetry = await retryHandler(err);
if(shouldRetry) {
defaultUnmoderatedStream.startInterval();
} else {
for(const m of this.subManagers) {
if(m.modStreamCallbacks.size > 0) {
m.notificationManager.handle('runStateChanged', `${name.toUpperCase()} Polling Stopped`, 'Encountered too many errors from Reddit while polling. Will try to restart on next heartbeat.');
}
}
this.logger.error(`Mod stream ${name.toUpperCase()} encountered too many errors while polling. Will try to restart on next heartbeat.`);
}
}
const defaultUnmoderatedStream = new UnmoderatedStream(this.client, {subreddit: 'mod'});
// @ts-ignore
defaultUnmoderatedStream.on('error', modStreamErrorListener('unmoderated'));
const defaultModqueueStream = new ModQueueStream(this.client, {subreddit: 'mod'});
// @ts-ignore
defaultModqueueStream.on('error', modStreamErrorListener('modqueue'));
CacheManager.modStreams.set('unmoderated', defaultUnmoderatedStream);
CacheManager.modStreams.set('modqueue', defaultModqueueStream);
const onTerm = () => {
for(const m of this.subManagers) {
m.notificationManager.handle('runStateChanged', 'Application Shutdown', 'The application was shutdown');
}
}
process.on('SIGTERM', () => {
onTerm();
});
}
async testClient() {
try {
// @ts-ignore
await this.client.getMe();
this.logger.info('Test API call successful');
} catch (err) {
this.logger.error('An error occurred while trying to initialize the Reddit API Client which would prevent the entire application from running.');
if(err.name === 'StatusCodeError') {
const authHeader = err.response.headers['www-authenticate'];
if (authHeader !== undefined && authHeader.includes('insufficient_scope')) {
this.logger.error('Reddit responded with a 403 insufficient_scope. Please ensure you have chosen the correct scopes when authorizing your account.');
} else if(err.statusCode === 401) {
this.logger.error('It is likely a credential is missing or incorrect. Check clientId, clientSecret, refreshToken, and accessToken');
}
this.logger.error(`Error Message: ${err.message}`);
} else {
this.logger.error(err);
}
err.logged = true;
throw err;
}
}
async buildManagers(subreddits: string[] = []) {
let availSubs = [];
const name = await this.client.getMe().name;
this.logger.info(`Reddit API Limit Remaining: ${this.client.ratelimitRemaining}`);
this.logger.info(`Authenticated Account: /u/${name}`);
this.botName = name;
for (const sub of await this.client.getModeratedSubreddits()) {
// TODO don't know a way to check permissions yet
availSubs.push(sub);
}
this.logger.info(`/u/${name} is a moderator of these subreddits: ${availSubs.map(x => x.display_name_prefixed).join(', ')}`);
let subsToRun: Subreddit[] = [];
const subsToUse = subreddits.length > 0 ? subreddits.map(parseSubredditName) : this.subreddits;
if (subsToUse.length > 0) {
this.logger.info(`Operator-defined subreddit constraints detected (CLI argument or environment variable), will try to run on: ${subsToUse.join(', ')}`);
for (const sub of subsToUse) {
const asub = availSubs.find(x => x.display_name.toLowerCase() === sub.toLowerCase())
if (asub === undefined) {
this.logger.warn(`Will not run on ${sub} because it is not moderated by this client or the client does not have the appropriate mod permissions for it.`);
} else {
// @ts-ignore
const fetchedSub = await asub.fetch();
subsToRun.push(fetchedSub);
}
}
} else {
// otherwise assume all moddable subs from client should be run on
this.logger.info('No user-defined subreddit constraints detected, will try to run on all');
subsToRun = availSubs;
}
let subSchedule: Manager[] = [];
// get configs for subs we want to run on and build/validate them
for (const sub of subsToRun) {
const manager = new Manager(sub, this.client, this.logger, {dryRun: this.dryRun, sharedModqueue: this.sharedModqueue});
try {
await manager.parseConfiguration('system', true, {suppressNotification: true});
} catch (err) {
if (!(err instanceof LoggedError)) {
this.logger.error(`Config was not valid:`, {subreddit: sub.display_name_prefixed});
this.logger.error(err, {subreddit: sub.display_name_prefixed});
}
}
subSchedule.push(manager);
}
this.subManagers = subSchedule;
}
async heartbeat() {
try {
this.heartBeating = true;
while (true) {
this.nextHeartbeat = dayjs().add(this.heartbeatInterval, 'second');
await sleep(this.heartbeatInterval * 1000);
const heartbeat = `HEARTBEAT -- API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ~${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion === undefined ? 'N/A' : this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`
this.logger.info(heartbeat);
for (const s of this.subManagers) {
if(s.botState.state === STOPPED && s.botState.causedBy === USER) {
this.logger.debug('Skipping config check/restart on heartbeat due to previously being stopped by user', {subreddit: s.displayLabel});
continue;
}
try {
const newConfig = await s.parseConfiguration();
if(newConfig || (s.queueState.state !== RUNNING && s.queueState.causedBy === SYSTEM))
{
await s.startQueue('system', {reason: newConfig ? 'Config updated on heartbeat triggered reload' : 'Heartbeat detected non-running queue'});
}
if(newConfig || (s.eventsState.state !== RUNNING && s.eventsState.causedBy === SYSTEM))
{
await s.startEvents('system', {reason: newConfig ? 'Config updated on heartbeat triggered reload' : 'Heartbeat detected non-running events'});
}
if(s.botState.state !== RUNNING && s.eventsState.state === RUNNING && s.queueState.state === RUNNING) {
s.botState = {
state: RUNNING,
causedBy: 'system',
}
}
} catch (err) {
this.logger.info('Stopping event polling to prevent activity processing queue from backing up. Will be restarted when config update succeeds.')
await s.stopEvents('system', {reason: 'Invalid config will cause events to pile up in queue. Will be restarted when config update succeeds (next heartbeat).'});
if(!(err instanceof LoggedError)) {
this.logger.error(err, {subreddit: s.displayLabel});
}
if(this.nextHeartbeat !== undefined) {
this.logger.info(`Will retry parsing config on next heartbeat (in ${dayjs.duration(this.nextHeartbeat.diff(dayjs())).humanize()})`, {subreddit: s.displayLabel});
}
}
}
await this.runModStreams(true);
}
} catch (err) {
this.logger.error('Error occurred during heartbeat', err);
throw err;
} finally {
this.nextHeartbeat = undefined;
this.heartBeating = false;
}
}
async runModStreams(notify = false) {
for(const [k,v] of CacheManager.modStreams) {
if(!v.running && v.listeners('item').length > 0) {
v.startInterval();
this.logger.info(`Starting default ${k.toUpperCase()} mod stream`);
if(notify) {
for(const m of this.subManagers) {
if(m.modStreamCallbacks.size > 0) {
m.notificationManager.handle('runStateChanged', `${k.toUpperCase()} Polling Started`, 'Polling was successfully restarted on heartbeat.');
}
}
}
}
}
}
async runManagers() {
if(this.subManagers.every(x => !x.validConfigLoaded)) {
this.logger.warn('All managers have invalid configs!');
}
for (const manager of this.subManagers) {
if (manager.validConfigLoaded && manager.botState.state !== RUNNING) {
await manager.start('system', {reason: 'Caused by application startup'});
}
}
await this.runModStreams();
if (this.heartbeatInterval !== 0 && !this.heartBeating) {
this.heartbeat();
}
this.runApiNanny();
const emitter = new EventEmitter();
await pEvent(emitter, 'end');
}
async runApiNanny() {
while(true) {
await sleep(10000);
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
const nowish = dayjs().add(10, 'second');
if(nowish.isAfter(this.nextExpiration)) {
// it's possible no api calls are being made because of a hard limit
// need to make an api call to update this
// @ts-ignore
await this.client.getMe();
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
}
const rollingSample = this.apiSample.slice(0, 7)
rollingSample.unshift(this.client.ratelimitRemaining);
this.apiSample = rollingSample;
const diff = this.apiSample.reduceRight((acc: number[], curr, index) => {
if(this.apiSample[index + 1] !== undefined) {
const d = Math.abs(curr - this.apiSample[index + 1]);
if(d === 0) {
return [...acc, 0];
}
return [...acc, d/10];
}
return acc;
}, []);
this.apiRollingAvg = diff.reduce((acc, curr) => acc + curr,0) / diff.length; // api requests per second
this.depletedInSecs = this.client.ratelimitRemaining / this.apiRollingAvg; // number of seconds until current remaining limit is 0
this.apiEstDepletion = dayjs.duration({seconds: this.depletedInSecs});
this.logger.debug(`API Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`);
let hardLimitHit = false;
if(typeof this.hardLimit === 'string') {
const hardDur = parseDuration(this.hardLimit);
hardLimitHit = hardDur.asSeconds() > this.apiEstDepletion.asSeconds();
} else {
hardLimitHit = this.hardLimit > this.client.ratelimitRemaining;
}
if(hardLimitHit) {
if(this.nannyMode === 'hard') {
continue;
}
this.logger.info(`Detected HARD LIMIT of ${this.hardLimit} remaining`, {leaf: 'Api Nanny'});
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${this.apiRollingAvg}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
this.logger.info(`All subreddit event polling has been paused`, {leaf: 'Api Nanny'});
for(const m of this.subManagers) {
m.pauseEvents('system');
m.notificationManager.handle('runStateChanged', 'Hard Limit Triggered', `Hard Limit of ${this.hardLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit event polling has been paused.`, 'system', 'warn');
}
this.nannyMode = 'hard';
continue;
}
let softLimitHit = false;
if(typeof this.softLimit === 'string') {
const softDur = parseDuration(this.softLimit);
softLimitHit = softDur.asSeconds() > this.apiEstDepletion.asSeconds();
} else {
softLimitHit = this.softLimit > this.client.ratelimitRemaining;
}
if(softLimitHit) {
if(this.nannyMode === 'soft') {
continue;
}
this.logger.info(`Detected SOFT LIMIT of ${this.softLimit} remaining`, {leaf: 'Api Nanny'});
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
this.logger.info('Trying to detect heavy usage subreddits...', {leaf: 'Api Nanny'});
let threshold = 0.5;
let offenders = this.subManagers.filter(x => {
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
return combinedPerSec > threshold;
});
if(offenders.length === 0) {
threshold = 0.25;
// reduce threshold
offenders = this.subManagers.filter(x => {
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
return combinedPerSec > threshold;
});
}
if(offenders.length > 0) {
this.logger.info(`Slowing subreddits using > ${threshold}req/s:`, {leaf: 'Api Nanny'});
for(const m of offenders) {
m.delayBy = 1.5;
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
}
} else {
this.logger.info(`Couldn't detect specific offenders, slowing all...`, {leaf: 'Api Nanny'});
for(const m of this.subManagers) {
m.delayBy = 1.5;
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
}
}
this.nannyMode = 'soft';
continue;
}
if(this.nannyMode !== undefined) {
this.logger.info('Turning off due to better conditions...', {leaf: 'Api Nanny'});
for(const m of this.subManagers) {
if(m.delayBy !== undefined) {
m.delayBy = undefined;
m.notificationManager.handle('runStateChanged', 'Normal Processing Resumed', 'Slow Mode has been turned off due to better API conditions', 'system');
}
if(m.queueState.state === PAUSED && m.queueState.causedBy === SYSTEM) {
m.startQueue('system', {reason: 'API Nanny has been turned off due to better API conditions'});
}
if(m.eventsState.state === PAUSED && m.eventsState.causedBy === SYSTEM) {
await m.startEvents('system', {reason: 'API Nanny has been turned off due to better API conditions'});
}
}
this.nannyMode = undefined;
}
}
}
}

129
src/Author/Author.ts Normal file
View File

@@ -0,0 +1,129 @@
import {DurationComparor, UserNoteCriteria} from "../Rule";
import {CompareValue, CompareValueOrPercent} from "../Common/interfaces";
/**
* If present then these Author criteria are checked before running the rule. If criteria fails then the rule is skipped.
* @examples [{"include": [{"flairText": ["Contributor","Veteran"]}, {"isMod": true}]}]
* */
export interface AuthorOptions {
/**
* Will "pass" if any set of AuthorCriteria passes
* */
include?: AuthorCriteria[];
/**
* Only runs if `include` is not present. Will "pass" if any set of the AuthorCriteria **does not** pass
* */
exclude?: AuthorCriteria[];
}
/**
* Criteria with which to test against the author of an Activity. The outcome of the test is based on:
*
* 1. All present properties passing and
* 2. If a property is a list then any value from the list matching
*
* @minProperties 1
* @additionalProperties false
* @examples [{"flairText": ["Contributor","Veteran"], "isMod": true, "name": ["FoxxMD", "AnotherUser"] }]
* */
export interface AuthorCriteria {
/**
* A list of reddit usernames (case-insensitive) to match against. Do not include the "u/" prefix
*
* EX to match against /u/FoxxMD and /u/AnotherUser use ["FoxxMD","AnotherUser"]
* @examples ["FoxxMD","AnotherUser"]
* */
name?: string[],
/**
* A list of (user) flair css class values from the subreddit to match against
* @examples ["red"]
* */
flairCssClass?: string[],
/**
* A list of (user) flair text values from the subreddit to match against
* @examples ["Approved"]
* */
flairText?: string[],
/**
* Is the author a moderator?
* */
isMod?: boolean,
/**
* A list of UserNote properties to check against the User Notes attached to this Author in this Subreddit (the subreddit must have Toolbox enabled and have used User Notes at least once)
* */
userNotes?: UserNoteCriteria[]
/**
* Test the age of the Author's account (when it was created) against this comparison
*
* The syntax is `(< OR > OR <= OR >=) <number> <unit>`
*
* * EX `> 100 days` => Passes if Author's account is older than 100 days
* * EX `<= 2 months` => Passes if Author's account is younger than or equal to 2 months
*
* Unit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)
*
* See https://regexr.com/609n8 for an example
*
* @pattern ^\s*(?<opStr>>|>=|<|<=)\s*(?<time>\d+)\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\s*$
* */
age?: DurationComparor
/**
* A string containing a comparison operator and a value to compare link karma against
*
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign]`
*
* * EX `> 100` => greater than 100 link karma
* * EX `<= 75%` => link karma is less than or equal to 75% of **all karma**
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
* */
linkKarma?: CompareValueOrPercent
/**
* A string containing a comparison operator and a value to compare karma against
*
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign]`
*
* * EX `> 100` => greater than 100 comment karma
* * EX `<= 75%` => comment karma is less than or equal to 75% of **all karma**
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
* */
commentKarma?: CompareValueOrPercent
totalKarma?: CompareValue
/**
* Does Author's account have a verified email?
* */
verified?: boolean
}
export class Author implements AuthorCriteria {
name?: string[];
flairCssClass?: string[];
flairText?: string[];
isMod?: boolean;
userNotes?: UserNoteCriteria[];
age?: string;
commentKarma?: string;
linkKarma?: string;
totalKarma?: string;
verified?: boolean;
constructor(options: AuthorCriteria) {
this.name = options.name;
this.flairCssClass = options.flairCssClass;
this.flairText = options.flairText;
this.isMod = options.isMod;
this.userNotes = options.userNotes;
this.age = options.age;
this.commentKarma = options.commentKarma;
this.linkKarma = options.linkKarma;
this.totalKarma = options.totalKarma;
}
}
export default Author;
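A minimal sketch (not part of this diff) of an AuthorCriteria object using the comparison syntax documented above; the flair values and thresholds are placeholders and the import path is assumed:

import {AuthorCriteria} from "./Author/Author";

const trustedAuthor: AuthorCriteria = {
    flairText: ['Contributor', 'Veteran'], // passes if the author has either flair
    age: '> 100 days',                     // account must be older than 100 days
    linkKarma: '>= 50',                    // at least 50 link karma
    commentKarma: '<= 75%',                // comment karma is at most 75% of all karma
    verified: true,                        // account has a verified email
};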

View File

@@ -1,8 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CommentCheck = void 0;
const index_1 = require("./index");
class CommentCheck extends index_1.Check {
}
exports.CommentCheck = CommentCheck;
//# sourceMappingURL=CommentCheck.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"CommentCheck.js","sourceRoot":"","sources":["CommentCheck.ts"],"names":[],"mappings":";;;AAAA,mCAA8B;AAE9B,MAAa,YAAa,SAAQ,aAAK;CAEtC;AAFD,oCAEC"}

View File

@@ -1,5 +1,17 @@
import {Check} from "./index";
import {Check, CheckOptions} from "./index";
import {CommentState} from "../Common/interfaces";
export class CommentCheck extends Check {
itemIs: CommentState[];
constructor(options: CheckOptions) {
super(options);
const {itemIs = []} = options;
this.itemIs = itemIs;
this.logSummary();
}
logSummary() {
super.logSummary('comment');
}
}

View File

@@ -1,8 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SubmissionCheck = void 0;
const index_1 = require("./index");
class SubmissionCheck extends index_1.Check {
}
exports.SubmissionCheck = SubmissionCheck;
//# sourceMappingURL=SubmissionCheck.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"SubmissionCheck.js","sourceRoot":"","sources":["SubmissionCheck.ts"],"names":[],"mappings":";;;AACA,mCAA8B;AAE9B,MAAa,eAAgB,SAAQ,aAAK;CAEzC;AAFD,0CAEC"}

View File

@@ -1,6 +1,18 @@
import {Check} from "./index";
import {Check, CheckOptions} from "./index";
import {SubmissionState} from "../Common/interfaces";
export class SubmissionCheck extends Check {
itemIs: SubmissionState[];
constructor(options: CheckOptions) {
super(options);
const {itemIs = []} = options;
this.itemIs = itemIs;
this.logSummary();
}
logSummary() {
super.logSummary('submission');
}
}

View File

@@ -1,26 +1,36 @@
import {RuleSet, IRuleSet, RuleSetJSONConfig} from "../Rule/RuleSet";
import {IRule, Triggerable, Rule, RuleJSONConfig, RuleResult} from "../Rule";
import Action, {ActionConfig, ActionJSONConfig} from "../Action";
import {RuleSet, IRuleSet, RuleSetJson, RuleSetObjectJson} from "../Rule/RuleSet";
import {IRule, isRuleSetResult, Rule, RuleJSONConfig, RuleResult, RuleSetResult} from "../Rule";
import Action, {ActionConfig, ActionJson} from "../Action";
import {Logger} from "winston";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RecentActivityRuleJSONConfig} from "../Rule/RecentActivityRule";
import {RepeatSubmissionJSONConfig} from "../Rule/SubmissionRule/RepeatSubmissionRule";
import {FlairActionJSONConfig} from "../Action/SubmissionAction/FlairAction";
import {CommentActionJSONConfig} from "../Action/CommentAction";
import {Comment, Submission} from "snoowrap";
import {actionFactory} from "../Action/ActionFactory";
import {ruleFactory} from "../Rule/RuleFactory";
import {createLabelledLogger, determineNewResults, loggerMetaShuffle, mergeArr} from "../util";
import {AuthorRuleJSONConfig} from "../Rule/AuthorRule";
import {ReportActionJSONConfig} from "../Action/ReportAction";
import {LockActionJSONConfig} from "../Action/LockAction";
import {RemoveActionJSONConfig} from "../Action/RemoveAction";
import {JoinCondition, JoinOperands} from "../Common/interfaces";
import {
createAjvFactory,
FAIL,
mergeArr,
PASS,
resultsSummary,
ruleNamesFromResults,
truncateStringToLength
} from "../util";
import {
ChecksActivityState,
CommentState,
JoinCondition,
JoinOperands,
SubmissionState,
TypedActivityStates
} from "../Common/interfaces";
import * as RuleSchema from '../Schema/Rule.json';
import * as RuleSetSchema from '../Schema/RuleSet.json';
import * as ActionSchema from '../Schema/Action.json';
import Ajv from 'ajv';
import {ActionObjectJson, RuleJson, RuleObjectJson, ActionJson as ActionTypeJson} from "../Common/types";
import {isItem} from "../Utils/SnoowrapUtils";
import ResourceManager, {SubredditResources} from "../Subreddit/SubredditResources";
import {Author, AuthorCriteria, AuthorOptions} from "../Author/Author";
const ajv = new Ajv();
const checkLogName = truncateStringToLength(25);
export class Check implements ICheck {
actions: Action[] = [];
@@ -29,6 +39,14 @@ export class Check implements ICheck {
condition: JoinOperands;
rules: Array<RuleSet | Rule> = [];
logger: Logger;
itemIs: TypedActivityStates;
authorIs: {
include: AuthorCriteria[],
exclude: AuthorCriteria[]
};
dryRun?: boolean;
notifyOnTrigger: boolean;
resources: SubredditResources;
constructor(options: CheckOptions) {
const {
@@ -37,18 +55,32 @@ export class Check implements ICheck {
condition = 'AND',
rules = [],
actions = [],
notifyOnTrigger = false,
subredditName,
itemIs = [],
authorIs: {
include = [],
exclude = [],
} = {},
dryRun,
} = options;
if (options.logger !== undefined) {
// @ts-ignore
this.logger = options.logger.child(loggerMetaShuffle(options.logger, undefined, [`CHK ${name}`]), mergeArr);
} else {
this.logger = createLabelledLogger('Check');
}
this.logger = options.logger.child({labels: [`CHK ${checkLogName(name)}`]}, mergeArr);
const ajv = createAjvFactory(this.logger);
this.resources = ResourceManager.get(subredditName) as SubredditResources;
this.name = name;
this.description = description;
this.notifyOnTrigger = notifyOnTrigger;
this.condition = condition;
this.itemIs = itemIs;
this.authorIs = {
exclude: exclude.map(x => new Author(x)),
include: include.map(x => new Author(x)),
}
this.dryRun = dryRun;
for (const r of rules) {
if (r instanceof Rule || r instanceof RuleSet) {
this.rules.push(r);
@@ -57,16 +89,13 @@ export class Check implements ICheck {
let setErrors: any = [];
let ruleErrors: any = [];
if (valid) {
// @ts-ignore
r.logger = this.logger;
this.rules.push(new RuleSet(r as RuleSetJSONConfig));
const ruleConfig = r as RuleSetObjectJson;
this.rules.push(new RuleSet({...ruleConfig, logger: this.logger, subredditName}));
} else {
setErrors = ajv.errors;
valid = ajv.validate(RuleSchema, r);
if (valid) {
// @ts-ignore
r.logger = this.logger;
this.rules.push(ruleFactory(r as RuleJSONConfig));
this.rules.push(ruleFactory(r as RuleJSONConfig, this.logger, subredditName));
} else {
ruleErrors = ajv.errors;
const leastErrorType = setErrors.length < ruleErrors ? 'RuleSet' : 'Rule';
@@ -86,7 +115,11 @@ export class Check implements ICheck {
} else {
let valid = ajv.validate(ActionSchema, a);
if (valid) {
this.actions.push(actionFactory(a as ActionJSONConfig));
const aj = a as ActionJson;
this.actions.push(actionFactory({
...aj,
dryRun: this.dryRun || aj.dryRun
}, this.logger, subredditName));
// @ts-ignore
a.logger = this.logger;
} else {
@@ -94,75 +127,241 @@ export class Check implements ICheck {
}
}
}
}
async run(item: Submission | Comment, existingResults: RuleResult[] = []): Promise<[boolean, RuleResult[]]> {
this.logger.debug('Starting check');
let allResults: RuleResult[] = [];
let runOne = false;
logSummary(type: string) {
const runStats = [];
const ruleSetCount = this.rules.reduce((x, r) => r instanceof RuleSet ? x + 1 : x, 0);
const rulesInSetsCount = this.rules.reduce((x, r) => r instanceof RuleSet ? x + r.rules.length : x, 0);
if (ruleSetCount > 0) {
runStats.push(`${ruleSetCount} Rule Sets (${rulesInSetsCount} Rules)`);
}
const topRuleCount = this.rules.reduce((x, r) => r instanceof Rule ? x + 1 : x, 0);
if (topRuleCount > 0) {
runStats.push(`${topRuleCount} Top-Level Rules`);
}
runStats.push(`${this.actions.length} Actions`);
// not sure if this should be info or verbose
this.logger.info(`${type.toUpperCase()} (${this.condition})${this.notifyOnTrigger ? ' ||Notify on Trigger|| ' : ''} => ${runStats.join(' | ')}${this.description !== undefined ? ` => ${this.description}` : ''}`);
if (this.rules.length === 0 && this.itemIs.length === 0 && this.authorIs.exclude.length === 0 && this.authorIs.include.length === 0) {
this.logger.warn('No rules, item tests, or author tests found -- this check will ALWAYS PASS!');
}
let ruleSetIndex = 1;
for (const r of this.rules) {
const combinedResults = [...existingResults, ...allResults];
const [passed, results] = await r.run(item, combinedResults);
//allResults = allResults.concat(determineNewResults(combinedResults, results));
allResults = allResults.concat(results);
if (passed === null) {
continue;
}
runOne = true;
if (passed) {
if (this.condition === 'OR') {
return [true, allResults];
if (r instanceof RuleSet) {
for (const ru of r.rules) {
this.logger.verbose(`(Rule Set ${ruleSetIndex} ${r.condition}) => ${ru.getRuleUniqueName()}`);
}
} else if (this.condition === 'AND') {
return [false, allResults];
ruleSetIndex++;
} else {
this.logger.verbose(`(Rule) => ${r.getRuleUniqueName()}`);
}
}
if (!runOne) {
return [false, allResults];
for (const a of this.actions) {
this.logger.verbose(`(Action) => ${a.getActionUniqueName()}`);
}
return [true, allResults];
}
async runActions(item: Submission | Comment, client: Snoowrap): Promise<void> {
for (const a of this.actions) {
await a.handle(item, client);
async runRules(item: Submission | Comment, existingResults: RuleResult[] = []): Promise<[boolean, RuleResult[]]> {
try {
let allRuleResults: RuleResult[] = [];
let allResults: (RuleResult | RuleSetResult)[] = [];
const [itemPass, crit] = isItem(item, this.itemIs, this.logger);
if (!itemPass) {
this.logger.verbose(`${FAIL} => Item did not pass 'itemIs' test`);
return [false, allRuleResults];
}
let authorPass = null;
if (this.authorIs.include !== undefined && this.authorIs.include.length > 0) {
for (const auth of this.authorIs.include) {
if (await this.resources.testAuthorCriteria(item, auth)) {
authorPass = true;
break;
}
}
if (!authorPass) {
this.logger.verbose(`${FAIL} => Inclusive author criteria not matched`);
return Promise.resolve([false, allRuleResults]);
}
}
if (authorPass === null && this.authorIs.exclude !== undefined && this.authorIs.exclude.length > 0) {
for (const auth of this.authorIs.exclude) {
if (await this.resources.testAuthorCriteria(item, auth, false)) {
authorPass = true;
break;
}
}
if (!authorPass) {
this.logger.verbose(`${FAIL} => Exclusive author criteria not matched`);
return Promise.resolve([false, allRuleResults]);
}
}
if (this.rules.length === 0) {
this.logger.info(`${PASS} => No rules to run, check auto-passes`);
return [true, allRuleResults];
}
let runOne = false;
for (const r of this.rules) {
//let results: RuleResult | RuleSetResult;
const combinedResults = [...existingResults, ...allRuleResults];
const [passed, results] = await r.run(item, combinedResults);
if (isRuleSetResult(results)) {
allRuleResults = allRuleResults.concat(results.results);
} else {
allRuleResults = allRuleResults.concat(results as RuleResult);
}
allResults.push(results);
if (passed === null) {
continue;
}
runOne = true;
if (passed) {
if (this.condition === 'OR') {
this.logger.info(`${PASS} => Rules: ${resultsSummary(allResults, this.condition)}`);
return [true, allRuleResults];
}
} else if (this.condition === 'AND') {
this.logger.verbose(`${FAIL} => Rules: ${resultsSummary(allResults, this.condition)}`);
return [false, allRuleResults];
}
}
if (!runOne) {
this.logger.verbose(`${FAIL} => All Rules skipped because of Author checks or itemIs tests`);
return [false, allRuleResults];
} else if (this.condition === 'OR') {
// if OR and did not return already then none passed
this.logger.verbose(`${FAIL} => Rules: ${resultsSummary(allResults, this.condition)}`);
return [false, allRuleResults];
}
// otherwise AND and did not return already so all passed
this.logger.info(`${PASS} => Rules: ${resultsSummary(allResults, this.condition)}`);
return [true, allRuleResults];
} catch (e) {
e.logged = true;
this.logger.warn(`Running rules failed due to uncaught exception`, e);
throw e;
}
}
async runActions(item: Submission | Comment, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<Action[]> {
const dr = runtimeDryrun || this.dryRun;
this.logger.debug(`${dr ? 'DRYRUN - ' : ''}Running Actions`);
const runActions: Action[] = [];
for (const a of this.actions) {
try {
await a.handle(item, ruleResults, runtimeDryrun);
runActions.push(a);
} catch (err) {
this.logger.error(`Action ${a.getActionUniqueName()} encountered an error while running`, err);
}
}
this.logger.info(`${dr ? 'DRYRUN - ' : ''}Ran Actions: ${runActions.map(x => x.getActionUniqueName()).join(' | ')}`);
return runActions;
}
}
export interface ICheck extends JoinCondition {
export interface ICheck extends JoinCondition, ChecksActivityState {
/**
* A friendly name for this check (highly recommended) -- EX "repeatCrosspostReport"
* Friendly name for this Check EX "crosspostSpamCheck"
*
* Can only contain letters, numbers, underscore, spaces, and dashes
*
* @pattern ^[a-zA-Z]([\w -]*[\w])?$
* @examples ["myNewCheck"]
* */
name: string,
/**
* @examples ["A short description of what this check looks for and actions it performs"]
* */
description?: string,
/**
* Use this option to override the `dryRun` setting for all of its `Actions`
* @examples [false, true]
* */
dryRun?: boolean;
/**
* A list of criteria to test the state of the `Activity` against before running the check.
*
* If any set of criteria passes the Check will be run. If the criteria fails then the Check will fail.
*
* @examples [[{"over_18": true, "removed": false}]]
* */
itemIs?: TypedActivityStates
/**
* If present then these Author criteria are checked before running the Check. If criteria fails then the Check will fail.
* */
authorIs?: AuthorOptions
}
export interface CheckOptions extends ICheck {
rules: Array<IRuleSet | IRule>
actions: ActionConfig[]
logger?: Logger
logger: Logger
subredditName: string
notifyOnTrigger?: boolean
}
/**
* An object consisting of Rules (tests) and Actions to perform if Rules are triggered
* @see {isCheckConfig} ts-auto-guard:type-guard
* */
export interface CheckJSONConfig extends ICheck {
export interface CheckJson extends ICheck {
/**
* The type of event (new submission or new comment) this check should be run against
* @examples ["submission", "comment"]
*/
kind: 'submission' | 'comment'
/**
* Rules are run in the order found in configuration. Can be Rules or RuleSets
* @minItems 1
* A list of Rules to run.
*
* If `Rule` objects are triggered based on `condition` then `actions` will be performed.
*
* Can be `Rule`, `RuleSet`, or the `name` of any **named** `Rule` in your subreddit's configuration.
*
* **If `rules` is an empty array or not present then `actions` are performed immediately.**
* */
rules: Array<RuleSetJSONConfig | RecentActivityRuleJSONConfig | RepeatSubmissionJSONConfig | AuthorRuleJSONConfig>
rules?: Array<RuleSetJson | RuleJson>
/**
* The actions to run after the check is successfully triggered. ALL actions will run in the order they are listed
* The `Actions` to run after the check is successfully triggered. ALL `Actions` will run in the order they are listed
*
* Can be `Action` or the `name` of any **named** `Action` in your subreddit's configuration
*
* @minItems 1
* @examples [[{"kind": "comment", "content": "this is the content of the comment", "distinguish": true}, {"kind": "lock"}]]
* */
actions: Array<FlairActionJSONConfig | CommentActionJSONConfig | ReportActionJSONConfig | LockActionJSONConfig | RemoveActionJSONConfig>
actions: Array<ActionTypeJson>
/**
* If notifications are configured and this is `true` then an `eventActioned` event will be sent when this check is triggered.
*
* @default false
* */
notifyOnTrigger?: boolean,
}
export interface SubmissionCheckJson extends CheckJson {
kind: 'submission'
itemIs?: SubmissionState[]
}
export interface CommentCheckJson extends CheckJson {
kind: 'comment'
itemIs?: CommentState[]
}
export type CheckStructuredJson = SubmissionCheckStructuredJson | CommentCheckStructuredJson;
// export interface CheckStructuredJson extends CheckJson {
// rules: Array<RuleSetObjectJson | RuleObjectJson>
// actions: Array<ActionObjectJson>
// }
export interface SubmissionCheckStructuredJson extends SubmissionCheckJson {
rules: Array<RuleSetObjectJson | RuleObjectJson>
actions: Array<ActionObjectJson>
}
export interface CommentCheckStructuredJson extends CommentCheckJson {
rules: Array<RuleSetObjectJson | RuleObjectJson>
actions: Array<ActionObjectJson>
}
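A minimal sketch (not part of this diff) of a SubmissionCheckJson that has no rules, so its actions run whenever the itemIs/authorIs criteria pass; the names, report text, and import path are placeholders:

import {SubmissionCheckJson} from "./Check";

const nsfwReviewCheck: SubmissionCheckJson = {
    name: 'nsfwReviewCheck',
    kind: 'submission',
    condition: 'AND',
    itemIs: [{over_18: true}],            // only consider NSFW submissions
    authorIs: {exclude: [{isMod: true}]}, // skip submissions made by moderators
    actions: [
        {kind: 'report', content: 'NSFW submission from a non-mod, please review'},
    ],
    notifyOnTrigger: true,                // send an eventActioned notification when triggered
};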

2
src/Common/defaults.ts Normal file
View File

@@ -0,0 +1,2 @@
export const cacheOptDefaults = {ttl: 60, max: 500, checkPeriod: 600};
export const cacheTTLDefaults = {authorTTL: 60, userNotesTTL: 300, wikiTTL: 300};

View File

@@ -1,51 +1,191 @@
import {Duration} from "dayjs/plugin/duration";
import Poll from "snoostorm/out/util/Poll";
import Snoowrap from "snoowrap";
/**
* An ISO 8601 Duration
* @pattern ^(-?)P(?=\d|T\d)(?:(\d+)Y)?(?:(\d+)M)?(?:(\d+)([DW]))?(?:T(?:(\d+)H)?(?:(\d+)M)?(?:(\d+(?:\.\d+)?)S)?)?$
* */
export type ISO8601 = string;
export type ActivityWindowType = Duration | number | ActivityWindowCriteria;
export type Duration = ISO8601 | DurationObject;
/**
* If both properties are defined then the first criteria met will be used IE if # of activities = count before duration is reached then count will be used, or vice versa
* A shorthand value for a DayJS duration consisting of a number value and time unit
*
* * EX `9 days`
* * EX `3 months`
* @pattern ^\s*(?<time>\d+)\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\s*$
* */
export type DayJSShorthand = string;
export type DurationString = DayJSShorthand | ISO8601;
/**
* A value to define the range of Activities to retrieve.
*
* Acceptable values:
*
* **`ActivityWindowCriteria` object**
*
* Allows specifying multiple range properties and more specific behavior
*
* **A `number` of Activities to retrieve**
*
* * EX `100` => 100 Activities
*
* *****
*
* Any of the below values, which specify the amount of time to subtract from `NOW` to create a time range IE `NOW <---> [duration] ago`
*
* Acceptable values:
*
* **A `string` consisting of a value and a [Day.js](https://day.js.org/docs/en/durations/creating#list-of-all-available-units) time UNIT**
*
* * EX `9 days` => Range is `NOW <---> 9 days ago`
*
* **A [Day.js](https://day.js.org/docs/en/durations/creating) `object`**
*
* * EX `{"days": 90, "minutes": 15}` => Range is `NOW <---> 90 days and 15 minutes ago`
*
* **An [ISO 8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) `string`**
*
* * EX `PT15M` => 15 minutes => Range is `NOW <----> 15 minutes ago`
*
* @examples ["90 days"]
* */
export type ActivityWindowType = ActivityWindowCriteria | DurationVal | number;
export type DurationVal = DurationString | DurationObject;
/**
* Multiple properties that may be used to define what range of Activity to retrieve.
*
* May specify one or both properties, along with the `satisfyOn` property, to affect the retrieval behavior.
*
* @examples [{"count": 100, "duration": {"days": 90}}]
* @minProperties 1
* @additionalProperties false
* */
export interface ActivityWindowCriteria {
/**
* The number of activities (submissions/comments) to consider
* @examples [15]
* */
count?: number,
/**
* An ISO 8601 duration or Day.js duration object.
* A value that specifies the amount of time to subtract from `NOW` to create a time range IE `NOW <---> [duration] ago`
*
* The duration will be subtracted from the time when the rule is run to create a time range like this:
* Acceptable values:
*
* endTime = NOW <----> startTime = (NOW - duration)
* **A `string` consisting of a value and a [Day.js](https://day.js.org/docs/en/durations/creating) time unit** ([test your value](https://regexr.com/61em3))
*
* EX endTime = 3:00PM <----> startTime = (NOW - 15 minutes) = 2:45PM -- so look for activities between 2:45PM and 3:00PM
* @examples ["PT1M", {"minutes": 15}]
* * EX `9 days` => Range is `NOW <---> 9 days ago`
*
* **A [Day.js](https://day.js.org/docs/en/durations/creating) `object`**
*
* * EX `{"days": 90, "minutes": 15}` => Range is `NOW <---> 90 days and 15 minutes ago`
*
* **An [ISO 8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) `string`** ([test your value](https://regexr.com/61em9))
*
* * EX `PT15M` => 15 minutes => Range is `NOW <----> 15 minutes ago`
*
* @examples ["90 days", "PT15M", {"minutes": 15}]
* */
duration?: Duration
duration?: DurationVal
/**
* Define the condition under which both criteria are considered met
*
* **If `any` then it will retrieve Activities until one of the criteria is met, whichever occurs first**
*
* EX `{"count": 100, duration: "90 days"}`:
* * If 90 days of activities = 40 activities => returns 40 activities
* * If 100 activities is only 20 days => 100 activities
*
* **If `all` then both criteria must be met.**
*
* Effectively, whichever criteria produces the most Activities...
*
* EX `{"count": 100, duration: "90 days"}`:
* * If at 90 days of activities => 40 activities, continue retrieving results until 100 => results in >90 days of activities
* * If at 100 activities => 20 days of activities, continue retrieving results until 90 days => results in >100 activities
*
* @examples ["any"]
* @default any
* */
satisfyOn?: 'any' | 'all';
/**
* Filter which subreddits (case-insensitive) Activities are retrieved from.
*
* **Note:** Filtering occurs **before** `duration/count` checks are performed.
* */
subreddits?: {
/**
* Include only results from these subreddits
*
* @examples [["mealtimevideos","askscience"]]
* */
include?: string[],
/**
* Exclude any results from these subreddits
*
* **Note:** `exclude` is ignored if `include` is present
*
* @examples [["mealtimevideos","askscience"]]
* */
exclude?: string[],
}
}
/**
* A Day.js duration object
* A [Day.js duration object](https://day.js.org/docs/en/durations/creating)
*
* https://day.js.org/docs/en/durations/creating
* @examples [{"minutes": 30, "hours": 1}]
* @minProperties 1
* @additionalProperties false
* */
export interface DurationObject {
/**
* @examples [15]
* */
seconds?: number
/**
* @examples [50]
* */
minutes?: number
/**
* @examples [4]
* */
hours?: number
/**
* @examples [7]
* */
days?: number
/**
* @examples [2]
* */
weeks?: number
/**
* @examples [3]
* */
months?: number
/**
* @examples [0]
* */
years?: number
}
export interface DurationComparison {
operator: StringOperator,
duration: Duration
}
export interface GenericComparison {
operator: StringOperator,
value: number,
isPercent: boolean,
extra?: string,
displayText: string,
}
export const windowExample: ActivityWindowType[] = [
15,
@@ -68,15 +208,7 @@ export const windowExample: ActivityWindowType[] = [
export interface ActivityWindow {
/**
* Criteria for defining what set of activities should be considered.
*
* The value of this property may be either count OR duration -- to use both write it as an ActivityWindowCriteria
*
* See ActivityWindowCriteria for descriptions of what count/duration do
* @examples require('./interfaces.ts').windowExample
* @default 15
*/
window?: ActivityWindowType,
}
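A minimal sketch (not part of this diff) of an ActivityWindowCriteria combining count and duration with satisfyOn, plus a subreddit filter; the values and the import path are illustrative only:

import {ActivityWindowCriteria} from "./Common/interfaces";

const recentWindow: ActivityWindowCriteria = {
    count: 100,             // stop after 100 activities...
    duration: '90 days',    // ...or 90 days of history
    satisfyOn: 'any',       // whichever limit is reached first wins
    subreddits: {
        exclude: ['mealtimevideos'],  // ignore activities from this subreddit
    },
};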
@@ -92,81 +224,615 @@ export interface RichContent {
/**
* The Content to submit for this Action. Content is interpreted as reddit-flavored Markdown.
*
* If value starts with 'wiki:' then the proceeding value will be used to get a wiki page
* If value starts with `wiki:` then the value following it will be used to get a wiki page from the current subreddit
*
* EX "wiki:botconfig/mybot" tries to get https://reddit.com/mySubredditExample/wiki/botconfig/mybot
* * EX `wiki:botconfig/mybot` tries to get `https://reddit.com/r/currentSubreddit/wiki/botconfig/mybot`
*
* EX "this is plain text"
* If the value starts with `wiki:` and ends with `|someValue` then `someValue` will be used as the base subreddit for the wiki page
*
* EX "this is **bold** markdown text"
* * EX `wiki:replytemplates/test|ContextModBot` tries to get `https://reddit.com/r/ContextModBot/wiki/replytemplates/test`
*
* @examples ["this is plain text", "this is **bold** markdown text", "wiki:botconfig/acomment" ]
* If the value starts with `url:` then the value is fetched as an external url and expects raw text returned
*
* * EX `url:https://pastebin.com/raw/38qfL7mL` tries to get the text response of `https://pastebin.com/raw/38qfL7mL`
*
* If none of the above prefixes is used the value is treated as the raw content
*
* * EX `this is **bold** markdown text` => "this is **bold** markdown text"
*
* All Content is rendered using [mustache](https://github.com/janl/mustache.js/#templates) to enable [Action Templating](https://github.com/FoxxMD/reddit-context-bot#action-templating).
*
* The following properties are always available in the template (view individual Rules to see rule-specific template data):
* ```
* item.kind => The type of Activity that was checked (comment/submission)
* item.author => The name of the Author of the Activity EX FoxxMD
* item.permalink => A permalink URL to the Activity EX https://reddit.com/r/yourSub/comments/o1h0i0/title_name/1v3b7x
* item.url => If the Activity is a Link Submission then the external URL
* item.title => If the Activity is a Submission then the title of that Submission
* rules => An object containing RuleResults of all the rules run for this check. See Action Templating for more details on naming
* ```
*
* @examples ["This is the content of a comment/report/usernote", "this is **bold** markdown text", "wiki:botconfig/acomment" ]
* */
content: string,
content?: string,
}
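// Illustrative `content` values (wiki pages/URLs here are hypothetical) showing each form described above:
const fromCurrentSubWiki = 'wiki:botconfig/mybot';                  // wiki page from the current subreddit
const fromOtherSubWiki = 'wiki:replytemplates/test|ContextModBot';  // wiki page from another subreddit
const fromExternalUrl = 'url:https://pastebin.com/raw/38qfL7mL';    // raw text fetched from an external url
const inlineMarkdown = 'Hi {{item.author}}, your {{item.kind}} was actioned -- see {{item.permalink}}'; // mustache templating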
export interface RequiredRichContent extends RichContent {
content: string
}
/**
* A list of subreddits (case-insensitive) to look for. Do not include "r/" prefix.
* A list of subreddits (case-insensitive) to look for.
*
* EX to match against /r/mealtimevideos and /r/askscience use ["mealtimevideos","askscience"]
* EX ["mealtimevideos","askscience"]
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* */
export type SubredditList = string[];
export interface SubredditCriteria {
subreddits: SubredditList
/**
* A list of Subreddits (by name, case-insensitive) to look for.
*
* EX ["mealtimevideos","askscience"]
* @examples [["mealtimevideos","askscience"]]
* @minItems 1
* */
subreddits: string[]
}
export type JoinOperands = 'OR' | 'AND';
export interface JoinCondition {
/**
* Under what condition should a set of rules be considered "successful"?
* Under what condition should the set of `Rule` objects that were run be considered "successful"?
*
* If "OR" then ANY triggered rule results in success.
* If `OR` then **any** triggered `Rule` object results in success.
*
* If "AND" then ALL rules must be triggered to result in success.
* If `AND` then **all** `Rule` objects must be triggered to result in success.
*
* @default "AND"
* @examples ["AND"]
* */
condition?: JoinOperands,
}
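// Sketch of the semantics described above (simplified; the real evaluation lives in the Check logic):
const isSuccessfulSketch = (condition: JoinOperands, ruleTriggered: boolean[]): boolean =>
    condition === 'OR' ? ruleTriggered.some(t => t) : ruleTriggered.every(t => t);
// isSuccessfulSketch('AND', [true, false]) => false | isSuccessfulSketch('OR', [true, false]) => true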
/**
* You may specify polling options independently for submissions/comments
* */
export interface PollingOptions {
export type PollOn = 'unmoderated' | 'modqueue' | 'newSub' | 'newComm';
export interface PollingOptionsStrong extends PollingOptions {
limit: number,
interval: number,
}
export interface PollingDefaults {
/**
* Polling options for submission events
* The maximum number of Activities to get on every request
* @default 50
* @examples [50]
* */
submissions?: {
/**
* The number of submissions to pull from /r/subreddit/new on every request
* @default 10
* */
limit?: number
/**
* Amount of time, in seconds, to wait between requests
*
* @default 30
* @examples [30]
* */
interval?: number,
/**
* Delay processing Activity until it is `N` seconds old
*
* Useful if there are other bots that may process an Activity and you want this bot to run first/last/etc.
*
* If the Activity is already `N` seconds old when it is initially retrieved no refresh of the Activity occurs (no API request is made) and it is immediately processed.
*
* */
delayUntil?: number,
}
/**
* A configuration for where, how, and when to poll Reddit for Activities to process
*
* @examples [{"pollOn": "unmoderated","limit": 25, "interval": 20000}]
* */
export interface PollingOptions extends PollingDefaults {
/**
* What source to get Activities from. The source you choose will modify how the bot behaves so choose carefully.
*
* ### unmoderated (default)
*
* Activities that have yet to be approved/removed by a mod. This includes all modqueue (reports/spam) **and new submissions**.
*
* Use this if you want the bot to act like a regular moderator and act on anything that can be seen from mod tools.
*
* **Note:** Does NOT include new comments, only comments that are reported/filtered by Automoderator. If you want to process all unmoderated AND all new comments then use some version of `polling: ["unmoderated","newComm"]`
*
* ### modqueue
*
* Activities requiring moderator review, such as reported things and items caught by the spam filter.
*
* Use this if you only want the Bot to process reported/filtered Activities.
*
* ### newSub
*
* Get only `Submissions` that show up in `/r/mySubreddit/new`
*
* Use this if you want the bot to process Submissions only when:
*
* * they are not initially filtered by Automoderator or
* * after they have been manually approved from modqueue
*
* ### newComm
*
* Get only new `Comments`
*
* Use this if you want the bot to process Comments only when:
*
* * they are not initially filtered by Automoderator or
* * after they have been manually approved from modqueue
*
* */
pollOn: 'unmoderated' | 'modqueue' | 'newSub' | 'newComm'
}
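// Hypothetical polling configuration (illustrative): the shorthand string form uses the defaults
// documented above while the object form overrides them per source.
const examplePolling: (PollOn | PollingOptions)[] = [
    'unmoderated',
    {pollOn: 'newComm', limit: 25, interval: 20, delayUntil: 60}
];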
export interface TTLConfig {
/**
* Amount of time, in seconds, author activities (Comments/Submission) should be cached
* @examples [60]
* @default 60
* */
authorTTL?: number;
/**
* Amount of time, in seconds, wiki content pages should be cached
* @examples [300]
* @default 300
* */
wikiTTL?: number;
/**
* Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached
* @examples [300]
* @default 300
* */
userNotesTTL?: number;
}
export interface SubredditCacheConfig extends TTLConfig {
provider?: CacheProvider | CacheOptions
}
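// Hypothetical per-subreddit caching config (TTL values are in seconds, per TTLConfig above; redis connection values are placeholders):
const exampleCaching: SubredditCacheConfig = {
    authorTTL: 60,
    wikiTTL: 300,
    userNotesTTL: 300,
    provider: {store: 'redis', host: 'localhost', port: 6379, db: 0}
};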
export interface Footer {
/**
* Customize the footer for Actions that send replies (Comment/Ban)
*
* If `false` no footer is appended
*
* If `string` the value is rendered as markdown or will use the `wiki:` parser in the same way `content` properties on Actions are rendered with [templating](https://github.com/FoxxMD/reddit-context-bot#action-templating).
*
* If footer is `undefined` (not set) the default footer will be used:
*
* > *****
* > This action was performed by [a bot.] Mention a moderator or [send a modmail] if you have any ideas, questions, or concerns about this action.
*
* *****
*
* The following properties are available for [templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):
* ```
* subName => name of subreddit Action was performed in (EX 'mealtimevideos')
* permaLink => The permalink for the Activity the Action was performed on EX https://reddit.com/r/yourSub/comments/o1h0i0/title_name/1v3b7x
* modmaiLink => An encoded URL that will open a new message to your subreddit with the Action permalink appended to the body
* botLink => A permalink to the FAQ for this bot.
* ```
* If you use your own footer or no footer **please link back to the bot FAQ** using the `{{botLink}}` property in your content :)
*
* */
footer?: false | string
}
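// Hypothetical custom footer (illustrative) built from the templating properties listed above:
const exampleFooter: false | string = '*****\n\nQuestions about this action? [Message the mods]({{modmaiLink}}) | powered by {{botLink}}';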
export interface ManagerOptions {
/**
* An array of sources to process Activities from
*
* Values in the array may be either:
*
* **A `string` representing the `pollOn` value to use**
*
* One of:
*
* * `unmoderated`
* * `modqueue`
* * `newSub`
* * `newComm`
*
* with the rest of the `PollingOptions` properties as defaults
*
* **A `PollingOptions` object**
*
* If you want to specify non-default properties
*
* ****
* If not specified the default is `["unmoderated"]`
*
* @default [["unmoderated"]]
* @example [["unmoderated","newComm"]]
* */
polling?: (string | PollingOptions)[]
/**
* Per-subreddit config for caching TTL values. If set to `false` caching is disabled.
* */
caching?: SubredditCacheConfig
/**
* Use this option to override the `dryRun` setting for all `Checks`
*
* @default undefined
* @examples [false,true]
* */
dryRun?: boolean;
/**
* Customize the footer for Actions that send replies (Comment/Ban). **This sets the default value for all Actions without `footer` specified in their configuration.**
*
* If `false` no footer is appended
*
* If `string` the value is rendered as markdown or will use the `wiki:` parser in the same way `content` properties on Actions are rendered with [templating](https://github.com/FoxxMD/reddit-context-bot#action-templating).
*
* If footer is `undefined` (not set) the default footer will be used:
*
* > *****
* > This action was performed by [a bot.] Mention a moderator or [send a modmail] if you have any ideas, questions, or concerns about this action.
*
* *****
*
* The following properties are available for [templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):
* ```
* subName => name of subreddit Action was performed in (EX 'mealtimevideos')
* permaLink => The permalink for the Activity the Action was performed on EX https://reddit.com/r/yourSub/comments/o1h0i0/title_name/1v3b7x
* modmaiLink => An encoded URL that will open a new message to your subreddit with the Action permalink appended to the body
* botLink => A permalink to the FAQ for this bot.
* ```
* If you use your own footer or no footer **please link back to the bot FAQ** using the `{{botLink}}` property in your content :)
*
* @default undefined
* */
footer?: false | string
/**
* An alternate identifier to use in logs to identify your subreddit
*
* If your subreddit has a very long name it can make logging unwieldy. Specify a shorter name here to make log statements more readable (and shorter)
* @example ["shortName"]
* */
nickname?: string
notifications?: NotificationConfig
}
/**
* A string containing a comparison operator and a value to compare against
*
* The syntax is `(< OR > OR <= OR >=) <number>`
*
* * EX `> 100` => greater than 100
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
* */
export type CompareValue = string;
/**
* A string containing a comparison operator and a value to compare against
*
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign]`
*
* * EX `> 100` => greater than 100
* * EX `<= 75%` => less than or equal to 75%
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
* */
export type CompareValueOrPercent = string;
export type StringOperator = '>' | '>=' | '<' | '<=';
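// Sketch of how a CompareValueOrPercent string breaks down against the @pattern documented above.
// The real parsing lives in util (parseGenericValueOrPercentComparison); names here are illustrative.
const parseCompareSketch = (val: string) => {
    const m = val.match(/^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$/);
    if (m === null) throw new Error(`Invalid comparison: ${val}`);
    const [, operator, value, percent, extra] = m;
    return {operator: operator as StringOperator, value: Number(value), isPercent: percent === '%', extra: extra.trim()};
};
// parseCompareSketch('<= 75%') => {operator: '<=', value: 75, isPercent: true, extra: ''}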
export interface ThresholdCriteria {
/**
* The number or percentage to trigger this criteria at
*
* * If `threshold` is a `number` then it is the absolute number of items to trigger at
* * If `threshold` is a `string` with percentage (EX `40%`) then it is the percentage of the total this item must reach to trigger
*
* @default 10%
* @examples ["10%", 15]
* */
threshold: number | string
/**
* @examples [">",">=","<","<="]
* */
condition: StringOperator
}
export interface ChecksActivityState {
itemIs?: TypedActivityStates
}
export interface ActivityState {
removed?: boolean
filtered?: boolean
deleted?: boolean
locked?: boolean
spam?: boolean
stickied?: boolean
distinguished?: boolean
approved?: boolean
}
/**
* Different attributes a `Submission` can have. Only include a property if you want to check it.
* @examples [{"over_18": true, "removed": false}]
* */
export interface SubmissionState extends ActivityState {
pinned?: boolean
spoiler?: boolean
/**
* NSFW
* */
over_18?: boolean
is_self?: boolean
}
/**
* Different attributes a `Comment` can have. Only include a property if you want to check it.
* @examples [{"op": true, "removed": false}]
* */
export interface CommentState extends ActivityState {
/**
* Is this Comment Author also the Author of the Submission this comment is in?
* */
op?: boolean
}
export type TypedActivityStates = SubmissionState[] | CommentState[];
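// Hypothetical `itemIs` filter: only match Submissions that are marked NSFW and have not been removed
// (properties per SubmissionState above):
const exampleItemIs: TypedActivityStates = [{over_18: true, removed: false}];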
export interface DomainInfo {
display: string,
domain: string,
aliases: string[],
provider?: string,
mediaType?: string
}
export const DEFAULT_POLLING_INTERVAL = 30;
export const DEFAULT_POLLING_LIMIT = 50;
export type Invokee = 'system' | 'user';
export const SYSTEM = 'system';
export const USER = 'user';
export type RunState = 'running' | 'paused' | 'stopped';
export const STOPPED = 'stopped';
export const RUNNING = 'running';
export const PAUSED = 'paused';
export interface NamedGroup {
[name: string]: string
}
export interface GlobalRegExResult {
match: string,
groups: string[],
named: NamedGroup | undefined
}
export interface RegExResult {
matched: boolean,
matches: string[],
global: GlobalRegExResult[]
}
type LogLevel = "error" | "warn" | "info" | "verbose" | "debug";
export type CacheProvider = 'memory' | 'redis' | 'none';
// export type StrongCache = SubredditCacheConfig & {
// provider: CacheOptions
// }
export type StrongCache = {
authorTTL: number,
userNotesTTL: number,
wikiTTL: number
provider: CacheOptions
}
export interface CacheOptions {
store: CacheProvider,
host?: string | undefined,
port?: number | undefined,
auth_pass?: string | undefined,
db?: number | undefined,
ttl?: number,
max?: number
}
export type NotificationProvider = 'discord';
export type NotificationEventType = 'runStateChanged' | 'pollingError' | 'eventActioned' | 'configUpdated'
export interface NotificationProviderConfig {
name: string
type: NotificationProvider
}
export interface DiscordProviderConfig extends NotificationProviderConfig {
url: string
}
export type NotificationProviders = DiscordProviderConfig;
export interface NotificationEventConfig {
types: NotificationEventType[]
providers: string[]
}
export interface NotificationContent {
logLevel?: string
title: string
body?: string
footer?: string
}
export type NotificationEvents = (NotificationEventType[] | NotificationEventConfig)[];
export interface NotificationConfig {
providers: NotificationProviders[],
events: NotificationEvents
}
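// Hypothetical notification setup (webhook URL is a placeholder): one Discord provider,
// notified when the run state changes or polling errors occur.
const exampleNotifications: NotificationConfig = {
    providers: [{name: 'modDiscord', type: 'discord', url: 'https://discord.com/api/webhooks/exampleId/exampleToken'}],
    events: [['runStateChanged', 'pollingError']]
};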
export interface Notifier {
name: string
type: string;
handle: Function
}
export interface ManagerStateChangeOption {
reason?: string
suppressNotification?: boolean
}
export interface OperatorJsonConfig {
operator?: {
name?: string,
display?: string,
},
credentials?: {
clientId?: string,
clientSecret?: string,
redirectUri?: string,
accessToken?: string,
refreshToken?: string
},
notifications?: NotificationConfig
logging?: {
level?: LogLevel,
path?: string,
},
snoowrap?: {
proxy?: string,
debug?: boolean,
}
subreddits?: {
names?: string[],
dryRun?: boolean,
wikiConfig?: string,
heartbeatInterval?: number,
},
polling?: PollingDefaults & {
sharedMod?: boolean,
limit?: number,
/**
* Amount of time, in milliseconds, to wait between requests to /r/subreddit/new
*
* @default 10000
* */
interval?: number,
},
/**
* Polling options for comment events
* */
comments?: {
web?: {
enabled?: boolean,
port?: number,
session?: {
provider?: 'memory' | 'redis' | CacheOptions,
secret?: string,
}
logLevel?: LogLevel,
maxLogs?: number,
}
// caching?: (SubredditCacheConfig & {
// provider?: CacheProvider | CacheOptions | undefined
// }) | CacheProvider | undefined
caching?: {
/**
* The number of new comments to pull on every request
* @default 10
* */
limit?: number,
/**
* Amount of time, in milliseconds, to wait between requests for new comments
*
* Amount of time, in milliseconds, author activities (Comments/Submission) should be cached
* @examples [10000]
* @default 10000
* */
interval?: number,
authorTTL?: number;
/**
* Amount of time, in milliseconds, wiki content pages should be cached
* @examples [300000]
* @default 300000
* */
wikiTTL?: number;
/**
* Amount of time, in milliseconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached
* @examples [60000]
* @default 60000
* */
userNotesTTL?: number;
provider?: CacheProvider | CacheOptions
} | CacheProvider
api?: {
softLimit?: number,
hardLimit?: number,
}
}
export interface OperatorConfig extends OperatorJsonConfig {
operator: {
name?: string
display?: string,
},
credentials: {
clientId: string,
clientSecret: string,
redirectUri?: string,
accessToken?: string,
refreshToken?: string
},
notifications?: NotificationConfig
logging: {
level: LogLevel,
path?: string,
},
snoowrap: {
proxy?: string,
debug?: boolean,
}
subreddits: {
names?: string[],
dryRun?: boolean,
wikiConfig: string,
heartbeatInterval: number,
},
polling: {
sharedMod: boolean,
limit: number,
interval: number,
},
web: {
enabled: boolean,
port: number,
session: {
provider: CacheOptions,
secret: string,
}
logLevel?: LogLevel,
maxLogs: number,
}
caching: {
authorTTL: number,
userNotesTTL: number,
wikiTTL: number
provider: CacheOptions
},
api: {
softLimit: number,
hardLimit: number,
}
}
//export type OperatorConfig = Required<OperatorJsonConfig>;
interface CacheTypeStat {
requests: number,
miss: number,
}
export interface ResourceStats {
[key: string]: CacheTypeStat
}

src/Common/types.ts

@@ -0,0 +1,20 @@
import {RecentActivityRuleJSONConfig} from "../Rule/RecentActivityRule";
import {RepeatActivityJSONConfig} from "../Rule/SubmissionRule/RepeatActivityRule";
import {AuthorRuleJSONConfig} from "../Rule/AuthorRule";
import {AttributionJSONConfig} from "../Rule/AttributionRule";
import {FlairActionJson} from "../Action/SubmissionAction/FlairAction";
import {CommentActionJson} from "../Action/CommentAction";
import {ReportActionJson} from "../Action/ReportAction";
import {LockActionJson} from "../Action/LockAction";
import {RemoveActionJson} from "../Action/RemoveAction";
import {HistoryJSONConfig} from "../Rule/HistoryRule";
import {UserNoteActionJson} from "../Action/UserNoteAction";
import {ApproveActionJson} from "../Action/ApproveAction";
import {BanActionJson} from "../Action/BanAction";
import {RegexRuleJSONConfig} from "../Rule/RegexRule";
export type RuleJson = RecentActivityRuleJSONConfig | RepeatActivityJSONConfig | AuthorRuleJSONConfig | AttributionJSONConfig | HistoryJSONConfig | RegexRuleJSONConfig | string;
export type RuleObjectJson = Exclude<RuleJson, string>
export type ActionJson = CommentActionJson | FlairActionJson | ReportActionJson | LockActionJson | RemoveActionJson | ApproveActionJson | BanActionJson | UserNoteActionJson | string;
export type ActionObjectJson = Exclude<ActionJson, string>;


@@ -1,50 +1,598 @@
import {Logger} from "winston";
import {createLabelledLogger, loggerMetaShuffle, mergeArr} from "./util";
import {
buildCacheOptionsFromProvider,
createAjvFactory,
mergeArr,
normalizeName,
overwriteMerge,
parseBool, randomId,
readJson,
removeUndefinedKeys
} from "./util";
import {CommentCheck} from "./Check/CommentCheck";
import {SubmissionCheck} from "./Check/SubmissionCheck";
import Ajv from 'ajv';
import * as schema from './Schema/App.json';
import Ajv, {Schema} from 'ajv';
import * as appSchema from './Schema/App.json';
import * as operatorSchema from './Schema/OperatorConfig.json';
import {JSONConfig} from "./JsonConfig";
import LoggedError from "./Utils/LoggedError";
const ajv = new Ajv();
import {CheckStructuredJson} from "./Check";
import {
DEFAULT_POLLING_INTERVAL,
DEFAULT_POLLING_LIMIT,
OperatorJsonConfig,
OperatorConfig,
PollingOptions,
PollingOptionsStrong,
PollOn, StrongCache, CacheProvider, CacheOptions
} from "./Common/interfaces";
import {isRuleSetJSON, RuleSetJson, RuleSetObjectJson} from "./Rule/RuleSet";
import deepEqual from "fast-deep-equal";
import {ActionJson, ActionObjectJson, RuleJson, RuleObjectJson} from "./Common/types";
import {isActionJson} from "./Action";
import {getLogger} from "./Utils/loggerFactory";
import {GetEnvVars} from 'env-cmd';
import {operatorConfig} from "./Utils/CommandConfig";
import merge from 'deepmerge';
import * as process from "process";
import {cacheOptDefaults, cacheTTLDefaults} from "./Common/defaults";
export interface ConfigBuilderOptions {
logger?: Logger,
logger: Logger,
}
export const validateJson = (config: object, schema: Schema, logger: Logger): any => {
const ajv = createAjvFactory(logger);
const valid = ajv.validate(schema, config);
if (valid) {
return config;
} else {
logger.error('Json config was not valid. Please use schema to check validity.', {leaf: 'Config'});
if (Array.isArray(ajv.errors)) {
for (const err of ajv.errors) {
let parts = [
`At: ${err.dataPath}`,
];
let data;
if (typeof err.data === 'string') {
data = err.data;
} else if (err.data !== null && typeof err.data === 'object' && (err.data as any).name !== undefined) {
data = `Object named '${(err.data as any).name}'`;
}
if (data !== undefined) {
parts.push(`Data: ${data}`);
}
let suffix = '';
// @ts-ignore
if (err.params.allowedValues !== undefined) {
// @ts-ignore
suffix = err.params.allowedValues.join(', ');
suffix = ` [${suffix}]`;
}
parts.push(`${err.keyword}: ${err.schemaPath} => ${err.message}${suffix}`);
// if we have a reference in the description parse it out so we can log it here for context
if (err.parentSchema !== undefined && err.parentSchema.description !== undefined) {
const desc = err.parentSchema.description as string;
const seeIndex = desc.indexOf('[See]');
if (seeIndex !== -1) {
let newLineIndex: number | undefined = desc.indexOf('\n', seeIndex);
if (newLineIndex === -1) {
newLineIndex = undefined;
}
const seeFragment = desc.slice(seeIndex + 5, newLineIndex);
parts.push(`See:${seeFragment}`);
}
}
logger.error(`Schema Error:\r\n${parts.join('\r\n')}`, {leaf: 'Config'});
}
}
throw new LoggedError('Config schema validity failure');
}
}
export class ConfigBuilder {
configLogger: Logger;
logger: Logger;
constructor(options: ConfigBuilderOptions) {
if (options.logger !== undefined) {
this.logger = options.logger.child(loggerMetaShuffle(options.logger, 'Config'), mergeArr);
this.configLogger = options.logger.child({leaf: 'Config'}, mergeArr);
this.logger = options.logger;
}
validateJson(config: object): JSONConfig {
const validConfig = validateJson(config, appSchema, this.logger);
return validConfig as JSONConfig;
}
parseToStructured(config: JSONConfig): CheckStructuredJson[] {
let namedRules: Map<string, RuleObjectJson> = new Map();
let namedActions: Map<string, ActionObjectJson> = new Map();
const {checks = []} = config;
for (const c of checks) {
const {rules = []} = c;
namedRules = extractNamedRules(rules, namedRules);
namedActions = extractNamedActions(c.actions, namedActions);
}
const structuredChecks: CheckStructuredJson[] = [];
for (const c of checks) {
const {rules = []} = c;
const strongRules = insertNamedRules(rules, namedRules);
const strongActions = insertNamedActions(c.actions, namedActions);
const strongCheck = {...c, rules: strongRules, actions: strongActions} as CheckStructuredJson;
structuredChecks.push(strongCheck);
}
return structuredChecks;
}
}
export const buildPollingOptions = (values: (string | PollingOptions)[]): PollingOptionsStrong[] => {
let opts: PollingOptionsStrong[] = [];
for (const v of values) {
if (typeof v === 'string') {
opts.push({pollOn: v as PollOn, interval: DEFAULT_POLLING_INTERVAL, limit: DEFAULT_POLLING_LIMIT});
} else {
this.logger = createLabelledLogger(`Config`, `Config`);
const {
pollOn: p,
interval = DEFAULT_POLLING_INTERVAL,
limit = DEFAULT_POLLING_LIMIT,
delayUntil,
} = v;
opts.push({pollOn: p as PollOn, interval, limit, delayUntil});
}
}
return opts;
}
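// Illustrative usage (not part of this file): mixed string/object values are normalized using the defaults above.
const normalizedPollingExample = buildPollingOptions(['unmoderated', {pollOn: 'newComm', limit: 25}]);
// => [{pollOn: 'unmoderated', interval: 30, limit: 50}, {pollOn: 'newComm', interval: 30, limit: 25, delayUntil: undefined}]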
export const extractNamedRules = (rules: Array<RuleSetJson | RuleJson>, namedRules: Map<string, RuleObjectJson> = new Map()): Map<string, RuleObjectJson> => {
//const namedRules = new Map();
for (const r of rules) {
let rulesToAdd: RuleObjectJson[] = [];
if ((typeof r === 'object')) {
if ((r as RuleObjectJson).kind !== undefined) {
// itsa rule
const rule = r as RuleObjectJson;
if (rule.name !== undefined) {
rulesToAdd.push(rule);
}
} else {
const ruleSet = r as RuleSetJson;
const nestedNamed = extractNamedRules(ruleSet.rules);
rulesToAdd = [...nestedNamed.values()];
}
for (const rule of rulesToAdd) {
const name = rule.name as string;
const normalName = normalizeName(name);
const {name: n, ...rest} = rule;
const ruleNoName = {...rest};
if (namedRules.has(normalName)) {
const {name: nn, ...ruleRest} = namedRules.get(normalName) as RuleObjectJson;
if (!deepEqual(ruleRest, ruleNoName)) {
throw new Error(`Rule names must be unique (case-insensitive). Conflicting name: ${name}`);
}
} else {
namedRules.set(normalName, rule);
}
}
}
}
return namedRules;
}
export const insertNamedRules = (rules: Array<RuleSetJson | RuleJson>, namedRules: Map<string, RuleObjectJson> = new Map()): Array<RuleSetObjectJson | RuleObjectJson> => {
const strongRules: Array<RuleSetObjectJson | RuleObjectJson> = [];
for (const r of rules) {
if (typeof r === 'string') {
const foundRule = namedRules.get(r.toLowerCase());
if (foundRule === undefined) {
throw new Error(`No named Rule with the name ${r} was found`);
}
strongRules.push(foundRule);
} else if (isRuleSetJSON(r)) {
const {rules: sr, ...rest} = r;
const setRules = insertNamedRules(sr, namedRules);
const strongSet = {rules: setRules, ...rest} as RuleSetObjectJson;
strongRules.push(strongSet);
} else {
strongRules.push(r);
}
}
buildFromJson(config: object): (Array<SubmissionCheck> | Array<CommentCheck>)[] {
const commentChecks: Array<CommentCheck> = [];
const subChecks: Array<SubmissionCheck> = [];
const valid = ajv.validate(schema, config);
if(valid) {
const validConfig = config as JSONConfig;
for (const jCheck of validConfig.checks) {
if (jCheck.kind === 'comment') {
commentChecks.push(new CommentCheck({...jCheck, logger: this.logger}));
} else if (jCheck.kind === 'submission') {
subChecks.push(new SubmissionCheck({...jCheck, logger: this.logger}));
return strongRules;
}
export const extractNamedActions = (actions: Array<ActionJson>, namedActions: Map<string, ActionObjectJson> = new Map()): Map<string, ActionObjectJson> => {
for (const a of actions) {
if (!(typeof a === 'string')) {
if (isActionJson(a) && a.name !== undefined) {
const normalName = a.name.toLowerCase();
const {name: n, ...rest} = a;
const actionNoName = {...rest};
if (namedActions.has(normalName)) {
// @ts-ignore
const {name: nn, ...aRest} = namedActions.get(normalName) as ActionObjectJson;
if (!deepEqual(aRest, actionNoName)) {
throw new Error(`Action names must be unique (case-insensitive). Conflicting name: ${a.name}`);
}
} else {
namedActions.set(normalName, a);
}
}
}
}
return namedActions;
}
export const insertNamedActions = (actions: Array<ActionJson>, namedActions: Map<string, ActionObjectJson> = new Map()): Array<ActionObjectJson> => {
const strongActions: Array<ActionObjectJson> = [];
for (const a of actions) {
if (typeof a === 'string') {
const foundAction = namedActions.get(a.toLowerCase());
if (foundAction === undefined) {
throw new Error(`No named Action with the name ${a} was found`);
}
strongActions.push(foundAction);
} else {
strongActions.push(a);
}
}
return strongActions;
}
export const parseOpConfigFromArgs = (args: any): OperatorJsonConfig => {
const {
subreddits,
clientId,
clientSecret,
accessToken,
refreshToken,
redirectUri,
wikiConfig,
dryRun,
heartbeat,
softLimit,
hardLimit,
authorTTL,
operator,
operatorDisplay,
snooProxy,
snooDebug,
sharedMod,
logLevel,
logDir,
port,
sessionSecret,
caching,
web
} = args || {};
const data = {
operator: {
name: operator,
display: operatorDisplay
},
credentials: {
clientId,
clientSecret,
accessToken,
refreshToken,
redirectUri,
},
subreddits: {
names: subreddits,
wikiConfig,
heartbeatInterval: heartbeat,
dryRun
},
logging: {
level: logLevel,
path: logDir === true ? `${process.cwd()}/logs` : undefined,
},
snoowrap: {
proxy: snooProxy,
debug: snooDebug,
},
web: {
enabled: web,
port,
session: {
secret: sessionSecret
}
},
polling: {
sharedMod,
},
caching: {
provider: caching,
authorTTL
},
api: {
softLimit,
hardLimit
}
}
return removeUndefinedKeys(data) as OperatorJsonConfig;
}
export const parseOpConfigFromEnv = (): OperatorJsonConfig => {
let subsVal = process.env.SUBREDDITS;
let subs;
if (subsVal !== undefined) {
subsVal = subsVal.trim();
if (subsVal.includes(',')) {
// try to parse using comma
subs = subsVal.split(',').map(x => x.trim()).filter(x => x !== '');
} else {
// otherwise try spaces
subs = subsVal.split(' ')
// remove any extraneous spaces
.filter(x => x !== ' ' && x !== '');
}
if (subs.length === 0) {
subs = undefined;
}
}
const data = {
operator: {
name: process.env.OPERATOR,
display: process.env.OPERATOR_DISPLAY
},
credentials: {
clientId: process.env.CLIENT_ID,
clientSecret: process.env.CLIENT_SECRET,
accessToken: process.env.ACCESS_TOKEN,
refreshToken: process.env.REFRESH_TOKEN,
redirectUri: process.env.REDIRECT_URI,
},
subreddits: {
names: subs,
wikiConfig: process.env.WIKI_CONFIG,
heartbeatInterval: process.env.HEARTBEAT !== undefined ? parseInt(process.env.HEARTBEAT) : undefined,
dryRun: parseBool(process.env.DRYRUN, undefined),
},
logging: {
// @ts-ignore
level: process.env.LOG_LEVEL,
path: process.env.LOG_DIR === 'true' ? `${process.cwd()}/logs` : undefined,
},
snoowrap: {
proxy: process.env.PROXY,
debug: parseBool(process.env.SNOO_DEBUG, undefined),
},
web: {
enabled: process.env.WEB !== undefined ? parseBool(process.env.WEB) : undefined,
port: process.env.PORT !== undefined ? parseInt(process.env.PORT) : undefined,
session: {
provider: process.env.SESSION_PROVIDER,
secret: process.env.SESSION_SECRET
}
},
polling: {
sharedMod: parseBool(process.env.SHARE_MOD),
},
caching: {
provider: {
// @ts-ignore
store: process.env.CACHING
},
authorTTL: process.env.AUTHOR_TTL !== undefined ? parseInt(process.env.AUTHOR_TTL) : undefined
},
api: {
softLimit: process.env.SOFT_LIMIT !== undefined ? parseInt(process.env.SOFT_LIMIT) : undefined,
hardLimit: process.env.HARD_LIMIT !== undefined ? parseInt(process.env.HARD_LIMIT) : undefined
}
}
return removeUndefinedKeys(data) as OperatorJsonConfig;
}
// Hierarchy (lower level overwrites above)
//
// .env file
// Actual ENVs (from environment)
// json config
// args from cli
export const parseOperatorConfigFromSources = async (args: any): Promise<OperatorJsonConfig> => {
const {logLevel = process.env.LOG_LEVEL, logDir = process.env.LOG_DIR || false} = args || {};
const envPath = process.env.OPERATOR_ENV;
// create a pre config logger to help with debugging
const initLogger = getLogger({logLevel, logDir: logDir === true ? `${process.cwd()}/logs` : logDir}, 'init');
try {
const vars = await GetEnvVars({
envFile: {
filePath: envPath,
fallback: true
}
});
// if we found variables in the file or at a fallback path then add them in before we do main arg parsing
for (const [k, v] of Object.entries(vars)) {
// don't override existing
if (process.env[k] === undefined) {
process.env[k] = v;
}
}
} catch (err) {
let msg = 'No .env file found at default location (./env)';
if (envPath !== undefined) {
msg = `${msg} or OPERATOR_ENV path (${envPath})`;
}
initLogger.warn(`${msg} -- this may be normal if neither was provided.`);
// mimicking --silent from env-cmd
//swallow silently for now 😬
}
const {operatorConfig = process.env.OPERATOR_CONFIG} = args;
let configFromFile: OperatorJsonConfig = {};
if (operatorConfig !== undefined) {
let rawConfig;
try {
rawConfig = await readJson(operatorConfig, {log: initLogger});
} catch (err) {
initLogger.error('Cannot continue app startup because operator config file was not parseable.');
err.logged = true;
throw err;
}
try {
configFromFile = validateJson(rawConfig, operatorSchema, initLogger) as OperatorJsonConfig;
} catch (err) {
initLogger.error('Cannot continue app startup because operator config file was not valid.');
throw err;
}
}
const configFromArgs = parseOpConfigFromArgs(args);
const configFromEnv = parseOpConfigFromEnv();
const mergedConfig = merge.all([configFromEnv, configFromFile, configFromArgs], {
arrayMerge: overwriteMerge,
});
return removeUndefinedKeys(mergedConfig) as OperatorJsonConfig;
}
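// Minimal sketch of the precedence described in the hierarchy comment above: with deepmerge,
// later sources overwrite earlier ones (values here are hypothetical).
const precedenceExample = merge.all([
    {logging: {level: 'info'}},  // from ENV
    {logging: {level: 'debug'}}, // from json config
    {logging: {level: 'warn'}},  // from cli args
]);
// => {logging: {level: 'warn'}}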
export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): OperatorConfig => {
const {
operator: {
name,
display = 'Anonymous'
} = {},
credentials: {
clientId: ci,
clientSecret: cs,
...restCred
} = {},
subreddits: {
names = [],
wikiConfig = 'botconfig/contextbot',
heartbeatInterval = 300,
dryRun
} = {},
logging: {
level = 'verbose',
path,
} = {},
snoowrap = {},
web: {
enabled = true,
port = 8085,
maxLogs = 200,
session: {
secret = randomId(),
provider: sessionProvider = { store: 'memory' },
} = {}
} = {},
polling: {
sharedMod = false,
limit = 100,
interval = 30,
} = {},
caching = 'memory',
api: {
softLimit = 250,
hardLimit = 50
} = {},
} = data;
let cache = {
...cacheTTLDefaults,
provider: {
store: 'memory',
...cacheOptDefaults
}
};
if (typeof caching === 'string') {
cache = {
provider: {
store: caching as CacheProvider,
...cacheOptDefaults
},
...cacheTTLDefaults
};
} else if (typeof caching === 'object') {
const {provider, ...restConfig} = caching;
if (typeof provider === 'string') {
cache = {
...cacheTTLDefaults,
...restConfig,
provider: {
store: provider as CacheProvider,
...cacheOptDefaults
}
}
} else {
this.logger.error('Json config was not valid. Please use schema to check validity.', ajv.errors);
this.logger.error(ajv.errors);
throw new LoggedError();
const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
cache = {
...cacheTTLDefaults,
...restConfig,
provider: {
store,
...cacheOptDefaults,
...rest,
},
}
}
return [subChecks, commentChecks];
}
const config: OperatorConfig = {
operator: {
name,
display
},
credentials: {
clientId: (ci as string),
clientSecret: (cs as string),
...restCred,
},
logging: {
level,
path
},
snoowrap,
subreddits: {
names,
wikiConfig,
heartbeatInterval,
dryRun,
},
web: {
enabled,
port,
session: {
secret,
provider: typeof sessionProvider === 'string' ? {
...buildCacheOptionsFromProvider({
ttl: 86400000,
store: sessionProvider,
})
} : {
...buildCacheOptionsFromProvider(sessionProvider),
ttl: 86400000,
},
},
maxLogs,
},
// @ts-ignore
caching: cache,
polling: {
sharedMod,
limit,
interval,
},
api: {
softLimit,
hardLimit
}
};
return config;
}


@@ -1,7 +1,7 @@
import {CheckJSONConfig} from "./Check";
import {PollingOptions} from "./Common/interfaces";
import {CheckJson, CommentCheckJson, SubmissionCheckJson} from "./Check";
import {ManagerOptions} from "./Common/interfaces";
export interface JSONConfig {
export interface JSONConfig extends ManagerOptions {
/**
* A list of all the checks that should be run for a subreddit.
*
@@ -12,6 +12,5 @@ export interface JSONConfig {
* When a check "passes", and actions are performed, then all subsequent checks are skipped.
* @minItems 1
* */
checks: CheckJSONConfig[]
polling?: PollingOptions
checks: Array<SubmissionCheckJson|CommentCheckJson>
}


@@ -0,0 +1,46 @@
import webhook from 'webhook-discord';
import {NotificationContent} from "../Common/interfaces";
class DiscordNotifier {
name: string
type: string = 'Discord';
url: string;
constructor(name: string, url: string) {
this.name = name;
this.url = url;
}
handle(val: NotificationContent) {
const h = new webhook.Webhook(this.url);
const hook = new webhook.MessageBuilder();
const {logLevel, title, footer, body = ''} = val;
hook.setName('RCB')
.setTitle(title)
.setDescription(body)
if (footer !== undefined) {
// @ts-ignore
hook.setFooter(footer, false);
}
switch (logLevel) {
case 'error':
hook.setColor("##ff0000");
break;
case 'warn':
hook.setColor("#ffe900");
break;
default:
hook.setColor("#00fffa");
break;
}
h.send(hook);
}
}
export default DiscordNotifier;
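// Illustrative usage (webhook URL is a placeholder); handle() accepts a NotificationContent payload:
const exampleNotifier = new DiscordNotifier('modDiscord', 'https://discord.com/api/webhooks/exampleId/exampleToken');
// exampleNotifier.handle({title: 'Bot state changed', body: 'paused by operator', logLevel: 'warn'});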


@@ -0,0 +1,122 @@
import {
NotificationConfig,
NotificationEventConfig,
NotificationEvents,
NotificationEventType,
Notifier
} from "../Common/interfaces";
import DiscordNotifier from "./DiscordNotifier";
import {Logger} from "winston";
import {mergeArr} from "../util";
import Subreddit from "snoowrap/dist/objects/Subreddit";
class NotificationManager {
notifiers: Notifier[] = [];
events: NotificationEvents = [];
logger: Logger;
subreddit: Subreddit;
name: string;
constructor(logger: Logger, subreddit: Subreddit, displayName: string, config?: NotificationConfig) {
this.logger = logger.child({leaf: 'Notifications'}, mergeArr);
this.subreddit = subreddit;
this.name = displayName;
if (config !== undefined) {
const {events = [], providers = []} = config;
this.events = events;
for (const p of providers) {
switch (p.type) {
case 'discord':
this.notifiers.push(new DiscordNotifier(p.name, p.url));
break;
default:
this.logger.warn(`Notification provider type of ${p.type} not recognized.`);
break;
}
}
if (this.events.length > 0 && this.notifiers.length === 0) {
this.logger.warn(`Config specified ${this.events.length} event hooks but no notification providers were set up!`);
}
}
}
getStats() {
let notifiers: string[] = [];
if (this.notifiers.length > 0) {
notifiers = this.notifiers.map(x => `${x.name} (${x.type})`);
}
let events: string[] = [];
if (this.events.length > 0) {
events = this.events.reduce((acc: string[], curr) => {
const e = Array.isArray(curr) ? curr : curr.types;
for (const ev of e) {
if (!acc.includes(ev)) {
acc.push(ev);
}
}
return acc;
}, []);
}
return {
notifiers,
events,
}
}
handle(name: NotificationEventType, title: string, body?: string, causedBy?: string, logLevel?: string) {
if (this.notifiers.length === 0 || this.events.length === 0) {
return;
}
let notifiers: Notifier[] = [];
for (const e of this.events) {
// array of event NotificationEventType
if (Array.isArray(e)) {
const ev = e as NotificationEventType[];
for (const v of ev) {
if (v === name) {
// if we find the event here then we want to send the event to all configured notifiers
notifiers = notifiers.concat(this.notifiers);
}
}
} else {
// e is a NotificationEventConfig
const ev = e as NotificationEventConfig;
const hasEvent = ev.types.some(x => x === name);
if (hasEvent) {
const p = ev.providers.map(y => y.toLowerCase());
const validNotifiers = this.notifiers.filter(x => p.includes(x.name.toLowerCase()));
notifiers = notifiers.concat(validNotifiers);
}
}
}
// remove dups
notifiers = notifiers.reduce((acc: Notifier[], curr: Notifier) => {
if (!acc.some(x => x.name === curr.name)) {
return acc.concat(curr);
}
return acc;
}, []);
let footer = [];
if (causedBy !== undefined) {
footer.push(`* Performed by "${causedBy}"`);
}
footer.push(`* Notification triggered by "${name}"`);
this.logger.info(`Sending notification for ${name} to providers: ${notifiers.map(x => `${x.name} (${x.type})`).join(', ')}`);
for (const n of notifiers) {
n.handle({
title: `${title} (${this.name})`,
body: body || '',
footer: footer.length > 0 ? footer.join('\n') : undefined,
logLevel
});
}
}
}
export default NotificationManager;

src/Rule/AttributionRule.ts

@@ -0,0 +1,440 @@
import {SubmissionRule, SubmissionRuleJSONConfig} from "./SubmissionRule";
import {ActivityWindowType, DomainInfo, ReferenceSubmission} from "../Common/interfaces";
import {Rule, RuleOptions, RuleResult} from "./index";
import Submission from "snoowrap/dist/objects/Submission";
import {getAttributionIdentifier} from "../Utils/SnoowrapUtils";
import dayjs from "dayjs";
import {
comparisonTextOp,
FAIL,
formatNumber,
parseGenericValueOrPercentComparison,
parseSubredditName,
PASS
} from "../util";
import { Comment } from "snoowrap/dist/objects";
import SimpleError from "../Utils/SimpleError";
export interface AttributionCriteria {
/**
* A string containing a comparison operator and a value to compare comments against
*
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign]`
*
* * EX `> 12` => greater than 12 activities originate from the same attribution
* * EX `<= 10%` => less than 10% of all Activities have the same attribution
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
* @default "> 10%"
* */
threshold: string
window: ActivityWindowType
/**
* What activities to use for total count when determining what percentage an attribution comprises
*
* EX:
*
* Author has 100 activities, 40 are submissions and 60 are comments
*
* * If `submissions` then if 10 submissions are for Youtube Channel A then percentage => 10/40 = 25%
* * If `all` then if 10 submissions are for Youtube Channel A then percentage => 10/100 = 10%
*
* @default all
**/
thresholdOn?: 'submissions' | 'all'
/**
* The minimum number of activities that must exist for this criteria to run
* @default 5
* */
minActivityCount?: number
/**
* A list of domains whose Activities will be tested against `threshold`.
*
* If this is present then `aggregateOn` is ignored.
*
* The values are tested as partial strings so you do not need to include full URLs, just the part that matters.
*
* EX `["youtube"]` will match submissions with the domain `https://youtube.com/c/aChannel`
* EX `["youtube.com/c/bChannel"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`
*
* If you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`
*
* **If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**
*
* If nothing is specified or list is empty (default) aggregate using `aggregateOn`
*
* @default [[]]
* */
domains?: string[],
/**
* Set to `true` if you wish to combine all of the Activities from `domains` to test against `threshold` instead of testing each `domain` individually
*
* @default false
* @examples [false]
* */
domainsCombined?: boolean,
/**
* Only include Activities from this list of Subreddits (by name, case-insensitive)
*
*
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* */
include?: string[],
/**
* Do not include Activities from this list of Subreddits (by name, case-insensitive)
*
* Will be ignored if `include` is present.
*
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* */
exclude?: string[],
/**
* If `domains` is not specified this list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`
*
* * If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)
* * If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)
* * If `link` is included then aggregate author's submission history which is external links but not media
*
* If nothing is specified or list is empty (default) all domains are aggregated
*
* @default undefined
* @examples [[]]
* */
aggregateOn?: ('media' | 'self' | 'link')[],
/**
* Should the criteria consolidate recognized media domains into the parent domain?
*
* Submissions to major media domains (youtube, vimeo) can be identified by individual Channel/Author...
*
* * If `false` then domains will be aggregated at the channel level IE Youtube Channel A (2 counts), Youtube Channel B (3 counts)
* * If `true` then media domains will be consolidated at the domain level and then aggregated IE youtube.com (5 counts)
*
* @default false
* @examples [false]
* */
consolidateMediaDomains?: boolean
name?: string
}
const SUBMISSION_DOMAIN = 'AGG:SELF';
const defaultCriteria = [{threshold: '10%', window: 100}];
interface DomainAgg {
info: DomainInfo,
count: number
}
export class AttributionRule extends Rule {
criteria: AttributionCriteria[];
criteriaJoin: 'AND' | 'OR';
constructor(options: AttributionOptions) {
super(options);
const {
criteria = defaultCriteria,
criteriaJoin = 'OR',
} = options || {};
this.criteria = criteria;
this.criteriaJoin = criteriaJoin;
if (this.criteria.length === 0) {
throw new Error('Must provide at least one AttributionCriteria');
}
}
getKind(): string {
return "Attr";
}
protected getSpecificPremise(): object {
return {
criteria: this.criteria,
criteriaJoin: this.criteriaJoin,
}
}
protected async process(item: Comment | Submission): Promise<[boolean, RuleResult]> {
let criteriaResults = [];
for (const criteria of this.criteria) {
const {
threshold = '> 10%',
window,
thresholdOn = 'all',
minActivityCount = 10,
aggregateOn = [],
consolidateMediaDomains = false,
domains = [],
domainsCombined = false,
include: includeRaw = [],
exclude: excludeRaw = [],
} = criteria;
const include = includeRaw.map(x => parseSubredditName(x).toLowerCase());
const exclude = excludeRaw.map(x => parseSubredditName(x).toLowerCase());
const {operator, value, isPercent, extra = ''} = parseGenericValueOrPercentComparison(threshold);
let activities = thresholdOn === 'submissions' ? await this.resources.getAuthorSubmissions(item.author, {window: window}) : await this.resources.getAuthorActivities(item.author, {window: window});
activities = activities.filter(act => {
if (include.length > 0) {
return include.some(x => x === act.subreddit.display_name.toLowerCase());
} else if (exclude.length > 0) {
return !exclude.some(x => x === act.subreddit.display_name.toLowerCase())
}
return true;
});
let activityTotal = 0;
let firstActivity, lastActivity;
if(activities.length === 0) {
this.logger.debug(`No activities retrieved for criteria`);
continue;
}
activityTotal = activities.length;
firstActivity = activities[0];
lastActivity = activities[activities.length - 1];
const activityTotalWindow = dayjs.duration(dayjs(firstActivity.created_utc * 1000).diff(dayjs(lastActivity.created_utc * 1000)));
if (activities.length < minActivityCount) {
criteriaResults.push({criteria, activityTotal, activityTotalWindow, triggered: false, aggDomains: [], minCountMet: false});
this.logger.debug(`${activities.length} activities retrieved was less than the minimum required to run criteria (${minActivityCount})`);
continue;
}
const realDomains: DomainInfo[] = domains.map(x => {
if(x === SUBMISSION_DOMAIN) {
if(!(item instanceof Submission)) {
throw new SimpleError('Cannot run Attribution Rule with the domain AGG:SELF on a Comment');
}
return getAttributionIdentifier(item, consolidateMediaDomains);
}
return {display: x, domain: x, aliases: [x]};
});
const realDomainIdents = realDomains.map(x => x.aliases).flat(1).map(x => x.toLowerCase());
const submissions: Submission[] = thresholdOn === 'submissions' ? activities as Submission[] : activities.filter(x => x instanceof Submission) as Submission[];
const aggregatedSubmissions = submissions.reduce((acc: Map<string, DomainAgg>, sub) => {
const domainInfo = getAttributionIdentifier(sub, consolidateMediaDomains)
let domainType = 'link';
if(sub.secure_media !== undefined && sub.secure_media !== null) {
domainType = 'media';
} else if((sub.is_self || sub.is_video || sub.domain === 'i.redd.it')) {
domainType = 'self';
}
if(realDomains.length === 0 && aggregateOn.length !== 0) {
if(domainType === 'media' && !aggregateOn.includes('media')) {
return acc;
}
if(domainType === 'self' && !aggregateOn.includes('self')) {
return acc;
}
if(domainType === 'link' && !aggregateOn.includes('link')) {
return acc;
}
}
if(realDomains.length > 0) {
if(domainInfo.aliases.map(x => x.toLowerCase()).some(x => realDomainIdents.includes(x))) {
const domainAgg = acc.get(domainInfo.display) || {info: domainInfo, count: 0};
acc.set(domainInfo.display, {...domainAgg, count: domainAgg.count + 1});
}
} else {
const domainAgg = acc.get(domainInfo.display) || {info: domainInfo, count: 0};
acc.set(domainInfo.display, {...domainAgg, count: domainAgg.count + 1});
}
return acc;
}, new Map());
let aggDomains = [];
if(domainsCombined) {
let combinedCount = 0;
let domains = [];
let triggered = false;
for (const [domain, dAgg] of aggregatedSubmissions) {
domains.push(domain);
combinedCount += dAgg.count;
}
if(isPercent) {
triggered = comparisonTextOp(combinedCount / activityTotal, operator, (value/100));
}
else {
triggered = comparisonTextOp(combinedCount, operator, value);
}
const combinedDomain = Array.from(aggregatedSubmissions.values()).map(x => x.info.domain).join(' and ');
const combinedDisplay = Array.from(aggregatedSubmissions.values()).map(x => `${x.info.display}${x.info.provider !== undefined ? ` (${x.info.provider})` : ''}`).join(' and ');
aggDomains.push({
domain: {display: combinedDisplay, domain: combinedDomain, aliases: [combinedDomain]},
count: combinedCount,
percent: Math.round((combinedCount / activityTotal) * 100),
triggered,
});
} else {
for (const [domain, dAgg] of aggregatedSubmissions) {
let triggered = false;
if(isPercent) {
triggered = comparisonTextOp(dAgg.count / activityTotal, operator, (value/100));
}
else {
triggered = comparisonTextOp(dAgg.count, operator, value);
}
aggDomains.push({
domain: dAgg.info,
count: dAgg.count,
percent: Math.round((dAgg.count / activityTotal) * 100),
triggered,
});
}
}
criteriaResults.push({criteria, activityTotal, activityTotalWindow, aggDomains, minCountMet: true});
}
let criteriaMeta = false;
if (this.criteriaJoin === 'OR') {
criteriaMeta = criteriaResults.some(x => x.aggDomains.length > 0 && x.aggDomains.some(y => y.triggered === true));
} else {
criteriaMeta = criteriaResults.every(x => x.aggDomains.length > 0 && x.aggDomains.some(y => y.triggered === true));
}
let usableCriteria = criteriaResults.filter(x => x.aggDomains.length > 0 && x.aggDomains.some(y => y.triggered === true));
if (usableCriteria.length === 0) {
usableCriteria = criteriaResults.filter(x => x.aggDomains.length > 0)
}
// probably none hit min count then
if(criteriaResults.every(x => x.minCountMet === false)) {
const result = `${FAIL} No criteria had their min activity count met`;
this.logger.verbose(result);
return Promise.resolve([false, this.getResult(false, {result})]);
}
let result;
const refCriteriaResults = usableCriteria.find(x => x !== undefined);
if(refCriteriaResults === undefined) {
result = `${FAIL} No criteria results found??`;
return Promise.resolve([false, this.getResult(false, {result})])
}
const {
aggDomains = [],
activityTotal,
activityTotalWindow,
criteria: {threshold, window}
} = refCriteriaResults;
const largestCount = aggDomains.reduce((acc, curr) => Math.max(acc, curr.count), 0);
const largestPercent = aggDomains.reduce((acc, curr) => Math.max(acc, curr.percent), 0);
const smallestCount = aggDomains.reduce((acc, curr) => Math.min(acc, curr.count), aggDomains[0].count);
const smallestPercent = aggDomains.reduce((acc, curr) => Math.min(acc, curr.percent), aggDomains[0].percent);
const windowText = typeof window === 'number' ? `${activityTotal} Items` : activityTotalWindow.humanize();
const countRange = smallestCount === largestCount ? largestCount : `${smallestCount} - ${largestCount}`
const percentRange = formatNumber(smallestPercent, {toFixed: 0}) === formatNumber(largestPercent, {toFixed: 0}) ? `${largestPercent}%` : `${smallestPercent}% - ${largestPercent}%`
let data: any = {};
const resultAgnostic = `met the threshold of ${threshold}, with ${countRange} (${percentRange}) of ${activityTotal} Total -- window: ${windowText}`;
if(criteriaMeta) {
result = `${PASS} ${aggDomains.length} Attribution(s) ${resultAgnostic}`;
data = {
triggeredDomainCount: aggDomains.length,
activityTotal,
largestCount,
largestPercent: `${largestPercent}%`,
smallestCount,
smallestPercent: `${smallestPercent}%`,
countRange,
percentRange,
domains: aggDomains.map(x => x.domain.domain),
domainsDelim: aggDomains.map(x => x.domain.domain).join(', '),
titles: aggDomains.map(x => `${x.domain.display}${x.domain.provider !== undefined ? ` (${x.domain.provider})` :''}`),
titlesDelim: aggDomains.map(x => `${x.domain.display}${x.domain.provider !== undefined ? ` (${x.domain.provider})` :''}`).join(', '),
threshold: threshold,
window: windowText
};
} else {
result = `${FAIL} No Attributions ${resultAgnostic}`;
}
this.logger.verbose(result);
return Promise.resolve([criteriaMeta, this.getResult(criteriaMeta, {
result,
data,
})]);
}
}
interface AttributionConfig extends ReferenceSubmission {
/**
* A list threshold-window values to test attribution against
*
* If none is provided the default set used is:
*
* ```
* threshold: 10%
* window: 100
* ```
*
* @minItems 1
* */
criteria?: AttributionCriteria[]
/**
* * If `OR` then any set of AttributionCriteria that produces an Attribution over the threshold will trigger the rule.
* * If `AND` then all AttributionCriteria sets must produce an Attribution over the threshold to trigger the rule.
* */
criteriaJoin?: 'AND' | 'OR'
}
export interface AttributionOptions extends AttributionConfig, RuleOptions {
}
/**
* Aggregates all of the domain/media accounts attributed to an author's Submission history. If any domain is over the threshold the rule is triggered
*
* Available data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):
*
* ```
* triggeredDomainCount => Number of domains that met the threshold
* activityTotal => Number of Activities considered from window
* window => The date range of the Activities considered
* largestCount => The count from the largest aggregated domain
* largestPercent => The percentage of Activities the largest aggregated domain comprises
* smallestCount => The count from the smallest aggregated domain
* smallestPercent => The percentage of Activities the smallest aggregated domain comprises
* countRange => A convenience string displaying "smallestCount - largestCount" or just one number if both are the same
* percentRange => A convenience string displaying "smallestPercentage - largestPercentage" or just one percentage if both are the same
* domains => An array of all the domain URLs that met the threshold
* domainsDelim => A comma-delimited string of all the domain URLs that met the threshold
* titles => The friendly-name of the domain if one is present, otherwise the URL (IE youtube.com/c/34ldfa343 => "My Youtube Channel Title")
* titlesDelim => A comma-delimited string of all the domain friendly-names
* threshold => The threshold you configured for this Rule to trigger
* url => Url of the submission that triggered the rule
* ```
* */
export interface AttributionJSONConfig extends AttributionConfig, SubmissionRuleJSONConfig {
kind: 'attribution'
}
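// Hypothetical rule configuration (partial sketch; other SubmissionRule properties are omitted):
// trigger when any single attributed domain makes up more than 10% of the author's last 100 activities.
const attributionRuleExample = {
    kind: 'attribution',
    criteria: [{threshold: '> 10%', window: 100, aggregateOn: ['media', 'link']}],
    criteriaJoin: 'OR'
};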


@@ -1,7 +1,7 @@
import {Author, AuthorOptions, AuthorCriteria, Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
import {Comment} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {testAuthorCriteria} from "../Utils/SnoowrapUtils";
import {Author, AuthorCriteria} from "../Author/Author";
/**
* Checks the author of the Activity against AuthorCriteria. This differs from a Rule's AuthorOptions as this is a full Rule and will only pass/fail, not skip.
@@ -53,21 +53,21 @@ export class AuthorRule extends Rule {
};
}
protected async process(item: Comment | Submission): Promise<[boolean, RuleResult[]]> {
protected async process(item: Comment | Submission): Promise<[boolean, RuleResult]> {
if (this.include.length > 0) {
for (const auth of this.include) {
if (await testAuthorCriteria(item, auth)) {
return Promise.resolve([true, [this.getResult(true)]]);
if (await this.resources.testAuthorCriteria(item, auth)) {
return Promise.resolve([true, this.getResult(true)]);
}
}
return Promise.resolve([false, [this.getResult(false)]]);
return Promise.resolve([false, this.getResult(false)]);
}
for (const auth of this.exclude) {
if (await testAuthorCriteria(item, auth, false)) {
return Promise.resolve([true, [this.getResult(true)]]);
if (await this.resources.testAuthorCriteria(item, auth, false)) {
return Promise.resolve([true, this.getResult(true)]);
}
}
return Promise.resolve([false, [this.getResult(false)]]);
return Promise.resolve([false, this.getResult(false)]);
}
}

src/Rule/HistoryRule.ts

@@ -0,0 +1,339 @@
import {ActivityWindowType, CompareValueOrPercent, ThresholdCriteria} from "../Common/interfaces";
import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
import Submission from "snoowrap/dist/objects/Submission";
import {getAuthorActivities} from "../Utils/SnoowrapUtils";
import dayjs from "dayjs";
import {
comparisonTextOp,
FAIL,
formatNumber,
parseGenericValueOrPercentComparison, parseSubredditName,
PASS,
percentFromString
} from "../util";
export interface CommentThresholdCriteria extends ThresholdCriteria {
/**
* If `true` then when threshold...
*
* * is `number` it will be number of comments where author is OP
* * is `percent` it will be **percent of total comments where author is OP**
* */
asOp?: boolean
}
/**
* If both `submission` and `comment` are defined then criteria will only trigger if BOTH thresholds are met
* */
export interface HistoryCriteria {
/**
* A string containing a comparison operator and a value to compare submissions against
*
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign]`
*
* * EX `> 100` => greater than 100 submissions
* * EX `<= 75%` => submissions are equal to or less than 75% of all Activities
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
* */
submission?: CompareValueOrPercent
/**
* A string containing a comparison operator and a value to compare comments against
*
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`
*
* * EX `> 100` => greater than 100 comments
* * EX `<= 75%` => comments are equal to or less than 75% of all Activities
*
* If your string also contains the text `OP` somewhere **after** `<number>[percent sign]` then the comparison is made against comments where the Author is OP:
*
* * EX `> 100 OP` => greater than 100 comments as OP
* * EX `<= 25% as OP` => Comments as OP were less than or equal to 25% of **all Comments**
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
* */
comment?: CompareValueOrPercent
window: ActivityWindowType
/**
* The minimum number of activities that must exist from the `window` results for this criteria to run
* @default 5
* */
minActivityCount?: number
name?: string
}
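To make the comparison syntax above concrete, here is a hedged sketch of a single HistoryCriteria entry; the field values are invented and the numeric `window` simply means "last 100 activities".
```
// Hypothetical HistoryCriteria: over the author's last 100 activities, trigger only if
// submissions are 10% or less of all activities AND more than 25% of comments were made as OP
const exampleCriteria: HistoryCriteria = {
    submission: '<= 10%',
    comment: '> 25% as OP',
    window: 100,
    minActivityCount: 10,
    name: 'mostly-OP commenter',
};
```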
export class HistoryRule extends Rule {
criteria: HistoryCriteria[];
condition: 'AND' | 'OR';
include: string[];
exclude: string[];
constructor(options: HistoryOptions) {
super(options);
const {
criteria,
condition = 'OR',
include = [],
exclude = [],
} = options || {};
this.criteria = criteria;
this.condition = condition;
if (this.criteria.length === 0) {
throw new Error('Must provide at least one HistoryCriteria');
}
this.include = include.map(x => parseSubredditName(x).toLowerCase());
this.exclude = exclude.map(x => parseSubredditName(x).toLowerCase());
}
getKind(): string {
return "History";
}
protected getSpecificPremise(): object {
return {
criteria: this.criteria,
include: this.include,
exclude: this.exclude,
}
}
protected async process(item: Submission): Promise<[boolean, RuleResult]> {
let criteriaResults = [];
for (const criteria of this.criteria) {
const {comment, window, submission, minActivityCount = 5} = criteria;
let activities = await this.resources.getAuthorActivities(item.author, {window: window});
activities = activities.filter(act => {
if (this.include.length > 0) {
return this.include.some(x => x === act.subreddit.display_name.toLowerCase());
} else if (this.exclude.length > 0) {
return !this.exclude.some(x => x === act.subreddit.display_name.toLowerCase())
}
return true;
});
if (activities.length < minActivityCount) {
continue;
}
const activityTotal = activities.length;
const {submissionTotal, commentTotal, opTotal} = activities.reduce((acc, act) => {
if(act instanceof Submission) {
return {...acc, submissionTotal: acc.submissionTotal + 1};
}
let a = {...acc, commentTotal: acc.commentTotal + 1};
if(act.is_submitter) {
a.opTotal = a.opTotal + 1;
}
return a;
},{submissionTotal: 0, commentTotal: 0, opTotal: 0});
let commentTrigger = undefined;
if(comment !== undefined) {
const {operator, value, isPercent, extra = ''} = parseGenericValueOrPercentComparison(comment);
const asOp = extra.toLowerCase().includes('op');
if(isPercent) {
const per = value / 100;
if(asOp) {
commentTrigger = comparisonTextOp(opTotal / commentTotal, operator, per);
} else {
commentTrigger = comparisonTextOp(commentTotal / activityTotal, operator, per);
}
} else {
if(asOp) {
commentTrigger = comparisonTextOp(opTotal, operator, value);
} else {
commentTrigger = comparisonTextOp(commentTotal, operator, value);
}
}
}
let submissionTrigger = undefined;
if(submission !== undefined) {
const {operator, value, isPercent} = parseGenericValueOrPercentComparison(submission);
if(isPercent) {
const per = value / 100;
submissionTrigger = comparisonTextOp(submissionTotal / activityTotal, operator, per);
} else {
submissionTrigger = comparisonTextOp(submissionTotal, operator, value);
}
}
const firstActivity = activities[0];
const lastActivity = activities[activities.length - 1];
const activityTotalWindow = dayjs.duration(dayjs(firstActivity.created_utc * 1000).diff(dayjs(lastActivity.created_utc * 1000)));
criteriaResults.push({
criteria,
activityTotal,
activityTotalWindow,
submissionTotal,
commentTotal,
opTotal,
submissionTrigger,
commentTrigger,
triggered: (submissionTrigger === undefined || submissionTrigger === true) && (commentTrigger === undefined || commentTrigger === true)
});
}
let criteriaMet = false;
let failCriteriaResult: string = '';
if (this.condition === 'OR') {
criteriaMet = criteriaResults.some(x => x.triggered);
if(!criteriaMet) {
failCriteriaResult = `${FAIL} No criteria was met`;
}
} else {
criteriaMet = criteriaResults.every(x => x.triggered);
if(!criteriaMet) {
if(criteriaResults.some(x => x.triggered)) {
const met = criteriaResults.filter(x => x.triggered);
failCriteriaResult = `${FAIL} ${met.length} out of ${criteriaResults.length} criteria met but Rule required all be met. Set log level to debug to see individual results`;
const results = criteriaResults.map(x => this.generateResultDataFromCriteria(x, true));
this.logger.debug(`\r\n ${results.map(x => x.result).join('\r\n')}`);
} else {
failCriteriaResult = `${FAIL} No criteria was met`;
}
}
}
if(criteriaMet) {
// use first triggered criteria found
const refCriteriaResults = criteriaResults.find(x => x.triggered);
const resultData = this.generateResultDataFromCriteria(refCriteriaResults);
this.logger.verbose(`${PASS} ${resultData.result}`);
return Promise.resolve([true, this.getResult(true, resultData)]);
}
return Promise.resolve([false, this.getResult(false, {result: failCriteriaResult})]);
}
protected generateResultDataFromCriteria(results: any, includePassFailSymbols = false) {
const {
activityTotal,
activityTotalWindow,
submissionTotal,
commentTotal,
opTotal,
criteria: {
comment,
submission,
window,
},
criteria,
triggered,
submissionTrigger,
commentTrigger,
} = results;
const data: any = {
activityTotal,
submissionTotal,
commentTotal,
opTotal,
commentPercent: formatNumber((commentTotal/activityTotal)*100),
submissionPercent: formatNumber((submissionTotal/activityTotal)*100),
opPercent: formatNumber((opTotal/commentTotal)*100),
criteria,
window: typeof window === 'number' ? `${activityTotal} Items` : activityTotalWindow.humanize(true),
triggered,
submissionTrigger,
commentTrigger,
};
let thresholdSummary = [];
let submissionSummary;
let commentSummary;
if(submission !== undefined) {
const {operator, value, isPercent, displayText} = parseGenericValueOrPercentComparison(submission);
const suffix = !isPercent ? 'Items' : `(${formatNumber((submissionTotal/activityTotal)*100)}%) of ${activityTotal} Total`;
submissionSummary = `${includePassFailSymbols ? `${submissionTrigger ? PASS : FAIL} ` : ''}Submissions (${submissionTotal}) were${submissionTrigger ? '' : ' not'} ${displayText} ${suffix}`;
data.submissionSummary = submissionSummary;
thresholdSummary.push(submissionSummary);
}
if(comment !== undefined) {
const {operator, value, isPercent, displayText, extra = ''} = parseGenericValueOrPercentComparison(comment);
const asOp = extra.toLowerCase().includes('op');
const totalType = asOp ? 'Comments' : 'Activities'
const countType = asOp ? 'Comments as OP' : 'Comments';
const suffix = !isPercent ? 'Items' : `(${asOp ? formatNumber((opTotal/commentTotal)*100) : formatNumber((commentTotal/activityTotal)*100)}%) of ${activityTotal} Total ${totalType}`;
commentSummary = `${includePassFailSymbols ? `${commentTrigger ? PASS : FAIL} ` : ''}${countType} (${asOp ? opTotal : commentTotal}) were${commentTrigger ? '' : ' not'} ${displayText} ${suffix}`;
data.commentSummary = commentSummary;
thresholdSummary.push(commentSummary);
}
data.thresholdSummary = thresholdSummary.join(' and ');
const result = `${data.thresholdSummary} (${data.window})`;
return {result, data};
}
}
export default HistoryRule;
interface HistoryConfig {
/**
* A list of threshold-window values to test activities against.
*
* @minItems 1
* */
criteria: HistoryCriteria[]
/**
* * If `OR` then any set of Criteria that pass will trigger the Rule
* * If `AND` then all Criteria sets must pass to trigger the Rule
* */
condition?: 'AND' | 'OR'
/**
* Only include Submissions from this list of Subreddits (by name, case-insensitive)
*
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* */
include?: string[],
/**
* Do not include Submissions from this list of Subreddits (by name, case-insensitive)
*
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* */
exclude?: string[],
}
export interface HistoryOptions extends HistoryConfig, RuleOptions {
}
/**
* Aggregates an Author's submission and comment history. The Rule can be triggered on a count or percent of the total (for comment and/or submission totals) as well as on the total of comments made as OP.
*
* Available data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):
*
* ```
* activityTotal => Total number of activities
* submissionTotal => Total number of submissions
* commentTotal => Total number of comments
* opTotal => Total number of comments as OP
* thresholdSummary => A text summary of the first Criteria triggered with totals/percentages
* criteria => The ThresholdCriteria object
* window => A text summary of the range of Activities considered (# of Items if number, time range if Duration)
* ```
* */
export interface HistoryJSONConfig extends HistoryConfig, RuleJSONConfig {
kind: 'history'
}
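A hedged example of what a full JSON config for this rule could look like (shape follows HistoryConfig/HistoryJSONConfig above; numeric windows are used to avoid assuming a duration-string format, and the type annotation is omitted since RuleJSONConfig is defined elsewhere).
```
// Hypothetical 'history' rule config: trigger if EITHER criteria set passes
const exampleHistoryRuleConfig = {
    kind: 'history',
    condition: 'OR',
    criteria: [
        // comments as OP are 25% or less of all comments over the last 200 activities
        {comment: '<= 25% as OP', window: 200},
        // or the author made more than 100 submissions in their last 500 activities
        {submission: '> 100', window: 500},
    ],
    include: ['mealtimevideos'],
};
```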

View File

@@ -1,8 +1,13 @@
import {Rule, RuleJSONConfig, RuleOptions, RulePremise, RuleResult} from "./index";
import {Comment, VoteableContent} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {getAuthorActivities, getAuthorComments, getAuthorSubmissions} from "../Utils/SnoowrapUtils";
import {parseUsableLinkIdentifier} from "../util";
import {
activityWindowText,
comparisonTextOp, FAIL, formatNumber,
parseGenericValueOrPercentComparison, parseSubredditName,
parseUsableLinkIdentifier,
PASS
} from "../util";
import {
ActivityWindow,
ActivityWindowCriteria,
@@ -33,7 +38,7 @@ export class RecentActivityRule extends Rule {
}
getKind(): string {
return 'Recent Activity';
return 'Recent';
}
getSpecificPremise(): object {
@@ -45,28 +50,27 @@ export class RecentActivityRule extends Rule {
}
}
async process(item: Submission | Comment): Promise<[boolean, RuleResult[]]> {
async process(item: Submission | Comment): Promise<[boolean, RuleResult]> {
let activities;
switch (this.lookAt) {
case 'comments':
activities = await getAuthorComments(item.author, {window: this.window});
activities = await this.resources.getAuthorComments(item.author, {window: this.window});
break;
case 'submissions':
activities = await getAuthorSubmissions(item.author, {window: this.window});
activities = await this.resources.getAuthorSubmissions(item.author, {window: this.window});
break;
default:
activities = await getAuthorActivities(item.author, {window: this.window});
activities = await this.resources.getAuthorActivities(item.author, {window: this.window});
break;
}
let viableActivity = activities;
if (this.useSubmissionAsReference) {
if (!(item instanceof Submission)) {
this.logger.debug('Cannot use post as reference because triggered item is not a Submission');
this.logger.warn('Cannot use post as reference because triggered item is not a Submission');
} else if (item.is_self) {
this.logger.debug('Cannot use post as reference because triggered Submission is not a link type');
this.logger.warn('Cannot use post as reference because triggered Submission is not a link type');
} else {
const usableUrl = parseLink(await item.url);
viableActivity = viableActivity.filter((x) => {
@@ -85,40 +89,114 @@ export class RecentActivityRule extends Rule {
grouped[s] = (grouped[s] || []).concat(activity);
return grouped;
}, {} as Record<string, (Submission | Comment)[]>);
const triggeredOn = [];
const summaries = [];
let totalTriggeredOn;
for (const triggerSet of this.thresholds) {
const {count: threshold = 1, subreddits = []} = triggerSet;
for (const sub of subreddits) {
let currCount = 0;
const presentSubs = [];
const {threshold = '>= 1', subreddits = []} = triggerSet;
for (const sub of subreddits.map(x => parseSubredditName(x))) {
const isub = sub.toLowerCase();
const {[isub]: tSub = []} = groupedActivity;
if (tSub.length >= threshold) {
triggeredOn.push({subreddit: sub, count: tSub.length});
if (tSub.length > 0) {
currCount += tSub.length;
presentSubs.push(sub);
}
}
const {operator, value, isPercent} = parseGenericValueOrPercentComparison(threshold);
let sum = {subsWithActivity: presentSubs, subreddits, count: currCount, threshold, triggered: false, testValue: currCount.toString()};
if (isPercent) {
sum.testValue = `${formatNumber((currCount / viableActivity.length) * 100)}%`;
if (comparisonTextOp(currCount / viableActivity.length, operator, value / 100)) {
sum.triggered = true;
totalTriggeredOn = sum;
}
} else if (comparisonTextOp(currCount, operator, value)) {
sum.triggered = true;
totalTriggeredOn = sum;
}
summaries.push(sum);
// if either trigger condition is hit end the iteration early
if (totalTriggeredOn !== undefined) {
break;
}
}
if (triggeredOn.length > 0) {
const friendlyText = triggeredOn.map(x => `${x.subreddit}(${x.count})`).join(' | ');
const friendly = `Triggered by: ${friendlyText}`;
this.logger.debug(friendly);
return Promise.resolve([true, [this.getResult(true, {result: friendly, data: triggeredOn})]]);
let result = '';
if (totalTriggeredOn !== undefined) {
const resultData = this.generateResultData(totalTriggeredOn, viableActivity);
result = `${PASS} ${resultData.result}`;
this.logger.verbose(result);
return Promise.resolve([true, this.getResult(true, resultData)]);
} else if(summaries.length === 1) {
// can display result if it's only one summary otherwise need to log to debug
const res = this.generateResultData(summaries[0], viableActivity);
result = `${FAIL} ${res.result}`;
} else {
result = `${FAIL} No criteria was met. Use 'debug' to see individual results`;
this.logger.debug(`\r\n ${summaries.map(x => this.generateResultData(x, viableActivity).result).join('\r\n')}`);
}
return Promise.resolve([false, [this.getResult(false)]]);
this.logger.verbose(result);
return Promise.resolve([false, this.getResult(false, {result})]);
}
generateResultData(summary: any, activities: (Submission | Comment)[] = []) {
const {
count,
testValue,
subreddits = [],
subsWithActivity = [],
threshold,
triggered
} = summary;
const relevantSubs = subsWithActivity.length === 0 ? subreddits : subsWithActivity;
const totalSummary = `${testValue} activities over ${relevantSubs.length} subreddits ${triggered ? 'met' : 'did not meet'} threshold of ${threshold}`;
return {
result: totalSummary,
data: {
window: typeof this.window === 'number' ? `${activities.length} Items` : activityWindowText(activities),
summary: totalSummary,
subSummary: relevantSubs.join(', '),
subCount: relevantSubs.length,
totalCount: count,
threshold,
testValue
}
};
}
}
/**
* At least one count property must be present. If both are present then either can trigger the rule
*
* @minProperties 1
* @additionalProperties false
* */
export interface SubThreshold extends SubredditCriteria {
/**
* The number of activities in each subreddit from the list that will trigger this rule
* @default 1
* @minimum 1
* A string containing a comparison operator and a value to compare recent activities against
*
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign]`
*
* * EX `> 3` => greater than 3 activities found in the listed subreddits
* * EX `<= 75%` => number of Activities in the subreddits listed are equal to or less than 75% of all Activities
*
* **Note:** If you use a percentage comparison here together with `useSubmissionAsReference` then "all Activities" only pertains to Activities that shared the Link of the Submission, rather than all Activities from this window.
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
* @default ">= 1"
* @examples [">= 1"]
* */
count?: number,
threshold?: string
}
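A hedged example of one SubThreshold entry; the `subreddits` property comes from SubredditCriteria (defined elsewhere) and is inferred here from its use in the rule's process method.
```
// Hypothetical SubThreshold: triggered when more than 15% of the viable activities
// occurred in the listed subreddits (an absolute count such as '>= 3' also works)
const exampleThreshold = {
    subreddits: ['subredditA', 'subredditB'],
    threshold: '> 15%',
};
```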
interface RecentActivityConfig extends ActivityWindow, ReferenceSubmission {
/**
* If present, restricts which activities are considered when counting against a SubThreshold
* @examples ["submissions","comments"]
* */
lookAt?: 'comments' | 'submissions',
/**
@@ -133,8 +211,19 @@ export interface RecentActivityRuleOptions extends RecentActivityConfig, RuleOpt
/**
* Checks a user's history for any Activity (Submission/Comment) in the subreddits specified in thresholds
*
* Available data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):
*
* ```
* summary => comma-delimited list of subreddits that hit the threshold and their count EX subredditA(1), subredditB(4),...
* subCount => Total number of subreddits that hit the threshold
* totalCount => Total number of all activity occurrences in subreddits
* ```
* */
export interface RecentActivityRuleJSONConfig extends RecentActivityConfig, RuleJSONConfig {
/**
* @examples ["recentActivity"]
* */
kind: 'recentActivity'
}
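A hedged sketch of a full config for this rule. The `thresholds` property name is inferred from the rule's process method, and `window`/`useSubmissionAsReference` come from the ActivityWindow/ReferenceSubmission interfaces defined elsewhere, so treat the exact shape as an assumption.
```
// Hypothetical 'recentActivity' rule config
const exampleRecentActivityConfig = {
    kind: 'recentActivity',
    lookAt: 'submissions',
    window: 100,
    useSubmissionAsReference: true,
    thresholds: [
        {subreddits: ['subredditA', 'subredditB'], threshold: '>= 1'},
    ],
};
```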

392
src/Rule/RegexRule.ts Normal file
View File

@@ -0,0 +1,392 @@
import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
import {Comment} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {
comparisonTextOp, FAIL, isExternalUrlSubmission, parseGenericValueComparison,
parseGenericValueOrPercentComparison, parseRegex,
PASS
} from "../util";
import {
ActivityWindowType, JoinOperands,
} from "../Common/interfaces";
import dayjs from 'dayjs';
export interface RegexCriteria {
/**
* A descriptive name that will be used in logging and be available for templating
*
* @examples ["swear words"]
* */
name?: string
/**
* A valid Regular Expression to test content against
*
* Do not wrap expression in forward slashes
*
* EX For the expression `/reddit|FoxxMD/` the value should be `reddit|FoxxMD`
*
* @examples ["reddit|FoxxMD"]
* */
regex: string,
/**
* Regex flags to use
* */
regexFlags?: string,
/**
* Which content from an Activity to test the regex against
*
* Only used if the Activity being tested is a Submission -- Comments are only tested against their content (duh)
*
* @default ["title", "body"]
* */
testOn?: ('title' | 'body' | 'url')[]
/**
* **When used with `window`** determines what type of Activities to retrieve
*
* @default "all"
* */
lookAt?: 'submissions' | 'comments' | 'all',
/**
* A string containing a comparison operator and a value used to determine when an Activity is considered "matched"
*
* The syntax is `(< OR > OR <= OR >=) <number>`
*
* * EX `> 7` => greater than 7 matches found in the Activity, Activity is matched
* * EX `<= 3` => less than 3 matches found in the Activity, Activity is matched
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)(\s+.*)*$
* @default "> 0"
* @examples ["> 0"]
* */
matchThreshold?: string,
/**
* A string containing a comparison operator and a value to determine how many Activities need to be "matched" (based on `matchThreshold` condition) to trigger the rule
*
* **Only useful when used in conjunction with `window`**. If no `window` is specified only the Activity being checked is tested (so the default should/will be used).
*
* To disable (you are only using `totalMatchThreshold`) set to `null`
*
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign]`
*
* * EX `> 3` => greater than 3 Activities met the `matchThreshold` condition, Rule is triggered
* * EX `<= 10%` => less than 10% of all Activities retrieved from `window` met the `matchThreshold` condition, Rule is triggered
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
* @default "> 0"
* @examples ["> 0"]
* */
activityMatchThreshold?: string,
/**
* A string containing a comparison operator and a value to determine how many total matches satisfies the criteria.
*
* If both this and `activityMatchThreshold` are present then whichever is satisfied first will be used.
*
* If not using `window` then this should not be used as running `matchThreshold` on one Activity is effectively the same behavior ( but I'm not gonna stop ya ¯\\\_(ツ)\_/¯ )
*
* The syntax is `(< OR > OR <= OR >=) <number>`
*
* * EX `> 7` => greater than 7 matches found in Activity + Author history `window`
* * EX `<= 3` => less than 3 matches found in the Activity + Author history `window`
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)(\s+.*)*$
* @default "null"
* @examples ["> 0"]
* */
totalMatchThreshold?: string,
window?: ActivityWindowType
}
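As an illustration of how these thresholds combine, a hedged sketch of a single RegexCriteria entry (values invented):
```
// Hypothetical RegexCriteria: an Activity is "matched" when it contains more than 2 hits for
// the expression; the criteria passes if more than 10% of the author's last 50 activities match
const exampleRegexCriteria: RegexCriteria = {
    name: 'spammy phrases',
    regex: 'buy now|limited offer',
    regexFlags: 'i',
    testOn: ['title', 'body'],
    matchThreshold: '> 2',
    activityMatchThreshold: '> 10%',
    window: 50,
};
```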
export class RegexRule extends Rule {
criteria: RegexCriteria[];
condition: JoinOperands;
constructor(options: RegexRuleOptions) {
super(options);
const {
criteria = [],
condition = 'OR'
} = options || {};
if (criteria.length < 1) {
throw new Error('Must provide at least one RegexCriteria');
}
this.criteria = criteria;
this.condition = condition;
}
getKind(): string {
return 'Regex';
}
getSpecificPremise(): object {
return {
criteria: this.criteria,
condition: this.condition,
}
}
protected async process(item: Submission | Comment): Promise<[boolean, RuleResult]> {
let criteriaResults = [];
for (const criteria of this.criteria) {
const {
name,
regex,
regexFlags,
testOn: testOnVals = ['title', 'body'],
lookAt = 'all',
matchThreshold = '> 0',
activityMatchThreshold = '> 0',
totalMatchThreshold = null,
window,
} = criteria;
// normalize their values and also ensure we don't have duplicates
const testOn = testOnVals.map(y => y.toLowerCase()).reduce((acc: string[], curr) => {
if (acc.includes(curr)) {
return acc;
}
return acc.concat(curr);
}, []);
// check regex
const reg = new RegExp(regex);
// ok cool its a valid regex
const matchComparison = parseGenericValueComparison(matchThreshold);
const activityMatchComparison = activityMatchThreshold === null ? undefined : parseGenericValueOrPercentComparison(activityMatchThreshold);
const totalMatchComparison = totalMatchThreshold === null ? undefined : parseGenericValueComparison(totalMatchThreshold);
// since we are dealing with user input (regex) it's likely they mess up their expression and end up matching *a lot* of stuff
// so to keep memory under control only keep the first 100 matches
// and just count the rest
let matches: string[] = [];
let matchCount = 0;
let activitiesMatchedCount = 0;
let activitiesTested = 0;
let activityThresholdMet;
let totalThresholdMet;
// first lets see if the activity we are checking satisfies thresholds
// since we may be able to avoid api calls to get history
let actMatches = this.getMatchesFromActivity(item, testOn, reg, regexFlags);
matches = matches.concat(actMatches).slice(0, 100);
matchCount += actMatches.length;
activitiesTested++;
const singleMatched = comparisonTextOp(actMatches.length, matchComparison.operator, matchComparison.value);
if (singleMatched) {
activitiesMatchedCount++;
}
if (activityMatchComparison !== undefined) {
activityThresholdMet = !activityMatchComparison.isPercent && comparisonTextOp(activitiesMatchedCount, activityMatchComparison.operator, activityMatchComparison.value);
}
if (totalMatchComparison !== undefined) {
totalThresholdMet = comparisonTextOp(matchCount, totalMatchComparison.operator, totalMatchComparison.value);
}
let history: (Submission | Comment)[] = [];
if ((activityThresholdMet === false || totalThresholdMet === false) && window !== undefined) {
// our checking activity didn't meet threshold requirements and criteria does define window
// leh go
switch (lookAt) {
case 'all':
history = await this.resources.getAuthorActivities(item.author, {window: window});
break;
case 'submissions':
history = await this.resources.getAuthorSubmissions(item.author, {window: window});
break;
case 'comments':
history = await this.resources.getAuthorComments(item.author, {window: window});
}
// remove current activity if it exists in history so we don't count it twice
history = history.filter(x => x.id !== item.id);
const historyLength = history.length;
let activityCountFunc: Function | undefined;
if (activityMatchComparison !== undefined) {
if (activityMatchComparison.isPercent) {
activityCountFunc = (actsMatched: number) => {
return comparisonTextOp(actsMatched / historyLength, activityMatchComparison.operator, activityMatchComparison.value / 100);
}
} else {
activityCountFunc = (actsMatched: number) => {
return comparisonTextOp(actsMatched, activityMatchComparison.operator, activityMatchComparison.value);
}
}
}
for (const h of history) {
activitiesTested++;
const aMatches = this.getMatchesFromActivity(h, testOn, reg, regexFlags);
matches = matches.concat(aMatches).slice(0, 100);
matchCount += aMatches.length;
const matched = comparisonTextOp(aMatches.length, matchComparison.operator, matchComparison.value);
if (matched) {
activitiesMatchedCount++;
}
if (activityCountFunc !== undefined && activityThresholdMet !== true && activityCountFunc(activitiesMatchedCount)) {
activityThresholdMet = true;
}
if (totalMatchComparison !== undefined && totalThresholdMet !== true) {
totalThresholdMet = comparisonTextOp(matchCount, totalMatchComparison.operator, totalMatchComparison.value)
}
}
}
let humanWindow = '';
if (history.length > 0) {
if (typeof window === 'number') {
humanWindow = `${history.length} Items`;
} else {
const firstActivity = history[0];
const lastActivity = history[history.length - 1];
humanWindow = dayjs.duration(dayjs(firstActivity.created_utc * 1000).diff(dayjs(lastActivity.created_utc * 1000))).humanize();
}
} else {
humanWindow = '1 Item';
}
const critResults = {
criteria: {
name,
regex,
testOn,
matchThreshold,
activityMatchThreshold,
totalMatchThreshold,
window: humanWindow
},
matches,
matchCount,
activitiesMatchedCount,
activityThresholdMet,
totalThresholdMet,
triggered: false,
};
if (activityThresholdMet === undefined && totalThresholdMet === undefined) {
// user should not have disabled both but in this scenario we'll pretend activityThresholdMet = singleMatched
critResults.activityThresholdMet = singleMatched;
critResults.triggered = singleMatched;
} else {
critResults.triggered = activityThresholdMet === true || totalThresholdMet === true;
}
criteriaResults.push(critResults);
if (this.condition === 'OR') {
if (critResults.triggered) {
break;
}
} else if (!critResults.triggered) {
// since its AND and didn't match the whole rule will fail
break;
}
}
const criteriaMet = this.condition === 'OR' ? criteriaResults.some(x => x.triggered) : criteriaResults.every(x => x.triggered);
const logSummary: string[] = [];
let index = 0;
for (const c of criteriaResults) {
index++;
let msg = `Crit ${c.criteria.name || index} ${c.triggered ? PASS : FAIL}`;
if (c.activityThresholdMet !== undefined) {
msg = `${msg} -- Activity Match=> ${c.activityThresholdMet ? PASS : FAIL} ${c.activitiesMatchedCount} ${c.criteria.activityMatchThreshold} (Threshold ${c.criteria.matchThreshold})`;
}
if (c.totalThresholdMet !== undefined) {
msg = `${msg} -- Total Matches=> ${c.totalThresholdMet ? PASS : FAIL} ${c.matchCount} ${c.criteria.totalMatchThreshold}`;
} else {
msg = `${msg} and ${c.matchCount} Total Matches`;
}
msg = `${msg} (Window: ${c.criteria.window})`;
logSummary.push(msg);
}
const result = `${criteriaMet ? PASS : FAIL} ${logSummary.join(' || ')}`;
this.logger.verbose(result);
return Promise.resolve([criteriaMet, this.getResult(criteriaMet, {result, data: criteriaResults})]);
}
protected getMatchesFromActivity(a: (Submission | Comment), testOn: string[], reg: RegExp, flags?: string): string[] {
let m: string[] = [];
// determine what content we are testing
let contents: string[] = [];
if (a instanceof Submission) {
for (const l of testOn) {
switch (l) {
case 'title':
contents.push(a.title);
break;
case 'body':
if (a.is_self) {
contents.push(a.selftext);
}
break;
case 'url':
if (isExternalUrlSubmission(a)) {
contents.push(a.url);
}
break;
}
}
} else {
contents.push(a.body)
}
for (const c of contents) {
const results = parseRegex(reg, c, flags);
if (results.matched) {
m = m.concat(results.matches);
}
}
return m;
}
}
interface RegexConfig {
/**
* A list of Regular Expressions and conditions under which tested Activity(ies) are matched
* @minItems 1
* @examples [{"regex": "/reddit/", "matchThreshold": "> 3"}]
* */
criteria: RegexCriteria[]
/**
* * If `OR` then any set of Criteria that pass will trigger the Rule
* * If `AND` then all Criteria sets must pass to trigger the Rule
*
* @default "OR"
* */
condition?: 'AND' | 'OR'
}
export interface RegexRuleOptions extends RegexConfig, RuleOptions {
}
/**
* Test a (list of) Regular Expression against the contents or title of an Activity
*
* Optionally, specify a `window` of the User's history to additionally test against
*
* Available data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):
*
* */
export interface RegexRuleJSONConfig extends RegexConfig, RuleJSONConfig {
/**
* @examples ["regex"]
* */
kind: 'regex'
}
export default RegexRule;
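A hedged example of a full JSON config for this rule (shape follows RegexConfig/RegexRuleJSONConfig above; the type annotation is omitted since RuleJSONConfig is defined elsewhere).
```
// Hypothetical 'regex' rule config: either criteria set can trigger the rule
const exampleRegexRuleConfig = {
    kind: 'regex',
    condition: 'OR',
    criteria: [
        {regex: 'reddit|FoxxMD', matchThreshold: '> 3'},
        {regex: 'free karma', window: 100, activityMatchThreshold: '> 5'},
    ],
};
```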

View File

@@ -1,17 +1,34 @@
import {RecentActivityRule, RecentActivityRuleJSONConfig} from "./RecentActivityRule";
import RepeatSubmissionRule, {RepeatSubmissionJSONConfig} from "./SubmissionRule/RepeatSubmissionRule";
import RepeatActivityRule, {RepeatActivityJSONConfig} from "./SubmissionRule/RepeatActivityRule";
import {Rule, RuleJSONConfig} from "./index";
import AuthorRule, {AuthorRuleJSONConfig} from "./AuthorRule";
import {AttributionJSONConfig, AttributionRule} from "./AttributionRule";
import {Logger} from "winston";
import HistoryRule, {HistoryJSONConfig} from "./HistoryRule";
import RegexRule, {RegexRuleJSONConfig} from "./RegexRule";
export function ruleFactory
(config: RuleJSONConfig): Rule {
(config: RuleJSONConfig, logger: Logger, subredditName: string): Rule {
let cfg;
switch (config.kind) {
case 'recentActivity':
return new RecentActivityRule(config as RecentActivityRuleJSONConfig);
case 'repeatSubmission':
return new RepeatSubmissionRule(config as RepeatSubmissionJSONConfig);
cfg = config as RecentActivityRuleJSONConfig;
return new RecentActivityRule({...cfg, logger, subredditName});
case 'repeatActivity':
cfg = config as RepeatActivityJSONConfig;
return new RepeatActivityRule({...cfg, logger, subredditName});
case 'author':
return new AuthorRule(config as AuthorRuleJSONConfig);
cfg = config as AuthorRuleJSONConfig;
return new AuthorRule({...cfg, logger, subredditName});
case 'attribution':
cfg = config as AttributionJSONConfig;
return new AttributionRule({...cfg, logger, subredditName});
case 'history':
cfg = config as HistoryJSONConfig;
return new HistoryRule({...cfg, logger, subredditName});
case 'regex':
cfg = config as RegexRuleJSONConfig;
return new RegexRule({...cfg, logger, subredditName});
default:
throw new Error('rule "kind" was not recognized.');
}

View File

@@ -1,39 +1,30 @@
import {IRule, Triggerable, Rule, RuleJSONConfig, RuleResult} from "./index";
import {IRule, Triggerable, Rule, RuleJSONConfig, RuleResult, RuleSetResult} from "./index";
import {Comment, Submission} from "snoowrap";
import {ruleFactory} from "./RuleFactory";
import {RecentActivityRuleJSONConfig} from "./RecentActivityRule";
import {RepeatSubmissionJSONConfig} from "./SubmissionRule/RepeatSubmissionRule";
import {createLabelledLogger, determineNewResults, findResultByPremise, loggerMetaShuffle} from "../util";
import {createAjvFactory, mergeArr} from "../util";
import {Logger} from "winston";
import {AuthorRuleJSONConfig} from "./AuthorRule";
import {JoinCondition, JoinOperands} from "../Common/interfaces";
import * as RuleSchema from '../Schema/Rule.json';
import Ajv from 'ajv';
import {RuleJson, RuleObjectJson} from "../Common/types";
const ajv = new Ajv();
export class RuleSet implements IRuleSet, Triggerable {
export class RuleSet implements IRuleSet {
rules: Rule[] = [];
condition: JoinOperands;
logger: Logger;
constructor(options: RuleSetOptions) {
const {logger, condition = 'AND', rules = []} = options;
if (logger !== undefined) {
this.logger = logger.child(loggerMetaShuffle(logger, 'Rule Set'));
} else {
this.logger = createLabelledLogger('Rule Set');
}
this.logger = logger.child({leaf: 'Rule Set'}, mergeArr);
this.condition = condition;
const ajv = createAjvFactory(this.logger);
for (const r of rules) {
if (r instanceof Rule) {
this.rules.push(r);
} else {
const valid = ajv.validate(RuleSchema, r);
if (valid) {
// @ts-ignore
r.logger = this.logger;
this.rules.push(ruleFactory(r as RuleJSONConfig));
this.rules.push(ruleFactory(r as RuleJSONConfig, logger, options.subredditName));
} else {
this.logger.warn('Could not build rule because of JSON errors', {}, {errors: ajv.errors, obj: r});
}
@@ -41,12 +32,12 @@ export class RuleSet implements IRuleSet, Triggerable {
}
}
async run(item: Comment | Submission, existingResults: RuleResult[] = []): Promise<[boolean, RuleResult[]]> {
async run(item: Comment | Submission, existingResults: RuleResult[] = []): Promise<[boolean, RuleSetResult]> {
let results: RuleResult[] = [];
let runOne = false;
for (const r of this.rules) {
const combinedResults = [...existingResults, ...results];
const [passed, [result]] = await r.run(item, combinedResults);
const [passed, result] = await r.run(item, combinedResults);
//results = results.concat(determineNewResults(combinedResults, result));
results.push(result);
// skip rule if author check failed
@@ -56,17 +47,30 @@ export class RuleSet implements IRuleSet, Triggerable {
runOne = true;
if (passed) {
if (this.condition === 'OR') {
return [true, results];
return [true, this.generateResultSet(true, results)];
}
} else if (this.condition === 'AND') {
return [false, results];
return [false, this.generateResultSet(false, results)];
}
}
// if no rules were run it's the same as if nothing was triggered
if (!runOne) {
return [false, results];
return [false, this.generateResultSet(false, results)];
}
return [true, results];
if(this.condition === 'OR') {
// if OR and did not return already then none passed
return [false, this.generateResultSet(false, results)];
}
// otherwise AND and did not return already so all passed
return [true, this.generateResultSet(true, results)];
}
generateResultSet(triggered: boolean, results: RuleResult[]): RuleSetResult {
return {
results,
triggered,
condition: this.condition
};
}
}
@@ -79,16 +83,25 @@ export interface IRuleSet extends JoinCondition {
export interface RuleSetOptions extends IRuleSet {
rules: Array<IRule | RuleJSONConfig>,
logger?: Logger
logger: Logger
subredditName: string
}
/**
* A RuleSet is a "nested" set of Rules that can be used to create more complex AND/OR behavior. Think of the outcome of a RuleSet as the result of all of it's Rules (based on condition)
* @see {isRuleSetConfig} ts-auto-guard:type-guard
* A RuleSet is a "nested" set of `Rule` objects that can be used to create more complex AND/OR behavior. Think of the outcome of a `RuleSet` as the result of all of its run `Rule` objects (based on `condition`)
* */
export interface RuleSetJSONConfig extends IRuleSet {
export interface RuleSetJson extends JoinCondition {
/**
* Can be `Rule` or the `name` of any **named** `Rule` in your subreddit's configuration
* @minItems 1
* */
rules: Array<RecentActivityRuleJSONConfig | RepeatSubmissionJSONConfig | AuthorRuleJSONConfig>
rules: Array<RuleJson>
}
export interface RuleSetObjectJson extends RuleSetJson {
rules: Array<RuleObjectJson>
}
export const isRuleSetJSON = (obj: object): obj is RuleSetJson => {
return (obj as RuleSetJson).rules !== undefined;
}
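A hedged example of a RuleSetJson, showing that `rules` may mix an inline Rule config with the `name` of a Rule defined elsewhere in the subreddit's configuration (the inline rule's shape is an assumption reused from the RecentActivity sketch earlier).
```
// Hypothetical RuleSet: triggers only when BOTH member rules trigger
const exampleRuleSet = {
    condition: 'AND',
    rules: [
        'my-named-regex-rule',   // the name of a named Rule defined elsewhere in the config
        {kind: 'recentActivity', thresholds: [{subreddits: ['subredditA']}]},
    ],
};
```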

View File

@@ -0,0 +1,369 @@
import {SubmissionRule, SubmissionRuleJSONConfig} from "./index";
import {RuleOptions, RuleResult} from "../index";
import {Comment} from "snoowrap";
import {
activityWindowText,
comparisonTextOp, FAIL, isExternalUrlSubmission, isRedditMedia,
parseGenericValueComparison, parseSubredditName,
parseUsableLinkIdentifier as linkParser, PASS
} from "../../util";
import {ActivityWindow, ActivityWindowType, ReferenceSubmission} from "../../Common/interfaces";
import Submission from "snoowrap/dist/objects/Submission";
import dayjs from "dayjs";
import Fuse from 'fuse.js'
const parseUsableLinkIdentifier = linkParser();
interface RepeatActivityData {
identifier: string,
sets: (Submission | Comment)[]
}
interface RepeatActivityReducer {
openSets: RepeatActivityData[]
allSets: RepeatActivityData[]
}
const getActivityIdentifier = (activity: (Submission | Comment), length = 200) => {
let identifier: string;
if (activity instanceof Submission) {
if (activity.is_self) {
identifier = `${activity.title}${activity.selftext.slice(0, length)}`;
} else if(isRedditMedia(activity)) {
identifier = activity.title;
} else {
identifier = parseUsableLinkIdentifier(activity.url) as string;
}
} else {
identifier = activity.body.slice(0, length);
}
return identifier;
}
const fuzzyOptions = {
includeScore: true,
distance: 15
};
export class RepeatActivityRule extends SubmissionRule {
threshold: string;
window: ActivityWindowType;
gapAllowance?: number;
useSubmissionAsReference: boolean;
lookAt: 'submissions' | 'all';
include: string[];
exclude: string[];
keepRemoved: boolean;
minWordCount: number;
constructor(options: RepeatActivityOptions) {
super(options);
const {
threshold = '> 5',
window = 100,
gapAllowance,
useSubmissionAsReference = true,
minWordCount = 1,
lookAt = 'all',
include = [],
exclude = [],
keepRemoved = false,
} = options;
this.minWordCount = minWordCount;
this.keepRemoved = keepRemoved;
this.threshold = threshold;
this.window = window;
this.gapAllowance = gapAllowance;
this.useSubmissionAsReference = useSubmissionAsReference;
this.include = include.map(x => parseSubredditName(x).toLowerCase());
this.exclude = exclude.map(x => parseSubredditName(x).toLowerCase());
this.lookAt = lookAt;
}
getKind(): string {
return 'Repeat';
}
getSpecificPremise(): object {
return {
threshold: this.threshold,
window: this.window,
gapAllowance: this.gapAllowance,
useSubmissionAsReference: this.useSubmissionAsReference,
include: this.include,
exclude: this.exclude,
}
}
async process(item: Submission): Promise<[boolean, RuleResult]> {
const referenceUrl = await item.url;
if (referenceUrl === undefined && this.useSubmissionAsReference) {
this.logger.warn(`Rule not triggered because useSubmissionAsReference=true but submission is not a link`);
return Promise.resolve([false, this.getResult(false)]);
}
let filterFunc = (x: any) => true;
if(this.include.length > 0) {
filterFunc = (x: Submission|Comment) => this.include.includes(x.subreddit.display_name.toLowerCase());
} else if(this.exclude.length > 0) {
filterFunc = (x: Submission|Comment) => !this.exclude.includes(x.subreddit.display_name.toLowerCase());
}
let activities: (Submission | Comment)[] = [];
switch (this.lookAt) {
case 'submissions':
activities = await this.resources.getAuthorSubmissions(item.author, {window: this.window, keepRemoved: this.keepRemoved});
break;
default:
activities = await this.resources.getAuthorActivities(item.author, {window: this.window, keepRemoved: this.keepRemoved});
break;
}
const condensedActivities = activities.reduce((acc: RepeatActivityReducer, activity: (Submission | Comment), index: number) => {
const {openSets = [], allSets = []} = acc;
let identifier = getActivityIdentifier(activity);
const isUrl = isExternalUrlSubmission(activity);
let fu = new Fuse([identifier], !isUrl ? fuzzyOptions : {...fuzzyOptions, distance: 5});
const validSub = filterFunc(activity);
let minMet = identifier.length >= this.minWordCount;
let updatedAllSets = [...allSets];
let updatedOpenSets: RepeatActivityData[] = [];
let currIdentifierInOpen = false;
const bufferedActivities = this.gapAllowance === undefined || this.gapAllowance === 0 ? [] : activities.slice(Math.max(0, index - this.gapAllowance), Math.max(0, index));
for (const o of openSets) {
const res = fu.search(o.identifier);
const match = res.length > 0;
if (match && validSub && minMet) {
updatedOpenSets.push({...o, sets: [...o.sets, activity]});
currIdentifierInOpen = true;
} else if (bufferedActivities.some(x => fu.search(getActivityIdentifier(x)).length > 0) && validSub && minMet) {
updatedOpenSets.push(o);
} else if(!currIdentifierInOpen && !isUrl) {
updatedAllSets.push(o);
}
}
if (!currIdentifierInOpen) {
updatedOpenSets.push({identifier, sets: [activity]})
if(isUrl) {
// could be that a spammer is using different URLs for each submission but similar submission titles so search by title as well
const sub = activity as Submission;
identifier = sub.title;
fu = new Fuse([identifier], !isUrl ? fuzzyOptions : {...fuzzyOptions, distance: 5});
minMet = identifier.length >= this.minWordCount;
for (const o of openSets) {
const res = fu.search(o.identifier);
const match = res.length > 0;
if (match && validSub && minMet) {
updatedOpenSets.push({...o, sets: [...o.sets, activity]});
currIdentifierInOpen = true;
} else if (bufferedActivities.some(x => fu.search(getActivityIdentifier(x)).length > 0) && validSub && minMet && !updatedOpenSets.includes(o)) {
updatedOpenSets.push(o);
} else if(!updatedAllSets.includes(o)) {
updatedAllSets.push(o);
}
}
if (!currIdentifierInOpen) {
updatedOpenSets.push({identifier, sets: [activity]})
}
}
}
return {openSets: updatedOpenSets, allSets: updatedAllSets};
}, {openSets: [], allSets: []});
const allRepeatSets = [...condensedActivities.allSets, ...condensedActivities.openSets];
const identifierGroupedActivities = allRepeatSets.reduce((acc, repeatActivityData) => {
let existingSets = [];
if (acc.has(repeatActivityData.identifier)) {
existingSets = acc.get(repeatActivityData.identifier);
}
acc.set(repeatActivityData.identifier, [...existingSets, repeatActivityData.sets].sort((a, b) => b.length < a.length ? 1 : -1));
return acc;
}, new Map());
let applicableGroupedActivities = identifierGroupedActivities;
if (this.useSubmissionAsReference) {
applicableGroupedActivities = new Map();
let identifier = getActivityIdentifier(item);
let referenceSubmissions = identifierGroupedActivities.get(identifier);
if(referenceSubmissions === undefined && isExternalUrlSubmission(item)) {
// if external url sub then try by title
identifier = item.title;
referenceSubmissions = identifierGroupedActivities.get(identifier);
if(referenceSubmissions === undefined) {
// didn't get by title so go back to url since that's the default
identifier = getActivityIdentifier(item);
}
}
applicableGroupedActivities.set(identifier, referenceSubmissions || [])
}
const {operator, value: thresholdValue} = parseGenericValueComparison(this.threshold);
const greaterThan = operator.includes('>');
let allLessThan = true;
const identifiersSummary: SummaryData[] = [];
for (let [key, value] of applicableGroupedActivities) {
const summaryData: SummaryData = {
identifier: key,
totalSets: value.length,
totalTriggeringSets: 0,
largestTrigger: 0,
sets: [],
setsMarkdown: [],
triggeringSets: [],
triggeringSetsMarkdown: [],
};
for (let set of value) {
const test = comparisonTextOp(set.length, operator, thresholdValue);
const md = set.map((x: (Comment | Submission)) => `[${x instanceof Submission ? x.title : getActivityIdentifier(x, 50)}](https://reddit.com${x.permalink}) in ${x.subreddit_name_prefixed} on ${dayjs(x.created_utc * 1000).utc().format()}`);
summaryData.sets.push(set);
summaryData.largestTrigger = Math.max(summaryData.largestTrigger, set.length);
summaryData.setsMarkdown.push(md);
if (test) {
summaryData.triggeringSets.push(set);
summaryData.totalTriggeringSets++;
summaryData.triggeringSetsMarkdown.push(md);
// }
} else if (!greaterThan) {
allLessThan = false;
}
}
identifiersSummary.push(summaryData);
}
const criteriaMet = identifiersSummary.filter(x => x.totalTriggeringSets > 0).length > 0 && (greaterThan || (!greaterThan && allLessThan));
const largestRepeat = identifiersSummary.reduce((acc, summ) => Math.max(summ.largestTrigger, acc), 0);
let result: string;
if (criteriaMet || greaterThan) {
result = `${criteriaMet ? PASS : FAIL} ${identifiersSummary.filter(x => x.totalTriggeringSets > 0).length} of ${identifiersSummary.length} unique items repeated ${this.threshold} times, largest repeat: ${largestRepeat}`;
} else {
result = `${FAIL} Not all of ${identifiersSummary.length} unique items repeated ${this.threshold} times, largest repeat: ${largestRepeat}`
}
this.logger.verbose(result);
if (criteriaMet) {
const triggeringSummaries = identifiersSummary.filter(x => x.totalTriggeringSets > 0);
return Promise.resolve([true, this.getResult(true, {
result,
data: {
window: typeof this.window === 'number' ? `${activities.length} Items` : activityWindowText(activities),
totalTriggeringSets: triggeringSummaries.length,
largestRepeat,
threshold: this.threshold,
gapAllowance: this.gapAllowance,
url: referenceUrl,
triggeringSummaries,
}
})])
}
return Promise.resolve([false, this.getResult(false, {result})]);
}
}
interface SummaryData {
identifier: string,
totalSets: number,
totalTriggeringSets: number,
largestTrigger: number,
sets: (Comment | Submission)[],
setsMarkdown: string[],
triggeringSets: (Comment | Submission)[],
triggeringSetsMarkdown: string[]
}
interface RepeatActivityConfig extends ActivityWindow, ReferenceSubmission {
/**
* A string containing a comparison operator and the number of repeated Activities that will trigger the rule
* @default ">= 5"
* */
threshold?: string,
/**
* The number of allowed non-identical Submissions between identical Submissions that can be ignored when checking against the threshold value
* */
gapAllowance?: number,
/**
* Only include Submissions from this list of Subreddits (by name, case-insensitive)
*
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* */
include?: string[],
/**
* Do not include Submissions from this list of Subreddits (by name, case-insensitive)
*
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* */
exclude?: string[],
/**
* If present determines which activities to consider for gapAllowance.
*
* * If `submissions` then only the author's submission history is considered IE gapAllowance = 2 ===> can have gap of two submissions between repeats
* * If `all` then the author's entire history (submissions/comments) is considered IE gapAllowance = 2 ===> can only have gap of two activities (submissions or comments) between repeats
*
* @default all
* */
lookAt?: 'submissions' | 'all',
/**
* Count submissions/comments that have previously been removed.
*
* By default all `Submissions/Comments` that are in a `removed` state will be filtered from `window` (only applies to subreddits you mod).
*
* Setting this to `true` could be useful if you also want to detect removed repeat posts by a user -- for example, if automoderator removes multiple, consecutive submissions for not following the title format correctly.
*
* @default false
* */
keepRemoved?: boolean
/**
* For activities that are text-based this is the minimum number of words required for the activity to be considered for a repeat
*
* EX if `minWordCount=5` and a comment is `what about you` then it is ignored because `3 is less than 5`
*
* **For self-text submissions** -- title + body text
*
* **For comments** -- body text
*
* @default 1
* @examples [1]
* */
minWordCount?: number,
}
export interface RepeatActivityOptions extends RepeatActivityConfig, RuleOptions {
}
/**
* Checks a user's history for repeated Activities (Submissions or Comments) with identical or near-identical content
*
* Available data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):
*
* ```
* count => Total number of repeat Submissions
* threshold => The threshold you configured for this Rule to trigger
* url => Url of the submission that triggered the rule
* ```
* */
export interface RepeatActivityJSONConfig extends RepeatActivityConfig, SubmissionRuleJSONConfig {
kind: 'repeatActivity'
}
export default RepeatActivityRule;
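A hedged example of a full JSON config for this rule (shape follows RepeatActivityConfig above; `window` comes from ActivityWindow and `useSubmissionAsReference` from ReferenceSubmission, both defined elsewhere).
```
// Hypothetical 'repeatActivity' rule config
const exampleRepeatActivityConfig = {
    kind: 'repeatActivity',
    threshold: '>= 5',
    window: 100,
    gapAllowance: 2,
    useSubmissionAsReference: true,
    minWordCount: 5,
    keepRemoved: false,
    exclude: ['subredditA'],
};
```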

View File

@@ -1,162 +0,0 @@
import {SubmissionRule, SubmissionRuleJSONConfig} from "./index";
import {Rule, RuleOptions, RulePremise, RuleResult} from "../index";
import {Submission} from "snoowrap";
import {getAuthorSubmissions} from "../../Utils/SnoowrapUtils";
import {groupBy, parseUsableLinkIdentifier as linkParser} from "../../util";
import {ActivityWindow, ActivityWindowType, ReferenceSubmission} from "../../Common/interfaces";
const groupByUrl = groupBy(['urlIdentifier']);
const parseUsableLinkIdentifier = linkParser()
export class RepeatSubmissionRule extends SubmissionRule {
threshold: number;
window: ActivityWindowType;
gapAllowance?: number;
useSubmissionAsReference: boolean;
include: string[];
exclude: string[];
constructor(options: RepeatSubmissionOptions) {
super(options);
const {
threshold = 5,
window = 15,
gapAllowance,
useSubmissionAsReference = true,
include = [],
exclude = []
} = options;
this.threshold = threshold;
this.window = window;
this.gapAllowance = gapAllowance;
this.useSubmissionAsReference = useSubmissionAsReference;
this.include = include;
this.exclude = exclude;
}
getKind(): string {
return 'Repeat Submission';
}
getSpecificPremise(): object {
return {
threshold: this.threshold,
window: this.window,
gapAllowance: this.gapAllowance,
useSubmissionAsReference: this.useSubmissionAsReference,
include: this.include,
exclude: this.exclude,
}
}
async process(item: Submission): Promise<[boolean, RuleResult[]]> {
const referenceUrl = await item.url;
if (referenceUrl === undefined && this.useSubmissionAsReference) {
throw new Error(`Cannot run Rule ${this.name} because submission is not a link`);
}
const submissions = await getAuthorSubmissions(item.author, {window: this.window});
// we need to check in order
if (this.gapAllowance !== undefined) {
let consecutivePosts = referenceUrl !== undefined ? 1 : 0;
let gap = 0;
let lastUrl = parseUsableLinkIdentifier(referenceUrl);
// start with second post since first is the one we triggered on (prob)
for (const sub of submissions.slice(1)) {
if (sub.url !== undefined) {
const regUrl = parseUsableLinkIdentifier(sub.url);
if (lastUrl === undefined || lastUrl === regUrl) {
consecutivePosts++;
gap = 0;
} else {
gap++;
if (gap > this.gapAllowance) {
gap = 0;
consecutivePosts = 1;
}
}
lastUrl = regUrl;
} else {
gap++;
if (gap > this.gapAllowance) {
gap = 0;
consecutivePosts = 0;
}
}
if (consecutivePosts >= this.threshold) {
const result = `Threshold of ${this.threshold} repeats triggered for submission with url ${sub.url}`;
this.logger.debug(result);
return Promise.resolve([true, [this.getResult(true, {result})]]);
}
}
return Promise.resolve([false, [this.getResult(false)]]);
}
// otherwise we can just group all occurrences together
const groupedPosts = groupByUrl(submissions.map(x => ({
...x,
urlIdentifier: parseUsableLinkIdentifier(x.url)
})));
let groupsToCheck = [];
if (this.useSubmissionAsReference) {
const identifier = parseUsableLinkIdentifier(referenceUrl);
const {[identifier as string]: refGroup = []} = groupedPosts;
groupsToCheck.push(refGroup);
} else {
groupsToCheck = Object.values(groupedPosts)
}
for (const group of groupsToCheck) {
if (group.length >= this.threshold) {
// @ts-ignore
const result = `Threshold of ${this.threshold} repeats triggered for submission with url ${group[0].url}`;
this.logger.debug(result);
return Promise.resolve([true, [this.getResult(true, {result})]]);
}
}
return Promise.resolve([false, [this.getResult(false)]]);
}
}
interface RepeatSubmissionConfig extends ActivityWindow, ReferenceSubmission {
/**
* The number of repeat submissions that will trigger the rule
* @default 5
* */
threshold?: number,
/**
* The number of allowed non-identical Submissions between identical Submissions that can be ignored when checking against the threshold value
* */
gapAllowance?: number,
/**
* Only include Submissions from this list of Subreddits.
*
* A list of subreddits (case-insensitive) to look for. Do not include "r/" prefix.
*
* EX to match against /r/mealtimevideos and /r/askscience use ["mealtimevideos","askscience"]
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* */
include?: string[],
/**
* Do not include Submissions from this list of Subreddits.
*
* A list of subreddits (case-insensitive) to look for. Do not include "r/" prefix.
*
* EX to match against /r/mealtimevideos and /r/askscience use ["mealtimevideos","askscience"]
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* */
exclude?: string[],
}
export interface RepeatSubmissionOptions extends RepeatSubmissionConfig, RuleOptions {
}
/**
* Checks a user's history for Submissions with identical content
* */
export interface RepeatSubmissionJSONConfig extends RepeatSubmissionConfig, SubmissionRuleJSONConfig {
kind: 'repeatSubmission'
}
export default RepeatSubmissionRule;

View File

@@ -1,14 +1,18 @@
import {Comment, RedditUser} from "snoowrap";
import {Comment} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {Logger} from "winston";
import {createLabelledLogger, findResultByPremise, loggerMetaShuffle, mergeArr} from "../util";
import {testAuthorCriteria} from "../Utils/SnoowrapUtils";
import {findResultByPremise, mergeArr} from "../util";
import ResourceManager, {SubredditResources} from "../Subreddit/SubredditResources";
import {ChecksActivityState, TypedActivityStates} from "../Common/interfaces";
import {isItem} from "../Utils/SnoowrapUtils";
import Author, {AuthorOptions} from "../Author/Author";
export interface RuleOptions {
name?: string;
authors?: AuthorOptions;
logger?: Logger
loggerPrefix?: string
authorIs?: AuthorOptions;
itemIs?: TypedActivityStates;
logger: Logger
subredditName: string;
}
export interface RulePremise {
@@ -23,77 +27,105 @@ interface ResultContext {
export interface RuleResult extends ResultContext {
premise: RulePremise
name?: string
name: string
triggered: (boolean | null)
}
export interface RuleSetResult {
results: RuleResult[],
condition: 'OR' | 'AND',
triggered: boolean
}
export const isRuleSetResult = (obj: any): obj is RuleSetResult => {
return typeof obj === 'object' && Array.isArray(obj.results) && obj.condition !== undefined && obj.triggered !== undefined;
}
export interface Triggerable {
run(item: Comment | Submission, existingResults: RuleResult[]): Promise<[(boolean | null), RuleResult[]]>;
run(item: Comment | Submission, existingResults: RuleResult[]): Promise<[(boolean | null), RuleResult?]>;
}
export abstract class Rule implements IRule, Triggerable {
name?: string;
name: string;
logger: Logger
authors: AuthorOptions;
authorIs: AuthorOptions;
itemIs: TypedActivityStates;
resources: SubredditResources;
constructor(options: RuleOptions) {
const {
name,
loggerPrefix = '',
name = this.getKind(),
logger,
authors: {
authorIs: {
include = [],
exclude = [],
} = {},
itemIs = [],
subredditName,
} = options;
this.name = name;
this.resources = ResourceManager.get(subredditName) as SubredditResources;
this.authors = {
this.authorIs = {
exclude: exclude.map(x => new Author(x)),
include: include.map(x => new Author(x)),
}
const ruleUniqueName = this.name === undefined ? this.getKind() : `${this.getKind()} - ${this.name}`;
if (logger === undefined) {
const prefix = `${loggerPrefix}|${ruleUniqueName}`;
this.logger = createLabelledLogger(prefix, prefix);
} else {
this.logger = logger.child(loggerMetaShuffle(logger, undefined, [ruleUniqueName], {truncateLength: 100}));
this.itemIs = itemIs;
this.logger = logger.child({labels: [`Rule ${this.getRuleUniqueName()}`]}, mergeArr);
}
async run(item: Comment | Submission, existingResults: RuleResult[] = []): Promise<[(boolean | null), RuleResult]> {
try {
const existingResult = findResultByPremise(this.getPremise(), existingResults);
if (existingResult) {
this.logger.debug(`Returning existing result of ${existingResult.triggered ? '✔️' : '❌'}`);
return Promise.resolve([existingResult.triggered, {...existingResult, name: this.name}]);
}
const [itemPass, crit] = isItem(item, this.itemIs, this.logger);
if (!itemPass) {
this.logger.verbose(`(Skipped) Item did not pass 'itemIs' test`);
return Promise.resolve([null, this.getResult(null, {result: `Item did not pass 'itemIs' test`})]);
}
if (this.authorIs.include !== undefined && this.authorIs.include.length > 0) {
for (const auth of this.authorIs.include) {
if (await this.resources.testAuthorCriteria(item, auth)) {
return this.process(item);
}
}
this.logger.verbose('(Skipped) Inclusive author criteria not matched');
return Promise.resolve([null, this.getResult(null, {result: 'Inclusive author criteria not matched'})]);
}
if (this.authorIs.exclude !== undefined && this.authorIs.exclude.length > 0) {
for (const auth of this.authorIs.exclude) {
if (await this.resources.testAuthorCriteria(item, auth, false)) {
return this.process(item);
}
}
this.logger.verbose('(Skipped) Exclusive author criteria not matched');
return Promise.resolve([null, this.getResult(null, {result: 'Exclusive author criteria not matched'})]);
}
} catch (err) {
this.logger.error('Error occurred during Rule pre-process checks');
throw err;
}
try {
return this.process(item);
} catch (err) {
this.logger.error('Error occurred while processing rule');
throw err;
}
}
protected abstract process(item: Comment | Submission): Promise<[boolean, RuleResult]>;
abstract getKind(): string;
getRuleUniqueName() {
return this.name === undefined ? this.getKind() : `${this.getKind()} - ${this.name}`;
}
protected abstract getSpecificPremise(): object;
getPremise(): RulePremise {
@@ -101,7 +133,8 @@ export abstract class Rule implements IRule, Triggerable {
return {
kind: this.getKind(),
config: {
authorIs: this.authorIs,
itemIs: this.itemIs,
...config,
},
};
@@ -117,83 +150,100 @@ export abstract class Rule implements IRule, Triggerable {
}
}
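// Illustrative sketch, not part of the diff: the smallest shape a concrete Rule subclass could take,
// based only on the abstract members shown above (getKind, getSpecificPremise, process).
// "ExampleRule" and its premise/result contents are hypothetical.
export class ExampleRule extends Rule {
    getKind(): string {
        return 'example';
    }

    protected getSpecificPremise(): object {
        // rule-specific config that becomes part of the premise used to de-duplicate/cache results
        return {};
    }

    protected async process(item: Comment | Submission): Promise<[boolean, RuleResult]> {
        // a real rule would inspect `item` (and use this.resources) here; this sketch always triggers
        return [true, this.getResult(true, {result: 'example rule always triggers'})];
    }
}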
export class Author implements AuthorCriteria {
name?: string[];
flairCssClass?: string[];
flairText?: string[];
isMod?: boolean;
constructor(options: AuthorCriteria) {
this.name = options.name;
this.flairCssClass = options.flairCssClass;
this.flairText = options.flairText;
this.isMod = options.isMod;
}
}
/**
 * If present then these Author criteria are checked before running the rule. If criteria fails then the rule is skipped.
 * @minProperties 1
 * @additionalProperties false
 * */
export interface AuthorOptions {
    /**
     * Will "pass" if any set of AuthorCriteria passes
     * */
    include?: AuthorCriteria[];
    /**
     * Only runs if `include` is not present. Will "pass" if any set of the AuthorCriteria does not pass
     * */
    exclude?: AuthorCriteria[];
}
export interface UserNoteCriteria {
    /**
     * User Note type key to search for
     * @examples ["spamwarn"]
     * */
    type: string;
    /**
     * Number of occurrences of this type. Ignored if `search` is `current`
     *
     * A string containing a comparison operator and/or a value to compare number of occurrences against
     *
     * The syntax is `(< OR > OR <= OR >=) <number>[percent sign] [ascending|descending]`
     *
     * @examples [">= 1"]
     * @default ">= 1"
     * @pattern ^\s*(?<opStr>>|>=|<|<=)\s*(?<value>\d+)\s*(?<percent>%?)\s*(?<extra>asc.*|desc.*)*$
     * */
    count?: string;
    /**
     * How to test the notes for this Author:
     *
     * ### current
     *
     * Only the most recent note is checked for `type`
     *
     * ### total
     *
     * The `count` comparison of `type` must be found within all notes
     *
     * * EX `count: > 3` => Must have more than 3 notes of `type`, total
     * * EX `count: <= 25%` => Must have 25% or less of notes of `type`, total
     *
     * ### consecutive
     *
     * The `count` **number** of `type` notes must be found in a row.
     *
     * You may also specify the time-based order in which to search the notes by specifying `ascending (asc)` or `descending (desc)` in the `count` value. Default is `descending`
     *
     * * EX `count: >= 3` => Must have 3 or more notes of `type` consecutively, in descending order
     * * EX `count: < 2` => Must have less than 2 notes of `type` consecutively, in descending order
     * * EX `count: > 4 asc` => Must have greater than 4 notes of `type` consecutively, in ascending order
     *
     * @examples ["current"]
     * @default current
     * */
    search?: 'current' | 'consecutive' | 'total'
}
/**
 * Criteria with which to test against the author of an Activity. The outcome of the test is based on:
 *
 * 1. All present properties passing and
 * 2. If a property is a list then any value from the list matching
 *
 * @minProperties 1
 * @additionalProperties false
 * */
export interface AuthorCriteria {
    /**
     * A list of reddit usernames (case-insensitive) to match against. Do not include the "u/" prefix
     *
     * EX to match against /u/FoxxMD and /u/AnotherUser use ["FoxxMD","AnotherUser"]
     * @examples ["FoxxMD","AnotherUser"]
     * */
    name?: string[],
    /**
     * A list of (user) flair css class values from the subreddit to match against
     * */
    flairCssClass?: string[],
    /**
     * A list of (user) flair text values from the subreddit to match against
     * */
    flairText?: string[],
    /**
     * Is the author a moderator?
     * */
    isMod?: boolean,
}
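// Illustrative sketch, not part of the diff: a hypothetical `authorIs` block combining AuthorCriteria
// with UserNoteCriteria. The `userNotes` property on AuthorCriteria comes from the generated JSON
// schema shown further below; the specific values ('spamwarn', '>= 2') are made up.
const exampleAuthorIs = {
    include: [
        {
            // passes if the author has 2 or more 'spamwarn' notes across all of their User Notes
            userNotes: [
                {
                    type: 'spamwarn',
                    count: '>= 2',
                    search: 'total',
                }
            ]
        }
    ]
};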
/**
 * A duration and how to compare it against a value
 *
 * The syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`
 *
 * * EX `> 100 days` => Passes if the date being compared is before 100 days ago
 * * EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months
 *
 * Unit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)
 *
 * See https://regexr.com/609n8 for an example
 *
 * @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\s*$
 * */
export type DurationComparor = string;
export interface IRule extends ChecksActivityState {
    /**
     * An optional, but highly recommended, friendly name for this rule. If not present will default to `kind`.
     *
     * Can only contain letters, numbers, underscore, spaces, and dashes
     *
     * name is used to reference Rule result data during Action content templating. See CommentAction or ReportAction for more details.
     * @pattern ^[a-zA-Z]([\w -]*[\w])?$
     * @examples ["myNewRule"]
     * */
    name?: string
    /**
     * If present then these Author criteria are checked before running the rule. If criteria fails then the rule is skipped.
     * */
    authorIs?: AuthorOptions
    /**
     * A list of criteria to test the state of the `Activity` against before running the Rule.
     *
     * If any set of criteria passes the Rule will be run. If the criteria fails then the Rule is skipped.
     * */
    itemIs?: TypedActivityStates
}
/** @see {isRuleConfig} ts-auto-guard:type-guard */
export interface RuleJSONConfig extends IRule {
/**
* The kind of rule to run
* @examples ["recentActivity", "repeatActivity", "author", "attribution", "history"]
*/
kind: 'recentActivity' | 'repeatActivity' | 'author' | 'attribution' | 'history' | 'regex'
}
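// Illustrative sketch, not part of the diff: a rule object shaped like RuleJSONConfig/IRule above.
// The values (name, `isMod`, `removed: false`) are made up, and any rule-specific options a
// 'recentActivity' rule would normally require are omitted here.
const exampleRuleConfig = {
    kind: 'recentActivity',
    name: 'myNewRule',
    authorIs: {
        exclude: [
            {isMod: true}
        ]
    },
    itemIs: [
        {removed: false}
    ]
};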


@@ -1,26 +1,323 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"AuthorCriteria": {
"additionalProperties": false,
"description": "Criteria with which to test against the author of an Activity. The outcome of the test is based on:\n\n1. All present properties passing and\n2. If a property is a list then any value from the list matching",
"examples": [
{
"flairText": [
"Contributor",
"Veteran"
],
"isMod": true,
"name": [
"FoxxMD",
"AnotherUser"
]
}
],
"minProperties": 1,
"properties": {
"age": {
"description": "Test the age of the Author's account (when it was created) against this comparison\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>`\n\n* EX `> 100 days` => Passes if Author's account is older than 100 days\n* EX `<= 2 months` => Passes if Author's account is younger than or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
"pattern": "^\\s*(?<opStr>>|>=|<|<=)\\s*(?<time>\\d+)\\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\\s*$",
"type": "string"
},
"commentKarma": {
"description": "A string containing a comparison operator and a value to compare karma against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 comment karma\n* EX `<= 75%` => comment karma is less than or equal to 75% of **all karma**",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"flairCssClass": {
"description": "A list of (user) flair css class values from the subreddit to match against",
"examples": [
"red"
],
"items": {
"type": "string"
},
"type": "array"
},
"flairText": {
"description": "A list of (user) flair text values from the subreddit to match against",
"examples": [
"Approved"
],
"items": {
"type": "string"
},
"type": "array"
},
"isMod": {
"description": "Is the author a moderator?",
"type": "boolean"
},
"linkKarma": {
"description": "A string containing a comparison operator and a value to compare link karma against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 link karma\n* EX `<= 75%` => link karma is less than or equal to 75% of **all karma**",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"name": {
"description": "A list of reddit usernames (case-insensitive) to match against. Do not include the \"u/\" prefix\n\n EX to match against /u/FoxxMD and /u/AnotherUser use [\"FoxxMD\",\"AnotherUser\"]",
"examples": [
"FoxxMD",
"AnotherUser"
],
"items": {
"type": "string"
},
"type": "array"
},
"totalKarma": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"userNotes": {
"description": "A list of UserNote properties to check against the User Notes attached to this Author in this Subreddit (must have Toolbox enabled and used User Notes at least once)",
"items": {
"$ref": "#/definitions/UserNoteCriteria"
},
"type": "array"
},
"verified": {
"description": "Does Author's account have a verified email?",
"type": "boolean"
}
},
"type": "object"
},
"AuthorOptions": {
"description": "If present then these Author criteria are checked before running the rule. If criteria fails then the rule is skipped.",
"examples": [
{
"include": [
{
"flairText": [
"Contributor",
"Veteran"
]
},
{
"isMod": true
}
]
}
],
"properties": {
"exclude": {
"description": "Only runs if `include` is not present. Will \"pass\" if any of set of the AuthorCriteria **does not** pass",
"items": {
"$ref": "#/definitions/AuthorCriteria"
},
"type": "array"
},
"include": {
"description": "Will \"pass\" if any set of AuthorCriteria passes",
"items": {
"$ref": "#/definitions/AuthorCriteria"
},
"type": "array"
}
},
"type": "object"
},
"CommentState": {
"description": "Different attributes a `Comment` can be in. Only include a property if you want to check it.",
"examples": [
{
"op": true,
"removed": false
}
],
"properties": {
"approved": {
"type": "boolean"
},
"deleted": {
"type": "boolean"
},
"distinguished": {
"type": "boolean"
},
"filtered": {
"type": "boolean"
},
"locked": {
"type": "boolean"
},
"op": {
"type": "boolean"
},
"removed": {
"type": "boolean"
},
"spam": {
"type": "boolean"
},
"stickied": {
"type": "boolean"
}
},
"type": "object"
},
"SubmissionState": {
"description": "Different attributes a `Submission` can be in. Only include a property if you want to check it.",
"examples": [
{
"over_18": true,
"removed": false
}
],
"properties": {
"approved": {
"type": "boolean"
},
"deleted": {
"type": "boolean"
},
"distinguished": {
"type": "boolean"
},
"filtered": {
"type": "boolean"
},
"is_self": {
"type": "boolean"
},
"locked": {
"type": "boolean"
},
"over_18": {
"description": "NSFW",
"type": "boolean"
},
"pinned": {
"type": "boolean"
},
"removed": {
"type": "boolean"
},
"spam": {
"type": "boolean"
},
"spoiler": {
"type": "boolean"
},
"stickied": {
"type": "boolean"
}
},
"type": "object"
},
"UserNoteCriteria": {
"properties": {
"count": {
"default": ">= 1",
"description": "Number of occurrences of this type. Ignored if `search` is `current`\n\nA string containing a comparison operator and/or a value to compare number of occurrences against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [ascending|descending]`",
"examples": [
">= 1"
],
"pattern": "^\\s*(?<opStr>>|>=|<|<=)\\s*(?<value>\\d+)\\s*(?<percent>%?)\\s*(?<extra>asc.*|desc.*)*$",
"type": "string"
},
"search": {
"default": "current",
"description": "How to test the notes for this Author:\n\n### current\n\nOnly the most recent note is checked for `type`\n\n### total\n\nThe `count` comparison of `type` must be found within all notes\n\n* EX `count: > 3` => Must have more than 3 notes of `type`, total\n* EX `count: <= 25%` => Must have 25% or less of notes of `type`, total\n\n### consecutive\n\nThe `count` **number** of `type` notes must be found in a row.\n\nYou may also specify the time-based order in which to search the notes by specifying `ascending (asc)` or `descending (desc)` in the `count` value. Default is `descending`\n\n* EX `count: >= 3` => Must have 3 or more notes of `type` consecutively, in descending order\n* EX `count: < 2` => Must have less than 2 notes of `type` consecutively, in descending order\n* EX `count: > 4 asc` => Must have greater than 4 notes of `type` consecutively, in ascending order",
"enum": [
"consecutive",
"current",
"total"
],
"examples": [
"current"
],
"type": "string"
},
"type": {
"description": "User Note type key to search for",
"examples": [
"spamwarn"
],
"type": "string"
}
},
"required": [
"type"
],
"type": "object"
}
},
"properties": {
"authorIs": {
"$ref": "#/definitions/AuthorOptions",
"description": "If present then these Author criteria are checked before running the Action. If criteria fails then the Action is not run.",
"examples": [
{
"include": [
{
"flairText": [
"Contributor",
"Veteran"
]
},
{
"isMod": true
}
]
}
]
},
"dryRun": {
"default": false,
"description": "If `true` the Action will not make the API request to Reddit to perform its action.",
"examples": [
false,
true
],
"type": "boolean"
},
"itemIs": {
"anyOf": [
{
"items": {
"$ref": "#/definitions/SubmissionState"
},
"type": "array"
},
{
"items": {
"$ref": "#/definitions/CommentState"
},
"type": "array"
}
],
"description": "A list of criteria to test the state of the `Activity` against before running the Action.\n\nIf any set of criteria passes the Action will be run."
},
"kind": {
"description": "The type of action that will be performed",
"enum": [
"approve",
"ban",
"comment",
"flair",
"lock",
"remove",
"report",
"usernote"
],
"type": "string"
},
"name": {
"description": "An optional, but highly recommended, friendly name for this Action. If not present will default to `kind`.\n\nCan only contain letters, numbers, underscore, spaces, and dashes",
"examples": [
"myDescriptiveAction"
],
"pattern": "^[a-zA-Z]([\\w -]*[\\w])?$",
"type": "string"
}
},
"propertyOrder": [
"kind",
"name"
],
"required": [
"kind"
],

File diff suppressed because it is too large


@@ -0,0 +1,374 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"CacheOptions": {
"properties": {
"auth_pass": {
"type": "string"
},
"db": {
"type": "number"
},
"host": {
"type": "string"
},
"max": {
"type": "number"
},
"port": {
"type": "number"
},
"store": {
"$ref": "#/definitions/CacheProvider"
},
"ttl": {
"type": "number"
}
},
"required": [
"store"
],
"type": "object"
},
"CacheProvider": {
"enum": [
"memory",
"none",
"redis"
],
"type": "string"
},
"DiscordProviderConfig": {
"properties": {
"name": {
"type": "string"
},
"type": {
"enum": [
"discord"
],
"type": "string"
},
"url": {
"type": "string"
}
},
"required": [
"name",
"type",
"url"
],
"type": "object"
},
"NotificationConfig": {
"properties": {
"events": {
"items": {
"anyOf": [
{
"$ref": "#/definitions/NotificationEventConfig"
},
{
"items": {
"enum": [
"configUpdated",
"eventActioned",
"pollingError",
"runStateChanged"
],
"type": "string"
},
"type": "array"
}
]
},
"type": "array"
},
"providers": {
"items": {
"$ref": "#/definitions/DiscordProviderConfig"
},
"type": "array"
}
},
"required": [
"events",
"providers"
],
"type": "object"
},
"NotificationEventConfig": {
"properties": {
"providers": {
"items": {
"type": "string"
},
"type": "array"
},
"types": {
"items": {
"enum": [
"configUpdated",
"eventActioned",
"pollingError",
"runStateChanged"
],
"type": "string"
},
"type": "array"
}
},
"required": [
"providers",
"types"
],
"type": "object"
},
"PollingDefaults": {
"properties": {
"delayUntil": {
"description": "Delay processing Activity until it is `N` seconds old\n\nUseful if there are other bots that may process an Activity and you want this bot to run first/last/etc.\n\nIf the Activity is already `N` seconds old when it is initially retrieved no refresh of the Activity occurs (no API request is made) and it is immediately processed.",
"type": "number"
},
"interval": {
"default": 30,
"description": "Amount of time, in seconds, to wait between requests",
"examples": [
30
],
"type": "number"
},
"limit": {
"default": 50,
"description": "The maximum number of Activities to get on every request",
"examples": [
50
],
"type": "number"
}
},
"type": "object"
}
},
"properties": {
"api": {
"properties": {
"hardLimit": {
"type": "number"
},
"softLimit": {
"type": "number"
}
},
"type": "object"
},
"caching": {
"anyOf": [
{
"properties": {
"authorTTL": {
"default": 10000,
"description": "Amount of time, in milliseconds, author activities (Comments/Submission) should be cached",
"examples": [
10000
],
"type": "number"
},
"provider": {
"anyOf": [
{
"$ref": "#/definitions/CacheOptions"
},
{
"enum": [
"memory",
"none",
"redis"
],
"type": "string"
}
]
},
"userNotesTTL": {
"default": 60000,
"description": "Amount of time, in milliseconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached",
"examples": [
60000
],
"type": "number"
},
"wikiTTL": {
"default": 300000,
"description": "Amount of time, in milliseconds, wiki content pages should be cached",
"examples": [
300000
],
"type": "number"
}
},
"type": "object"
},
{
"enum": [
"memory",
"none",
"redis"
],
"type": "string"
}
]
},
"credentials": {
"properties": {
"accessToken": {
"type": "string"
},
"clientId": {
"type": "string"
},
"clientSecret": {
"type": "string"
},
"redirectUri": {
"type": "string"
},
"refreshToken": {
"type": "string"
}
},
"type": "object"
},
"logging": {
"properties": {
"level": {
"enum": [
"debug",
"error",
"info",
"verbose",
"warn"
],
"type": "string"
},
"path": {
"type": "string"
}
},
"type": "object"
},
"notifications": {
"$ref": "#/definitions/NotificationConfig"
},
"operator": {
"properties": {
"display": {
"type": "string"
},
"name": {
"type": "string"
}
},
"type": "object"
},
"polling": {
"allOf": [
{
"$ref": "#/definitions/PollingDefaults"
},
{
"properties": {
"interval": {
"type": "number"
},
"limit": {
"type": "number"
},
"sharedMod": {
"type": "boolean"
}
},
"type": "object"
}
]
},
"snoowrap": {
"properties": {
"debug": {
"type": "boolean"
},
"proxy": {
"type": "string"
}
},
"type": "object"
},
"subreddits": {
"properties": {
"dryRun": {
"type": "boolean"
},
"heartbeatInterval": {
"type": "number"
},
"names": {
"items": {
"type": "string"
},
"type": "array"
},
"wikiConfig": {
"type": "string"
}
},
"type": "object"
},
"web": {
"properties": {
"enabled": {
"type": "boolean"
},
"logLevel": {
"enum": [
"debug",
"error",
"info",
"verbose",
"warn"
],
"type": "string"
},
"maxLogs": {
"type": "number"
},
"port": {
"type": "number"
},
"session": {
"properties": {
"provider": {
"anyOf": [
{
"$ref": "#/definitions/CacheOptions"
},
{
"enum": [
"memory",
"redis"
],
"type": "string"
}
]
},
"secret": {
"type": "string"
}
},
"type": "object"
}
},
"type": "object"
}
},
"type": "object"
}
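// Illustrative sketch, not part of the diff: an operator config object shaped like the schema above.
// All values here (Redis host, port 8085, webhook name, subreddit name) are hypothetical and only a
// subset of the optional properties is shown. Note the cache TTLs are in milliseconds per the schema
// descriptions while the polling interval is in seconds.
const exampleOperatorConfig = {
    credentials: {
        clientId: 'myClientId',
        clientSecret: 'myClientSecret',
        redirectUri: 'http://localhost:8085/callback',
    },
    caching: {
        provider: {
            store: 'redis',
            host: 'localhost',
            port: 6379,
        },
        authorTTL: 10000,
        wikiTTL: 300000,
    },
    polling: {
        sharedMod: true,
        interval: 30,
        limit: 50,
    },
    notifications: {
        providers: [
            {name: 'myDiscord', type: 'discord', url: 'https://discord.com/api/webhooks/example'},
        ],
        events: [
            {types: ['pollingError', 'runStateChanged'], providers: ['myDiscord']},
        ],
    },
    web: {
        port: 8085,
        session: {
            provider: 'memory',
            secret: 'changeMe',
        },
    },
    subreddits: {
        names: ['mySubreddit'],
        wikiConfig: 'botconfig/mybot',
    },
};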

File diff suppressed because it is too large

File diff suppressed because it is too large

src/Server/helper.ts (new file, 85 lines)

@@ -0,0 +1,85 @@
import {addAsync, Router} from '@awaitjs/express';
import express from 'express';
import Snoowrap from "snoowrap";
import {permissions} from "../util";
import {getLogger} from "../Utils/loggerFactory";
import {OperatorConfig} from "../Common/interfaces";
const app = addAsync(express());
const router = Router();
app.set('views', `${__dirname}/views`);
app.set('view engine', 'ejs');
app.use(router);
const helperServer = async function (options: OperatorConfig) {
let rUri: string;
const {
credentials: {
clientId,
clientSecret,
redirectUri
},
web: {
port
}
} = options;
const server = await app.listen(port);
const logger = getLogger(options);
logger.info(`Helper UI started: http://localhost:${port}`);
app.getAsync('/', async (req, res) => {
res.render('helper', {
redirectUri
});
});
app.getAsync('/auth', async (req, res) => {
rUri = req.query.redirect as string;
let permissionsList = permissions;
const includeWikiEdit = (req.query.wikiEdit as any).toString() === "1";
if (!includeWikiEdit) {
permissionsList = permissionsList.filter(x => x !== 'wikiedit');
}
const authUrl = Snoowrap.getAuthUrl({
clientId,
scope: permissionsList,
redirectUri: rUri as string,
permanent: true,
});
return res.redirect(authUrl);
});
app.getAsync(/.*callback$/, async (req, res) => {
const {error, code} = req.query as any;
if (error !== undefined) {
let errContent: string;
switch (error) {
case 'access_denied':
errContent = 'You must <b>Allow</b> this application to connect in order to proceed.';
break;
default:
errContent = error;
}
return res.render('error', {error: errContent, });
}
const client = await Snoowrap.fromAuthCode({
userAgent: `web:contextBot:web`,
clientId,
clientSecret,
redirectUri: rUri,
code: code as string,
});
// @ts-ignore
const user = await client.getMe();
res.render('callback', {
accessToken: client.accessToken,
refreshToken: client.refreshToken,
});
});
}
export default helperServer;
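// Illustrative note, not from the diff: the helper server is started with a parsed OperatorConfig,
// e.g. `helperServer(config)` after the CLI/ConfigBuilder has produced `config`; only
// credentials.clientId, credentials.clientSecret, credentials.redirectUri, and web.port are read here.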


@@ -0,0 +1,85 @@
/*https://codepen.io/bheberer/pen/BaNZKmq*/
.toggle-checkbox {
position: absolute;
opacity: 0;
cursor: pointer;
height: 0;
width: 0;
}
.toggle-slot {
position: relative;
height: 10em;
width: 20em;
border: 2px solid #e4e7ec;
padding: 2.5px;
border-radius: 10em;
background-color: white;
/*box-shadow: 0px 10px 25px #e4e7ec;*/
transition: background-color 250ms;
}
.toggle-checkbox:checked ~ .toggle-slot {
background-color: transparent;
}
.toggle-button {
transform: translate(9.75em, 1em);
position: absolute;
height: 4.5em;
width: 4.5em;
border-radius: 50%;
background-color: #ffeccf;
box-shadow: inset 0px 0px 0px 0.75em #ffbb52;
transition: background-color 250ms, border-color 250ms, transform 500ms cubic-bezier(.26,2,.46,.71);
}
.toggle-checkbox:checked ~ .toggle-slot .toggle-button {
background-color: #485367;
box-shadow: inset 0px 0px 0px 0.75em white;
transform: translate(1.75em, 1em);
}
.sun-icon {
position: absolute;
height: 6em;
width: 6em;
color: #ffbb52;
}
.sun-icon-wrapper {
position: absolute;
height: 6em;
width: 6em;
opacity: 1;
transform: translate(1.1em, 0.1em) rotate(15deg);
transform-origin: 50% 50%;
transition: opacity 150ms, transform 500ms cubic-bezier(.26,2,.46,.71);
}
.toggle-checkbox:checked ~ .toggle-slot .sun-icon-wrapper {
opacity: 0;
transform: translate(3em, 2em) rotate(0deg);
}
.moon-icon {
position: absolute;
height: 6em;
width: 6em;
color: white;
}
.moon-icon-wrapper {
position: absolute;
height: 6em;
width: 6em;
opacity: 0;
transform: translate(11em, 1em) rotate(0deg);
transform-origin: 50% 50%;
transition: opacity 150ms, transform 500ms cubic-bezier(.26,2.5,.46,.71);
}
.toggle-checkbox:checked ~ .toggle-slot .moon-icon-wrapper {
opacity: 1;
transform: translate(9em, 0em) rotate(-15deg);
}

src/Server/server.ts (new file, 706 lines)

@@ -0,0 +1,706 @@
import {addAsync, Router} from '@awaitjs/express';
import express from 'express';
import bodyParser from 'body-parser';
import session from 'express-session';
import {Cache} from 'cache-manager';
// @ts-ignore
import CacheManagerStore from 'express-session-cache-manager'
import Snoowrap from "snoowrap";
import {App} from "../App";
import dayjs from 'dayjs';
import {Writable} from "stream";
import winston from 'winston';
import {Server as SocketServer} from 'socket.io';
import sharedSession from 'express-socket.io-session';
import Submission from "snoowrap/dist/objects/Submission";
import EventEmitter from "events";
import tcpUsed from 'tcp-port-used';
import {
boolToString, cacheStats,
COMMENT_URL_ID, createCacheManager,
filterLogBySubreddit,
formatLogLineToHtml, formatNumber,
isLogLineMinLevel,
LogEntry,
parseLinkIdentifier,
parseSubredditLogName, parseSubredditName,
pollingInfo, SUBMISSION_URL_ID
} from "../util";
import {Manager} from "../Subreddit/Manager";
import {getLogger} from "../Utils/loggerFactory";
import LoggedError from "../Utils/LoggedError";
import {OperatorConfig, ResourceStats, RUNNING, STOPPED, SYSTEM, USER} from "../Common/interfaces";
import http from "http";
import SimpleError from "../Utils/SimpleError";
const app = addAsync(express());
const router = Router();
app.use(router);
app.use(bodyParser.json());
app.set('views', `${__dirname}/views`);
app.set('view engine', 'ejs');
interface ConnectedUserInfo {
subreddits: string[],
level?: string,
user: string
}
const commentReg = parseLinkIdentifier([COMMENT_URL_ID]);
const submissionReg = parseLinkIdentifier([SUBMISSION_URL_ID]);
const connectedUsers: Map<string, ConnectedUserInfo> = new Map();
const availableLevels = ['error', 'warn', 'info', 'verbose', 'debug'];
let operatorSessionId: (string | undefined);
declare module 'express-session' {
interface SessionData {
user: string,
subreddits: string[],
lastCheck?: number,
limit?: number,
sort?: string,
level?: string,
}
}
const subLogMap: Map<string, LogEntry[]> = new Map();
const emitter = new EventEmitter();
const stream = new Writable()
const rcbServer = async function (options: OperatorConfig) {
const {
credentials: {
clientId,
clientSecret,
redirectUri
},
operator: {
name,
display,
},
web: {
port,
session: {
provider,
secret,
},
maxLogs,
},
} = options;
let botSubreddits: string[] = [];
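// The stream transport below feeds every formatted log line into subLogMap (a per-subreddit buffer
// capped around maxLogs entries) and re-emits it on `emitter` so connected socket.io clients
// receive live log output.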
stream._write = (chunk, encoding, next) => {
let logLine = chunk.toString();
const now = Date.now();
const logEntry: LogEntry = [now, logLine];
const subName = parseSubredditLogName(logLine);
if (subName !== undefined && (botSubreddits.length === 0 || botSubreddits.includes(subName))) {
const subLogs = subLogMap.get(subName) || [];
subLogs.unshift(logEntry);
subLogMap.set(subName, subLogs.slice(0, maxLogs + 1));
} else {
const appLogs = subLogMap.get('app') || [];
appLogs.unshift(logEntry);
subLogMap.set('app', appLogs.slice(0, maxLogs + 1));
}
emitter.emit('log', logLine);
next();
}
const streamTransport = new winston.transports.Stream({
stream,
})
const logger = getLogger({...options.logging, additionalTransports: [streamTransport]})
if (await tcpUsed.check(port)) {
throw new SimpleError(`Specified port for web interface (${port}) is in use or not available. Cannot start web server.`);
}
let server: http.Server,
io: SocketServer;
try {
server = await app.listen(port);
io = new SocketServer(server);
} catch (err) {
logger.error('Error occurred while initializing web or socket.io server', err);
err.logged = true;
throw err;
}
logger.info(`Web UI started: http://localhost:${port}`);
const bot = new App(options);
await bot.testClient();
app.use('/public', express.static(`${__dirname}/public`));
await bot.buildManagers();
botSubreddits = bot.subManagers.map(x => x.displayLabel);
// TODO potentially prune subLogMap of user keys? shouldn't have happened this early though
if(provider.store === 'none') {
logger.warn(`Cannot use 'none' for session store or else no one can use the interface...falling back to 'memory'`);
provider.store = 'memory';
}
const sessionObj = session({
cookie: {
maxAge: provider.ttl,
},
store: new CacheManagerStore(createCacheManager(provider) as Cache),
resave: false,
saveUninitialized: false,
secret,
});
app.use(sessionObj);
io.use(sharedSession(sessionObj));
io.on("connection", function (socket) {
// @ts-ignore
if (socket.handshake.session.user !== undefined) {
// @ts-ignore
socket.join(socket.handshake.session.id);
// @ts-ignore
connectedUsers.set(socket.handshake.session.id, {
// @ts-ignore
subreddits: socket.handshake.session.subreddits,
// @ts-ignore
level: socket.handshake.session.level,
// @ts-ignore
user: socket.handshake.session.user
});
// @ts-ignore
if (name !== undefined && socket.handshake.session.user.toLowerCase() === name.toLowerCase()) {
// @ts-ignore
operatorSessionId = socket.handshake.session.id;
}
}
});
io.on('disconnect', (socket) => {
// @ts-ignore
connectedUsers.delete(socket.handshake.session.id);
if (operatorSessionId === socket.handshake.session.id) {
operatorSessionId = undefined;
}
});
const redditUserMiddleware = async (req: express.Request, res: express.Response, next: Function) => {
if (req.session.user === undefined) {
return res.redirect('/login');
}
next();
}
const booleanMiddle = (boolParams: string[] = []) => async (req: express.Request, res: express.Response, next: Function) => {
if (req.query !== undefined) {
for (const b of boolParams) {
const bVal = req.query[b] as any;
if (bVal !== undefined) {
let truthyVal: boolean;
if (bVal === 'true' || bVal === true || bVal === 1 || bVal === '1') {
truthyVal = true;
} else if (bVal === 'false' || bVal === false || bVal === 0 || bVal === '0') {
truthyVal = false;
} else {
res.status(400);
res.send(`Expected query parameter ${b} to be a truthy value. Got "${bVal}" but must be one of these: true/false, 1/0`);
return;
}
// @ts-ignore
req.query[b] = truthyVal;
}
}
}
next();
}
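// For example (illustrative, not from the diff): with booleanMiddle(['force']) applied to a route,
// a request to /action?force=1 reaches the handler with req.query.force === true, while
// /action?force=maybe is rejected with a 400 before the handler runs.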
app.getAsync('/logout', async (req, res) => {
// @ts-ignore
req.session.destroy();
res.send('Bye!');
})
app.getAsync('/login', async (req, res) => {
if(redirectUri === undefined) {
return res.render('error', {error: `No <b>redirectUri</b> was specified through environmental variables or program argument. This must be provided in order to use the web interface.`});
}
const authUrl = Snoowrap.getAuthUrl({
clientId,
scope: ['identity', 'mysubreddits'],
redirectUri: redirectUri as string,
permanent: false,
});
return res.redirect(authUrl);
});
app.getAsync(/.*callback$/, async (req, res) => {
const {error, code} = req.query as any;
if (error !== undefined) {
let errContent: string;
switch (error) {
case 'access_denied':
errContent = 'You must <b>Allow</b> this application to connect in order to proceed.';
break;
default:
errContent = error;
}
return res.render('error', {error: errContent, operatorDisplay: display});
}
const client = await Snoowrap.fromAuthCode({
userAgent: `web:contextBot:web`,
clientId,
clientSecret,
redirectUri: redirectUri as string,
code: code as string,
});
// @ts-ignore
const user = await client.getMe().name as string;
const subs = await client.getModeratedSubreddits();
req.session['user'] = user;
// @ts-ignore
req.session['subreddits'] = name !== undefined && name.toLowerCase() === user.toLowerCase() ? bot.subManagers.map(x => x.displayLabel) : subs.reduce((acc: string[], x) => {
const sm = bot.subManagers.find(y => y.subreddit.display_name === x.display_name);
if (sm !== undefined) {
return acc.concat(sm.displayLabel);
}
return acc;
}, []);
req.session['lastCheck'] = dayjs().unix();
res.redirect('/');
});
app.use('/', redditUserMiddleware);
app.getAsync('/', async (req, res) => {
const {
subreddits = [],
user: userVal,
limit = 200,
level = 'verbose',
sort = 'descending',
lastCheck
} = req.session;
const user = userVal as string;
const isOperator = name !== undefined && name.toLowerCase() === user.toLowerCase()
if ((req.session.subreddits as string[]).length === 0 && !isOperator) {
return res.render('noSubs', {operatorDisplay: display});
}
const logs = filterLogBySubreddit(subLogMap, req.session.subreddits, {
level,
operator: isOperator,
user,
// @ts-ignore
sort,
limit
});
const subManagerData = [];
for (const s of subreddits) {
const m = bot.subManagers.find(x => x.displayLabel === s) as Manager;
const sd = {
name: s,
//linkName: s.replace(/\W/g, ''),
logs: logs.get(s) || [], // provide a default empty value in case we truly have not logged anything for this subreddit yet
botState: m.botState,
eventsState: m.eventsState,
queueState: m.queueState,
indicator: 'gray',
queuedActivities: m.queue.length(),
runningActivities: m.queue.running(),
maxWorkers: m.queue.concurrency,
validConfig: boolToString(m.validConfigLoaded),
dryRun: boolToString(m.dryRun === true),
pollingInfo: m.pollOptions.length === 0 ? ['nothing :('] : m.pollOptions.map(pollingInfo),
checks: {
submissions: m.submissionChecks === undefined ? 0 : m.submissionChecks.length,
comments: m.commentChecks === undefined ? 0 : m.commentChecks.length,
},
wikiLocation: m.wikiLocation,
wikiHref: `https://reddit.com/r/${m.subreddit.display_name}/wiki/${m.wikiLocation}`,
wikiRevisionHuman: m.lastWikiRevision === undefined ? 'N/A' : `${dayjs.duration(dayjs().diff(m.lastWikiRevision)).humanize()} ago`,
wikiRevision: m.lastWikiRevision === undefined ? 'N/A' : m.lastWikiRevision.local().format('MMMM D, YYYY h:mm A Z'),
wikiLastCheckHuman: `${dayjs.duration(dayjs().diff(m.lastWikiCheck)).humanize()} ago`,
wikiLastCheck: m.lastWikiCheck.local().format('MMMM D, YYYY h:mm A Z'),
stats: await m.getStats(),
startedAt: 'Not Started',
startedAtHuman: 'Not Started',
delayBy: m.delayBy === undefined ? 'No' : `Delayed by ${m.delayBy} sec`,
};
// TODO replace indicator data with js on client page
let indicator;
if (m.botState.state === RUNNING && m.queueState.state === RUNNING && m.eventsState.state === RUNNING) {
indicator = 'green';
} else if (m.botState.state === STOPPED && m.queueState.state === STOPPED && m.eventsState.state === STOPPED) {
indicator = 'red';
} else {
indicator = 'yellow';
}
sd.indicator = indicator;
if (m.startedAt !== undefined) {
const dur = dayjs.duration(dayjs().diff(m.startedAt));
sd.startedAtHuman = `${dur.humanize()} ago`;
sd.startedAt = m.startedAt.local().format('MMMM D, YYYY h:mm A Z');
if(sd.stats.cache.totalRequests > 0) {
const minutes = dur.asMinutes();
if(minutes < 10) {
sd.stats.cache.requestRate = formatNumber((10/minutes) * sd.stats.cache.totalRequests, {toFixed: 0, round: {enable: true, indicate: true}});
} else {
sd.stats.cache.requestRate = formatNumber(sd.stats.cache.totalRequests / (minutes/10), {toFixed: 0, round: {enable: true, indicate: true}});
}
} else {
sd.stats.cache.requestRate = 0;
}
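// Worked example (illustrative): 120 total cache requests over 60 minutes of uptime gives
// 120 / (60 / 10) = 20 requests per 10 minutes, while 15 requests over only 5 minutes of uptime
// is extrapolated to (10 / 5) * 15 = 30 requests per 10 minutes.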
}
subManagerData.push(sd);
}
const totalStats = subManagerData.reduce((acc, curr) => {
return {
checks: {
submissions: acc.checks.submissions + curr.checks.submissions,
comments: acc.checks.comments + curr.checks.comments,
},
eventsCheckedTotal: acc.eventsCheckedTotal + curr.stats.eventsCheckedTotal,
checksRunTotal: acc.checksRunTotal + curr.stats.checksRunTotal,
checksTriggeredTotal: acc.checksTriggeredTotal + curr.stats.checksTriggeredTotal,
rulesRunTotal: acc.rulesRunTotal + curr.stats.rulesRunTotal,
rulesCachedTotal: acc.rulesCachedTotal + curr.stats.rulesCachedTotal,
rulesTriggeredTotal: acc.rulesTriggeredTotal + curr.stats.rulesTriggeredTotal,
actionsRunTotal: acc.actionsRunTotal + curr.stats.actionsRunTotal,
};
}, {
checks: {
submissions: 0,
comments: 0,
},
eventsCheckedTotal: 0,
checksRunTotal: 0,
checksTriggeredTotal: 0,
rulesRunTotal: 0,
rulesCachedTotal: 0,
rulesTriggeredTotal: 0,
actionsRunTotal: 0,
});
const {checks, ...rest} = totalStats;
let cumRaw = subManagerData.reduce((acc, curr) => {
Object.keys(curr.stats.cache.types as ResourceStats).forEach((k) => {
acc[k].requests += curr.stats.cache.types[k].requests;
acc[k].miss += curr.stats.cache.types[k].miss;
});
return acc;
}, cacheStats());
cumRaw = Object.keys(cumRaw).reduce((acc, curr) => {
const per = acc[curr].miss === 0 ? 0 : formatNumber(acc[curr].miss / acc[curr].requests) * 100;
// @ts-ignore
acc[curr].missPercent = `${formatNumber(per, {toFixed: 0})}%`;
return acc;
}, cumRaw);
let allManagerData: any = {
name: 'All',
linkName: 'All',
indicator: 'green',
botState: {
state: RUNNING,
causedBy: SYSTEM
},
dryRun: boolToString(bot.dryRun === true),
logs: logs.get('all'),
checks: checks,
softLimit: bot.softLimit,
hardLimit: bot.hardLimit,
stats: {
...rest,
cache: {
currentKeyCount: await bot.subManagers[0].resources.getCacheKeyCount(),
isShared: false,
totalRequests: subManagerData.reduce((acc, curr) => acc + curr.stats.cache.totalRequests, 0),
types: {
...cumRaw,
}
}
},
};
if (allManagerData.logs === undefined) {
// this should not happen but saw an edge case where it potentially did
logger.warn(`Logs for 'all' were undefined but should always have a default empty value`);
}
// if(isOperator) {
allManagerData.startedAt = bot.startedAt.local().format('MMMM D, YYYY h:mm A Z');
allManagerData.heartbeatHuman = dayjs.duration({seconds: bot.heartbeatInterval}).humanize();
allManagerData.heartbeat = bot.heartbeatInterval;
allManagerData = {...allManagerData, ...opStats(bot)};
//}
const botDur = dayjs.duration(dayjs().diff(bot.startedAt))
if(allManagerData.stats.cache.totalRequests > 0) {
const minutes = botDur.asMinutes();
if(minutes < 10) {
allManagerData.stats.cache.requestRate = formatNumber((10/minutes) * allManagerData.stats.cache.totalRequests, {toFixed: 0, round: {enable: true, indicate: true}});
} else {
allManagerData.stats.cache.requestRate = formatNumber(allManagerData.stats.cache.totalRequests / (minutes/10), {toFixed: 0, round: {enable: true, indicate: true}});
}
} else {
allManagerData.stats.cache.requestRate = 0;
}
const data = {
userName: user,
system: {
startedAt: bot.startedAt.local().format('MMMM D, YYYY h:mm A Z'),
...opStats(bot),
},
subreddits: [allManagerData, ...subManagerData],
show: 'All',
botName: bot.botName,
operatorDisplay: display,
isOperator,
logSettings: {
//limit: [10, 20, 50, 100, 200].map(x => `<a class="capitalize ${limit === x ? 'font-bold no-underline pointer-events-none' : ''}" data-limit="${x}" href="logs/settings/update?limit=${x}">${x}</a>`).join(' | '),
limitSelect: [10, 20, 50, 100, 200].map(x => `<option ${limit === x ? 'selected' : ''} class="capitalize ${limit === x ? 'font-bold' : ''}" data-value="${x}">${x}</option>`).join(' | '),
//sort: ['ascending', 'descending'].map(x => `<a class="capitalize ${sort === x ? 'font-bold no-underline pointer-events-none' : ''}" data-sort="${x}" href="logs/settings/update?sort=${x}">${x}</a>`).join(' | '),
sortSelect: ['ascending', 'descending'].map(x => `<option ${sort === x ? 'selected' : ''} class="capitalize ${sort === x ? 'font-bold' : ''}" data-value="${x}">${x}</option>`).join(' '),
//level: availableLevels.map(x => `<a class="capitalize log-${x} ${level === x ? `font-bold no-underline pointer-events-none` : ''}" data-log="${x}" href="logs/settings/update?level=${x}">${x}</a>`).join(' | '),
levelSelect: availableLevels.map(x => `<option ${level === x ? 'selected' : ''} class="capitalize log-${x} ${level === x ? `font-bold` : ''}" data-value="${x}">${x}</option>`).join(' '),
},
};
if(req.query.sub !== undefined) {
const encoded = encodeURI(req.query.sub as string).toLowerCase();
const shouldShow = data.subreddits.find(x => x.name.toLowerCase() === encoded);
if(shouldShow !== undefined) {
data.show = shouldShow.name;
}
}
res.render('status', data);
});
app.getAsync('/logs/settings/update', async function (req, res) {
const e = req.query;
for (const [setting, val] of Object.entries(req.query)) {
switch (setting) {
case 'limit':
req.session.limit = Number.parseInt(val as string);
break;
case 'sort':
req.session.sort = val as string;
break;
case 'level':
req.session.level = val as string;
break;
}
}
const {limit = 200, level = 'verbose', sort = 'descending', user} = req.session;
res.send('OK');
const subMap = filterLogBySubreddit(subLogMap, req.session.subreddits, {
level,
operator: name !== undefined && name.toLowerCase() === (user as string).toLowerCase(),
user,
limit,
sort: (sort as 'descending' | 'ascending'),
});
const subArr: any = [];
subMap.forEach((v: string[], k: string) => {
subArr.push({name: k, logs: v.join('')});
});
io.emit('logClear', subArr);
});
app.use('/action', booleanMiddle(['force']));
app.getAsync('/action', async (req, res) => {
const {type, action, subreddit, force = false} = req.query as any;
let subreddits: string[] = [];
if (subreddit === 'All') {
subreddits = req.session.subreddits as string[];
} else if ((req.session.subreddits as string[]).includes(subreddit)) {
subreddits = [subreddit];
}
for (const s of subreddits) {
const manager = bot.subManagers.find(x => x.displayLabel === s);
if (manager === undefined) {
logger.warn(`Manager for ${s} does not exist`, {subreddit: `/u/${req.session.user}`});
continue;
}
const mLogger = manager.logger;
mLogger.info(`/u/${req.session.user} invoked '${action}' action for ${type} on ${manager.displayLabel}`);
try {
switch (action) {
case 'start':
if (type === 'bot') {
await manager.start('user');
} else if (type === 'queue') {
manager.startQueue('user');
} else {
await manager.startEvents('user');
}
break;
case 'stop':
if (type === 'bot') {
await manager.stop('user');
} else if (type === 'queue') {
await manager.stopQueue('user');
} else {
manager.stopEvents('user');
}
break;
case 'pause':
if (type === 'queue') {
await manager.pauseQueue('user');
} else {
manager.pauseEvents('user');
}
break;
case 'reload':
const prevQueueState = manager.queueState.state;
const newConfig = await manager.parseConfiguration('user', force);
if (newConfig === false) {
mLogger.info('Config was up-to-date');
}
if (newConfig && prevQueueState === RUNNING) {
await manager.startQueue(USER);
}
break;
case 'check':
if (type === 'unmoderated') {
const activities = await manager.subreddit.getUnmoderated({limit: 100});
for (const a of activities.reverse()) {
manager.queue.push({
checkType: a instanceof Submission ? 'Submission' : 'Comment',
activity: a,
});
}
} else {
const activities = await manager.subreddit.getModqueue({limit: 100});
for (const a of activities.reverse()) {
manager.queue.push({
checkType: a instanceof Submission ? 'Submission' : 'Comment',
activity: a,
});
}
}
break;
}
} catch (err) {
if (!(err instanceof LoggedError)) {
mLogger.error(err, {subreddit: manager.displayLabel});
}
}
}
res.send('OK');
});
app.use('/check', booleanMiddle(['dryRun']));
app.getAsync('/check', async (req, res) => {
const {url, dryRun, subreddit} = req.query as any;
let a;
const commentId = commentReg(url);
if (commentId !== undefined) {
// @ts-ignore
a = await bot.client.getComment(commentId);
}
if (a === undefined) {
const submissionId = submissionReg(url);
if (submissionId !== undefined) {
// @ts-ignore
a = await bot.client.getSubmission(submissionId);
}
}
if (a === undefined) {
logger.error('Could not parse Comment or Submission ID from given URL', {subreddit: `/u/${req.session.user}`});
return res.send('OK');
} else {
// @ts-ignore
const activity = await a.fetch();
const sub = await activity.subreddit.display_name;
let manager = subreddit === 'All' ? bot.subManagers.find(x => x.subreddit.display_name === sub) : bot.subManagers.find(x => x.displayLabel === subreddit);
if (manager === undefined || !(req.session.subreddits as string[]).includes(manager.displayLabel)) {
let msg = 'Activity does not belong to a subreddit you moderate or the bot runs on.';
if (subreddit === 'All') {
msg = `${msg} If you want to test an Activity against a Subreddit\'s config it does not belong to then switch to that Subreddit's tab first.`
}
logger.error(msg, {subreddit: `/u/${req.session.user}`});
return res.send('OK');
}
// will run dryrun if specified or if running activity on subreddit it does not belong to
const dr: boolean | undefined = (dryRun || manager.subreddit.display_name !== sub) ? true : undefined;
manager.logger.info(`/u/${req.session.user} running${dr === true ? ' DRY RUN ' : ' '}check on${manager.subreddit.display_name !== sub ? ' FOREIGN ACTIVITY ' : ' '}${url}`);
await manager.runChecks(activity instanceof Submission ? 'Submission' : 'Comment', activity, {dryRun: dr})
}
res.send('OK');
})
setInterval(() => {
// refresh op stats every 30 seconds
io.emit('opStats', opStats(bot));
// if (operatorSessionId !== undefined) {
// io.to(operatorSessionId).emit('opStats', opStats(bot));
// }
}, 30000);
emitter.on('log', (log) => {
const emittedSessions = [];
const subName = parseSubredditLogName(log);
if (subName !== undefined) {
for (const [id, info] of connectedUsers) {
const {subreddits, level = 'verbose', user} = info;
if (isLogLineMinLevel(log, level) && (subreddits.includes(subName) || subName.includes(user))) {
emittedSessions.push(id);
io.to(id).emit('log', formatLogLineToHtml(log));
}
}
}
if (operatorSessionId !== undefined) {
io.to(operatorSessionId).emit('opStats', opStats(bot));
if (subName === undefined || !emittedSessions.includes(operatorSessionId)) {
const {level = 'verbose'} = connectedUsers.get(operatorSessionId) || {};
if (isLogLineMinLevel(log, level)) {
io.to(operatorSessionId).emit('log', formatLogLineToHtml(log));
}
}
}
});
await bot.runManagers();
};
const opStats = (bot: App) => {
const limitReset = dayjs(bot.client.ratelimitExpiration);
const nextHeartbeat = bot.nextHeartbeat !== undefined ? bot.nextHeartbeat.local().format('MMMM D, YYYY h:mm A Z') : 'N/A';
const nextHeartbeatHuman = bot.nextHeartbeat !== undefined ? `in ${dayjs.duration(bot.nextHeartbeat.diff(dayjs())).humanize()}` : 'N/A'
return {
startedAtHuman: `${dayjs.duration(dayjs().diff(bot.startedAt)).humanize()}`,
nextHeartbeat,
nextHeartbeatHuman,
apiLimit: bot.client.ratelimitRemaining,
apiAvg: formatNumber(bot.apiRollingAvg),
nannyMode: bot.nannyMode || 'Off',
apiDepletion: bot.apiEstDepletion === undefined ? 'Not Calculated' : bot.apiEstDepletion.humanize(),
limitReset,
limitResetHuman: `in ${dayjs.duration(limitReset.diff(dayjs())).humanize()}`,
}
}
export default rcbServer;


@@ -0,0 +1,39 @@
<html>
<%- include('partials/head', {title: 'RCB OAuth Helper'}) %>
<body class="">
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
<%- include('partials/title', {title: ' OAuth Helper'}) %>
<div class="container mx-auto">
<div class="grid">
<div class="bg-white dark:bg-gray-500 dark:text-white">
<div class="p-6 md:px-10 md:py-6">
<div class="text-xl mb-4">Congrats! You did the thing.</div>
<div class="space-y-3">
<ul class="list-inside list-disc">
<li>Access Token: <b><%= accessToken %></b></li>
<li>Refresh Token: <b><%= refreshToken %></b></li>
</ul>
<div>Copy these somewhere and then restart the application providing these as either arguments
or environmental variables as described in the <a
href="https://github.com/FoxxMD/reddit-context-bot#usage">usage section.</a>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<script>
if (document.querySelector('#redirectUri').value === '') {
document.querySelector('#redirectUri').value = `${document.location.href}callback`;
}
document.querySelector('#doAuth').addEventListener('click', e => {
e.preventDefault()
const url = `${document.location.href}auth?redirect=${document.querySelector('#redirectUri').value}`
window.location.href = url;
})
</script>
</body>
</html>


@@ -0,0 +1,25 @@
<html>
<%- include('partials/head', {title: 'RCB'}) %>
<body class="">
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
<%- include('partials/title', {title: ''}) %>
<div class="container mx-auto">
<div class="grid">
<div class="bg-white dark:bg-gray-500 dark:text-white">
<div class="p-6 md:px-10 md:py-6">
<div class="text-xl mb-4">Oops 😬</div>
<div class="space-y-3">
<div>Something went wrong while processing that last request:</div>
<div><%- error %></div>
<% if(locals.operatorDisplay !== undefined && locals.operatorDisplay !== 'Anonymous') { %>
<div>Operated By: <%= operatorDisplay %></div>
<% } %>
</div>
</div>
</div>
</div>
</div>
</div>
</body>
</html>


@@ -0,0 +1,60 @@
<html>
<%- include('partials/head', {title: 'RCB OAuth Helper'}) %>
<body class="">
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
<%- include('partials/title', {title: ' OAuth Helper'}) %>
<div class="container mx-auto">
<div class="grid">
<div class="bg-white dark:bg-gray-500 dark:text-white">
<div class="p-6 md:px-10 md:py-6">
<div class="text-xl mb-4">Hi! Looks like you're setting up your bot. To get running:</div>
<div class="text-lg text-semibold my-3">1. Set your redirect URL</div>
<input id="redirectUri" style="min-width:500px;"
class="text-black placeholder-gray-500 rounded mt-2 mb-3 p-2" value="<%= redirectUri %>">
<div class="my-2">
<input type="checkbox" id="wikiedit" name="wikiedit"
checked>
<label for="wikiedit">Include <span class="font-mono">wikiedit</span> permission for Toolbox
User Notes</label>
</div>
<div class="space-y-3">
<div>This is the URL Reddit will redirect you to once you have authorized an account to be used
with your application.
</div>
<div>The input field has been pre-filled with either:
<ul class="list-inside list-disc">
<li>What you provided to the program as an argument/environmental variable or</li>
<li>The current URL in your browser that would be used -- if you are using a reverse
proxy this may be different so double check
</li>
</ul>
</div>
<div>Make sure it matches what is found in the <b>redirect uri</b> for your <a target="_blank"
href="https://www.reddit.com/prefs/apps">application
on Reddit</a> and <b>it must end with "callback"</b></div>
</div>
<div class="text-lg text-semibold my-3">2. Login to Reddit with the account that will be the bot
</div>
Protip: Login to Reddit in an Incognito session, then open this URL in a new tab.
<div class="text-lg text-semibold my-3">3. <a id="doAuth" href="">Authorize your bot account</a>
</div>
</div>
</div>
</div>
</div>
</div>
<script>
if (document.querySelector('#redirectUri').value === '') {
document.querySelector('#redirectUri').value = `${document.location.href}callback`;
}
document.querySelector('#doAuth').addEventListener('click', e => {
e.preventDefault()
const wikiEdit = document.querySelector('#wikiedit').checked ? 1 : 0;
const url = `${document.location.href}auth?redirect=${document.querySelector('#redirectUri').value}&wikiEdit=${wikiEdit}`;
window.location.href = url;
})
</script>
</body>
</html>


@@ -0,0 +1,29 @@
<html>
<%- include('partials/head', {title: 'RCB'}) %>
<body class="">
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
<%- include('partials/title', {title: ''}) %>
<div class="container mx-auto">
<div class="grid">
<div class="bg-white dark:bg-gray-500 dark:text-white">
<div class="p-6 md:px-10 md:py-6">
<div class="text-xl mb-4">Sorry!</div>
<div class="space-y-3">
<div>Your account was successfully logged in but you do not have access to this RCB instance because either:</div>
<ul class="list-inside list-disc">
<li>The Bot account used by this instance is not a Moderator of any Subreddits you are also a Moderator of or</li>
<li>the Bot is a Moderator of one of your Subreddits but the Operator of this instance is not currently running the instance on your Subreddits.</li>
</ul>
<div>Note: You must <a href="logout">Logout</a> in order for the instance to detect changes in your subreddits/moderator status</div>
<% if(operatorDisplay !== 'Anonymous') { %>
<div>Operated By: <%= operatorDisplay %></div>
<% } %>
</div>
</div>
</div>
</div>
</div>
</div>
</body>
</html>


@@ -0,0 +1,33 @@
<div class="space-x-4 p-6 md:px-10 md:py-6 leading-6 font-semibold bg-gray-800 text-white">
<div class="container mx-auto">
<div class="flex items-center justify-between">
<div class="flex items-center flex-grow pr-4">
<div class="px-4 width-full relative">
<span>
<a href="https://github.com/FoxxMD/reddit-context-bot">RCB</a> for <a href="https://reddit.com/user/<%= botName %>">/u/<%= botName %></a>
</span>
<span class="inline-block -mb-3 ml-2">
<label style="font-size:2.5px;">
<input class='toggle-checkbox' type='checkbox' id="themeToggle" checked></input>
<div class='toggle-slot'>
<div class='sun-icon-wrapper'>
<div class="iconify sun-icon" data-icon="feather-sun" data-inline="false"></div>
</div>
<div class='toggle-button'></div>
<div class='moon-icon-wrapper'>
<div class="iconify moon-icon" data-icon="feather-moon" data-inline="false"></div>
</div>
</div>
</label>
</span>
<div class="text-small absolute pl-4">
Operated by <%= operatorDisplay %>
</div>
</div>
</div>
<div class="flex items-center flex-end text-sm">
<a href="logout">Logout</a>
</div>
</div>
</div>
</div>


@@ -0,0 +1,86 @@
<head>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/tailwindcss/2.0.3/tailwind.min.css"
integrity="sha512-wl80ucxCRpLkfaCnbM88y4AxnutbGk327762eM9E/rRTvY/ZGAHWMZrYUq66VQBYMIYDFpDdJAOGSLyIPHZ2IQ=="
crossorigin="anonymous"/>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/tailwindcss/2.0.3/tailwind-dark.min.css"
integrity="sha512-WvyKyiVHgInX5UQt67447ExtRRZG/8GUijaq1MpqTNYp8wY4/EJOG5bI80sRp/5crDy4Z6bBUydZI2OFV3Vbtg=="
crossorigin="anonymous"/>
<script src="https://code.iconify.design/1/1.0.4/iconify.min.js"></script>
<link rel="stylesheet" href="public/themeToggle.css">
<style>
a {
text-decoration: underline;
}
.loading {
height: 35px;
fill: black;
display: none;
}
.connected .loading {
display: inline;
}
.dark .loading {
fill: white;
}
.sub {
display: none;
}
.sub.active {
display: inherit;
}
/*https://stackoverflow.com/a/48386400/1469797*/
.stats {
display: grid;
grid-template-columns: max-content auto;
grid-gap: 5px;
}
.stats label {
text-align: right;
}
.stats label:after {
content: ":";
}
.has-tooltip {
/*position: relative;*/
}
.tooltip {
transition-delay: 0.5s;
transition-property: visibility;
visibility: hidden;
position: absolute;
/*right: 0;*/
margin-top:-35px;
}
.has-tooltip:hover .tooltip {
visibility: visible;
transition-delay: 0.2s;
transition-property: visibility;
z-index: 100;
}
.pointer {
cursor: pointer;
}
.botStats.hidden {
display: none;
}
</style>
<title><%= title !== undefined ? title : `RCB for /u/${botName}`%></title>
<!--<title><%# `RCB for /u/${botName}`%></title>-->
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width,initial-scale=1.0">
<!--icons from https://heroicons.com -->
</head>


@@ -0,0 +1,13 @@
<div class="space-x-4 p-6 md:px-10 md:py-6 leading-6 font-semibold bg-gray-800 text-white">
<div class="container mx-auto">
<div class="flex items-center justify-between">
<div class="flex items-center flex-grow pr-4">
<div class="px-4 width-full relative">
<div><a href="https://github.com/FoxxMD/reddit-context-bot">RCB</a> <%= title %></div>
</div>
</div>
<div class="flex items-center flex-end text-sm">
</div>
</div>
</div>
</div>

src/Server/views/status.ejs (new file, 756 lines)

@@ -0,0 +1,756 @@
<html>
<%- include('partials/head', {title: undefined}) %>
<body class="">
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
<%- include('partials/authTitle') %>
<div class="space-x-4 py-1 md:px-10 leading-6 font-semibold bg-gray-500 dark:bg-gray-700 text-white">
<div class="container mx-auto">
<ul id="tabs" class="inline-flex flex-wrap">
<% subreddits.forEach(function (data){ %>
<li class="my-3 px-3 dark:text-white">
<span data-subreddit="<%= data.name %>" class="rounded-md py-2 px-3 tabSelectWrapper">
<a class="tabSelect font-normal pointer hover:font-bold"
data-subreddit="<%= data.name %>">
<%= data.name %>
</a>
<% if ((data.name === 'All' && isOperator) || data.name !== 'All') { %>
<span class="inline-block mb-0.5 ml-0.5 w-2 h-2 bg-<%= data.indicator %>-400 rounded-full"></span>
<% } %>
</span>
</li>
<% }) %>
</ul>
</div>
</div>
<div class="container mx-auto">
<div class="grid">
<div class="bg-white dark:bg-gray-500 dark:text-white">
<div class="pb-6 pt-3 md:px-7">
<!-- <div class="flex items-center justify-around">-->
<!-- -->
<!-- </div>-->
<% subreddits.forEach(function (data){ %>
<div class="sub" data-subreddit="<%= data.name %>">
<div class="grid grid-cols-1 lg:grid-cols-2 xl:grid-cols-3 2xl:grid-cols-3 gap-5">
<div class="bg-white shadow-md rounded my-3 dark:bg-gray-500 dark:text-white">
<div class="space-x-4 px-4 p-2 leading-2 font-semibold bg-gray-300 dark:bg-gray-700 dark:text-white">
<div class="flex items-center justify-between">
<h4>Overview</h4>
<% if (data.name === 'All') { %>
<div class="flex items-center flex-end">
<div>
<a class="action" data-action="start" data-type="bot"
data-subreddit="<%= data.name %>"
href="#">Start All</a> |
<a class="action" data-action="stop" data-type="bot"
data-subreddit="<%= data.name %>"
href="#">Stop All</a> |
<a class="action" data-action="reload"
data-subreddit="<%= data.name %>"
href="#">Update All</a>
</div>
</div>
<% } else { %>
<div class="flex items-center flex-end">
<div>
<span class="mr-2">Check:</span>
<a class="action" data-action="check" data-type="unmoderated"
data-subreddit="<%= data.name %>"
href="#">Unmoderated</a> |
<a class="action" data-action="check" data-type="modqueue"
data-subreddit="<%= data.name %>"
href="#">Modqueue</a>
</div>
</div>
<% } %>
</div>
</div>
<div class="p-4">
<div class="stats">
<% if (data.name !== 'All') { %>
<label>
<span class="has-tooltip">
<span class="tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2 space-y-3 p-2 text-left">
<div>The "main" process monitoring Activity for the subreddit.</div>
<div><b>Starting</b> or <b>Stopping</b> this will also Start/Stop <b>Queue/Events.</b></div>
<div>Additionally, the <b>Stopped</b> state will prevent configuration from being checked on heartbeat. This is useful if the subreddit's config is really broken and you want to essentially ignore this subreddit until manually interacted with.</div>
</span>
<span>
Bot
<svg xmlns="http://www.w3.org/2000/svg"
class="h-4 w-4 inline-block cursor-help"
fill="none"
viewBox="0 0 24 24" stroke="currentColor">
<path stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
</svg>
</span>
</span>
</label>
<div class="flex items-center justify-between">
<span class="font-semibold"><%= `${data.botState.state}${data.botState.causedBy === 'system' ? '' : ' (user)'}` %></span>
<div class="flex items-center flex-end">
<div>
<a class="action" data-action="start" data-type="bot"
data-subreddit="<%= data.name %>" href="#">Start</a> |
<a class="action" data-action="stop" data-type="bot"
data-subreddit="<%= data.name %>" href="#">Stop</a>
</div>
</div>
</div>
<label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2 space-y-3 p-2 text-left'>
<div>The <b>Queue</b> controls processing of <b>Activities</b> ingested from <b>Events.</b></div>
<ul class="list-inside list-disc">
<li><b>Starting</b> the Queue will begin Processing (running checks on) queued Activities, up to the max number of workers available</li>
<li><b>Stopping</b> the Queue will prevent queued Activities from being Processed once any currently processing Activities have finished.</li>
</ul>
<div>If all available workers are busy processing Activities then new Activities returned from <b>Events</b> will be marked as <b>Queued</b></div>
</span>
<span>
Queue
<svg xmlns="http://www.w3.org/2000/svg"
class="h-4 w-4 inline-block cursor-help"
fill="none"
viewBox="0 0 24 24" stroke="currentColor">
<path stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
</svg>
</span>
</span>
</label>
<div class="flex items-center justify-between">
<span class="font-semibold"><%= `${data.queueState.state}${data.queueState.causedBy === 'system' ? '' : ' (user)'}` %></span>
<div class="flex items-center flex-end">
<div>
<a class="action" data-action="start" data-type="queue"
data-subreddit="<%= data.name %>" href="#">Start</a> |
<a class="action" data-action="pause" data-type="queue"
data-subreddit="<%= data.name %>" href="#">Pause</a> |
<a class="action" data-action="stop" data-type="queue"
data-subreddit="<%= data.name %>" href="#">Stop</a>
</div>
</div>
</div>
<label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2 space-y-3 p-2 text-left'>
<div><b>Events</b> controls polling (monitoring) of <b>Activity Sources</b> (unmoderated, modqueue, comments, etc.)</div>
<ul class="list-inside list-disc">
<li><b>Starting</b> Events will cause polling to begin. Any new Activities discovered after polling begins will be sent to <b>Queue</b></li>
<li><b>Stopping</b> Events will cause polling to stop.</li>
</ul>
</span>
<span>
Events
<svg xmlns="http://www.w3.org/2000/svg"
class="h-4 w-4 inline-block cursor-help"
fill="none"
viewBox="0 0 24 24" stroke="currentColor">
<path stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
</svg>
</span>
</span>
</label>
<div class="flex items-center justify-between">
<span class="font-semibold"><%= `${data.eventsState.state}${data.eventsState.causedBy === 'system' ? '' : ' (user)'}` %></span>
<div class="flex items-center flex-end">
<div>
<a class="action" data-action="start" data-type="event"
data-subreddit="<%= data.name %>" href="#">Start</a> |
<a class="action" data-action="pause" data-type="event"
data-subreddit="<%= data.name %>" href="#">Pause</a> |
<a class="action" data-action="stop" data-type="event"
data-subreddit="<%= data.name %>" href="#">Stop</a>
</div>
</div>
</div>
<label>Activities</label>
<span class="has-tooltip">
<span style="margin-top:-55px"
class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black'>
<div>Max Concurrent Processing</div>
<div>Config: <%= data.maxWorkers %></div>
</span>
<span><%= `${data.runningActivities} Processing / ${data.queuedActivities} Queued` %></span>
</span>
<label>Slow Mode</label>
<span><%= data.delayBy %></span>
<% } %>
<% if (data.name === 'All') { %>
<label>Uptime</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<%= system.startedAt %>
</span>
<span>
<span id="startedAtHuman"><%= system.startedAtHuman %></span>
</span>
</span>
<label>Heartbeat Interval</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<%= data.heartbeat %> seconds
</span>
<%= data.heartbeatHuman %>
</span>
<label>Next Heartbeat</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span id="nextHeartbeat"><%= data.nextHeartbeat %></span>
</span>
<span id="nextHeartbeatHuman"><%= data.nextHeartbeatHuman %></span>
</span>
<% } %>
</div>
<% if (data.name !== 'All') { %>
<ul class="list-disc list-inside mt-4">
<% data.pollingInfo.forEach(function (i){ %>
<li>Polling <%- i %></li>
<% }) %>
</ul>
<% } %>
</div>
</div>
<% if (data.name === 'All') { %>
<div class="bg-white shadow-md rounded my-3 dark:bg-gray-500 dark:text-white">
<div class="space-x-4 px-4 p-2 leading-2 font-semibold bg-gray-300 dark:bg-gray-700 dark:text-white">
<h4>API</h4>
</div>
<div class="p-4">
<div class="stats">
<label>Soft Limit</label>
<span>< <span id="softLimit"><%= data.softLimit %></span></span>
<label>Hard Limit</label>
<span>< <span id="softLimit"><%= data.hardLimit %></span></span>
<label>Api Nanny</label>
<span><b><span id="nannyMode"><%= data.nannyMode %></span></b></span>
<label>Api Usage</label>
<span><span id="apiLimit"><%= data.apiLimit %></span>/600 (~<span
id="apiAvg"><%= data.apiAvg %></span>req/s)</span>
<label>Depleted</label>
<span>in ~<span id="apiDepletion"><%= data.apiDepletion %></span></span>
<label>Limit Reset</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span id="limitReset"><%= data.limitReset %></span>
</span>
<span id="limitResetHuman"><%= data.limitResetHuman %></span>
</span>
</div>
</div>
</div>
<% } %>
<% if (data.name !== 'All') { %>
<div class="bg-white shadow-md rounded my-3 dark:bg-gray-500 dark:text-white">
<div class="space-x-4 px-4 p-2 leading-2 font-semibold bg-gray-300 dark:bg-gray-700 dark:text-white">
<h4>Config
<span>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<%= data.wikiRevision %>
</span>
(Revised <%= data.wikiRevisionHuman %>)
</span>
</h4>
</div>
<div class="p-4">
<div class="stats">
<label>Valid</label>
<span class="font-semibold"><%= data.validConfig %></span>
<label>Checks</label>
<span><%= data.checks.submissions %> Submission | <%= data.checks.comments %> Comment </span>
<label>Dry Run</label>
<span><%= data.dryRun %></span>
<label>Updated</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<%= data.startedAt %>
</span>
<%= data.startedAtHuman %>
</span>
<label>Checked</label>
<span>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<%= data.wikiLastCheck %>
</span>
<%= data.wikiLastCheckHuman %>
</span>
<a class="action" data-action="reload"
data-subreddit="<%= data.name %>"
href="#">Update</a> | <a
class="action" data-action="reload" data-force="true"
data-subreddit="<%= data.name %>" href="#">Force</a>
</span>
<label>Location</label>
<span>
<a style="display: inline"
href="<%= data.wikiHref %>"><%= data.wikiLocation %></a>
</span>
</div>
</div>
</div>
<% } %>
<div class="bg-white shadow-md rounded my-3 dark:bg-gray-500 dark:text-white">
<div class="space-x-4 px-4 p-2 leading-2 font-semibold bg-gray-300 dark:bg-gray-700 dark:text-white">
<h4>Usage</h4>
</div>
<div class="p-4">
<div class="flex items-top justify-evenly">
<div>
<div class="text-left font-semibold pb-2">Bot</div>
<% if (data.name !== 'All') { %><span class="text-center pb-2"><a
data-subreddit="<%= data.name %>"
data-stattype="reloadStats" class="statsToggle" href="">Last Reload</a> | <a
data-subreddit="<%= data.name %>" data-stattype="allStats"
class="statsToggle allStatsToggle"
href="">All Time</a> </span>
<% } %>
<% if (data.name !== 'All') { %>
<div data-subreddit="<%= data.name %>"
class="stats botStats reloadStats">
<label>Events</label>
<span><%= data.stats.eventsCheckedSinceStartTotal === undefined ? '-' : data.stats.eventsCheckedSinceStartTotal %></span>
<label>Checks</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.checksTriggeredSinceStartTotal %></span> Triggered / <span><%= data.stats.checksRunSinceStartTotal %></span> Run
</span>
<span><%= data.stats.checksTriggeredSinceStartTotal %></span> T / <span><%= data.stats.checksRunSinceStartTotal %></span> R
</span>
<label>Rules</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.rulesTriggeredSinceStartTotal %></span> Triggered / <span><%= data.stats.rulesCachedSinceStartTotal %></span> Cached / <span><%= data.stats.rulesRunSinceStartTotal %></span> Run
</span>
<span class="cursor-help">
<span><%= data.stats.rulesTriggeredSinceStartTotal %></span> T / <span><%= data.stats.rulesCachedSinceStartTotal %></span> C / <span><%= data.stats.rulesRunSinceStartTotal %></span> R</span>
</span>
<label>Actions</label>
<span class="cursor-help"><%= data.stats.actionsRunSinceStartTotal === undefined ? '-' : data.stats.actionsRunSinceStartTotal %></span>
</div>
<% } %>
<div data-subreddit="<%= data.name %>" class="stats botStats allStats">
<label>Events</label>
<span><%= data.stats.eventsCheckedTotal %></span>
<label>Checks</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.checksTriggeredTotal %></span> Triggered / <span><%= data.stats.checksRunTotal %></span> Run
</span>
<span class="cursor-help"><span><%= data.stats.checksTriggeredTotal %></span> T / <span><%= data.stats.checksRunTotal %></span> R</span>
</span>
<label>Rules</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.rulesTriggeredTotal %></span> Triggered / <span><%= data.stats.rulesCachedTotal %></span> Cached / <span><%= data.stats.rulesRunTotal %></span> Run
</span>
<span class="cursor-help"><span><%= data.stats.rulesTriggeredTotal %></span> T / <span><%= data.stats.rulesCachedTotal %></span> C / <span><%= data.stats.rulesRunTotal %></span> R</span>
</span>
<label>Actions</label>
<span><%= data.stats.actionsRunTotal %> Run</span>
</div>
</div>
<div>
<div class="text-left pb-2">
<% if (data.name === 'All') { %>
<span class="font-semibold">Cache</span>
<% } else { %>
<div>
<span class="font-semibold capitalize">Cache - <%= data.stats.cache.provider %><%= data.stats.cache.isShared ? ' (Shared)' : '' %></span>
<span class="has-tooltip">
<span style="right: 0;"
class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2 space-y-3 p-2'>
<% if (data.stats.cache.isShared) { %>
<div>This subreddit is using the default, <b>application-wide shared cache</b> because its <b
class="font-mono">caching</b> configuration is not specified.</div>
<div>Pros:
<ul class="list-inside list-disc">
<li>All subreddits can utilize any cached authors/etc., reduces overall api usage</li>
<li>Bot Operator can fine tune cache without subreddit interaction</li>
</ul>
</div>
<div>
Cons:
<ul class="list-inside list-disc">
<li>Subreddits must use default TTLs which may not fit use case for rules</li>
<li>Bots operating subreddits with dramatically contrasting caching requirements may suffer in performance/api usage</li>
</ul>
</div>
<% } else { %>
<div>This subreddit is using its <b>own, dedicated cache</b> because a <b
class="font-mono">caching</b> configuration is specified for it.</div>
<div>Pros:
<ul class="list-inside list-disc">
<li>TTLs can be tuned to fit this subreddit's rules and usage patterns</li>
<li>Cache behavior is isolated from the other subreddits the bot runs</li>
</ul>
</div>
<div>
Cons:
<ul class="list-inside list-disc">
<li>Cached authors/etc. are not shared with other subreddits, which may increase overall api usage</li>
<li>The Bot Operator cannot tune this cache without changing the subreddit's configuration</li>
</ul>
</div>
<% } %>
</span>
<span>
<svg xmlns="http://www.w3.org/2000/svg"
class="h-4 w-4 inline-block cursor-help"
fill="none"
viewBox="0 0 24 24" stroke="currentColor">
<path stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
</svg>
</span>
</span>
</div>
<% } %>
</div>
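<!-- The shared/dedicated distinction above comes from the manager's cache stats: isShared is true when the manager's cacheSettingsHash is 'default', i.e. the subreddit did not specify its own caching block. -->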
<div class="stats">
<label>Keys</label>
<span><%= data.stats.cache.currentKeyCount %></span>
<label>Calls</label>
<span><span><%= data.stats.cache.totalRequests %></span> <span>(<%= data.stats.cache.requestRate %>/10min)</span></span>
</div>
<div class="text-left py-2">
<span class="font-semibold">Calls Breakdown</span>
<span class="has-tooltip">
<span style="right: 0;"
class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2 space-y-3 p-2'>
<div>Number of calls to cache made for each type of cached item.</div>
<div>The <b>miss %</b> is the percentage of cache calls that were empty and data had to be fully acquired/processed.</div>
<div>
<ul class="list-inside list-disc">
<li><b>Author</b> - Cached history for an Activity's Author acquired from <span
class="font-mono">window</span> criteria. A cache miss means at least one API call must be made.</li>
<li><b>Criteria</b> - Cached results of an <span
class="font-mono">authorIs</span> test. A cache miss may require at least one API call.</li>
<li><b>Content</b> - Cached footer/comment/ban content. A cache miss requires one API call.</li>
<li><b>UserNote</b> - Cached UserNotes. A cache miss requires one API call.</li>
</ul>
</div>
<div>
Some tips/tricks for cache:
</div>
<ul class="list-inside list-disc">
<li>You only need to pay attention to caching if a subreddit uses the API/cache heavily, i.e. high-volume comment checks or very large check sets for submissions</li>
<li>Increasing TTL will reduce cache misses and api usage at the expense of a larger cache and staler results</li>
<li>Re-using <span
class="font-mono">window</span> and <span
class="font-mono">authorIs</span> values in configuration will enable the bot to re-use these results and thus reduce cache misses/api usage</li>
</ul>
</span>
<span>
<svg xmlns="http://www.w3.org/2000/svg"
class="h-4 w-4 inline-block cursor-help"
fill="none"
viewBox="0 0 24 24" stroke="currentColor">
<path stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
</svg>
</span>
</span>
</div>
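<!-- Example of the re-use tip above (hypothetical config): if two rules in a subreddit's configuration both use the same window value for the same author, the second rule reads the already-cached Author history instead of making additional API calls, lowering the miss % shown below. -->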
<div class="stats">
<label>Author</label>
<span><%= data.stats.cache.types.author.requests %> (<%= data.stats.cache.types.author.missPercent %> miss)</span>
<label>Criteria</label>
<span><%= data.stats.cache.types.authorCrit.requests %> (<%= data.stats.cache.types.authorCrit.missPercent %> miss)</span>
<label>Content</label>
<span><%= data.stats.cache.types.content.requests %> (<%= data.stats.cache.types.content.missPercent %> miss)</span>
<label>UserNote</label>
<span><%= data.stats.cache.types.userNotes.requests %> (<%= data.stats.cache.types.userNotes.missPercent %> miss)</span>
</div>
</div>
</div>
</div>
</div>
</div>
<br/>
<div class="flex items-center justify-between flex-wrap">
<div class="inline-flex items-center">
<input data-subreddit="<%= data.name %>" style="min-width: 420px;"
class="border-gray-50 placeholder-gray-500 rounded mt-2 mb-3 p-2 text-black checkUrl"
placeholder="<%= data.name === 'All' ? 'Run Bot on a permalink from any moderated Subreddit' : `Run Bot on a permalink using this Subreddit's config` %>"/>
<span class="mx-2">
<input type="checkbox" class="dryrunCheck" data-subreddit="<%= data.name %>"
name="dryrunCheck">
<label for="dryrunCheck">Dry Run?</label>
</span>
<a class="runCheck" data-subreddit="<%= data.name %>" href="">Run</a>
</div>
<div class="flex items-center flex-end space-x-2">
<span>
<label for="level-select">Level: </label>
<select class="logSettingSelect rounded capitalize text-black" data-type="level"
id="levels-select">
<%- logSettings.levelSelect %>
</select>
</span>
<span>
<label for="sort-select">Sort: </label>
<select class="logSettingSelect rounded capitalize text-black" data-type="sort"
id="sort-select">
<%- logSettings.sortSelect %>
</select>
</span>
<span>
<label for="limit-select">Limit: </label>
<select class="logSettingSelect rounded capitalize text-black" data-type="limit"
id="limit-select">
<%- logSettings.limitSelect %>
</select>
</span>
</div>
</div>
<svg class="loading" version="1.1" id="L9" xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 100 100" xml:space="preserve">
<path
d="M73,50c0-12.7-10.3-23-23-23S27,37.3,27,50 M30.9,50c0-10.5,8.5-19.1,19.1-19.1S69.1,39.5,69.1,50">
<animateTransform
attributeName="transform"
attributeType="XML"
type="rotate"
dur="1s"
from="0 50 50"
to="360 50 50"
repeatCount="indefinite"/>
</path>
</svg>
<div data-subreddit="<%= data.name %>" class="logs font-mono text-sm">
<% data.logs.forEach(function (logEntry){ %>
<%- logEntry %>
<% }) %>
</div>
</div>
<% }) %>
</div>
<!--<div class="w-full flex-auto flex min-h-0 overflow-auto">
<div class="w-full relative flex-auto">
</div>
</div>-->
</div>
</div>
</div>
</div>
<script>
/* const appendUser = causedBy => causedBy === 'system' ? '' : ' (user)';
const initialData =
const updateOverview = (sub, data) => {
}*/
window.sort = 'desc';
document.querySelectorAll('.theme').forEach(el => {
el.addEventListener('click', e => {
e.preventDefault();
if (e.target.id === 'dark') {
document.body.classList.add('dark');
localStorage.setItem('ms-dark', 'yes');
} else {
document.body.classList.remove('dark');
localStorage.setItem('ms-dark', 'no');
}
document.querySelectorAll('.theme').forEach(el => {
el.classList.remove('font-bold', 'no-underline', 'pointer-events-none');
});
e.target.classList.add('font-bold', 'no-underline', 'pointer-events-none');
})
})
document.querySelector("#themeToggle").checked = localStorage.getItem('ms-dark') !== 'no';
document.querySelector("#themeToggle").onchange = (e) => {
if (e.target.checked === true) {
document.body.classList.add('dark');
localStorage.setItem('ms-dark', 'yes');
} else {
document.body.classList.remove('dark');
localStorage.setItem('ms-dark', 'no');
}
}
// if (localStorage.getItem('ms-dark') === 'no') {
// document.querySelector('#light.theme').classList.add('font-bold', 'no-underline', 'pointer-events-none');
// document.body.classList.remove('dark')
// } else {
// document.querySelector('#dark.theme').classList.add('font-bold', 'no-underline', 'pointer-events-none');
// }
document.querySelectorAll('.logSettingSelect').forEach(el => {
el.onchange = (e) => {
const action = e.target.dataset.type;
const value = e.target.value;
fetch(`logs/settings/update?${action}=${value}`);
document.querySelectorAll(`#${e.target.id}.logSettingSelect option`).forEach(el => {
el.classList.remove('font-bold');
});
document.querySelector(`#${e.target.id}.logSettingSelect option[data-value="${e.target.value}"]`).classList.add('font-bold');
}
});
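// Start/Stop/Pause/Update links: build a query string from the clicked link's data attributes
// and fire-and-forget it at the 'action' endpoint,
// e.g. action?action=stop&subreddit=mySub&force=false&type=bot (values here are illustrative)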
document.querySelectorAll('.action').forEach(el => {
el.addEventListener('click', e => {
e.preventDefault()
const action = e.target.dataset.action;
const subreddit = e.target.dataset.subreddit;
const type = e.target.dataset.type;
const force = e.target.dataset.force === 'true';
fetch(`action?action=${action}&subreddit=${subreddit}&force=${force}&type=${type}`);
});
})
document.querySelectorAll(".runCheck").forEach(el => {
el.addEventListener('click', e => {
e.preventDefault();
const subreddit = e.target.dataset.subreddit;
const urlInput = document.querySelector(`[data-subreddit="${subreddit}"].checkUrl`);
const dryRunCheck = document.querySelector(`[data-subreddit="${subreddit}"].dryrunCheck`);
const url = urlInput.value;
const dryRun = dryRunCheck.checked ? 1 : 0;
const fetchUrl = `check?url=${url}&dryRun=${dryRun}&subreddit=${subreddit}`;
fetch(fetchUrl);
urlInput.value = '';
dryRunCheck.checked = false;
});
});
document.querySelectorAll('.statsToggle').forEach(el => {
el.addEventListener('click', e => {
const subreddit = e.target.dataset.subreddit;
const statsClass = e.target.dataset.stattype;
e.preventDefault();
document.querySelectorAll(`[data-subreddit="${subreddit}"].statsToggle`).forEach(el => {
el.classList.remove('font-bold', 'no-underline', 'pointer-events-none');
});
e.target.classList.add('font-bold', 'no-underline', 'pointer-events-none');
document.querySelectorAll(`[data-subreddit="${subreddit}"].botStats`).forEach(el => {
el.classList.add('hidden');
});
document.querySelector(`[data-subreddit="${subreddit}"].botStats.${statsClass}`).classList.remove('hidden');
});
})
document.querySelectorAll('[data-subreddit].tabSelect').forEach(el => {
el.addEventListener('click', e => {
e.preventDefault();
document.querySelectorAll('[data-subreddit].tabSelect').forEach(el => {
el.classList.remove('font-bold', 'no-underline', 'pointer-events-none');
});
document.querySelectorAll('[data-subreddit].sub').forEach(el => {
el.classList.remove('active');
});
e.target.classList.add('font-bold', 'no-underline', 'pointer-events-none');
document.querySelector(`[data-subreddit="${e.target.dataset.subreddit}"].sub`).classList.add('active');
document.querySelectorAll('.tabSelectWrapper').forEach(el => {
el.classList.remove('border-2');
el.classList.add('border');
})
const wrapper = document.querySelector(`[data-subreddit="${e.target.dataset.subreddit}"].tabSelectWrapper`);
wrapper.classList.remove('border');
wrapper.classList.add('border-2');
if ('URLSearchParams' in window) {
var searchParams = new URLSearchParams(window.location.search)
searchParams.set("sub", e.target.dataset.subreddit);
var newRelativePathQuery = window.location.pathname + '?' + searchParams.toString();
history.pushState(null, '', newRelativePathQuery);
}
});
})
document.querySelector('[data-subreddit="<%= show %>"].tabSelect').classList.add('font-bold', 'no-underline', 'pointer-events-none');
document.querySelectorAll('.tabSelectWrapper').forEach(el => el.classList.add('border'));
document.querySelector('[data-subreddit="<%= show %>"].sub').classList.add('active')
const wrapper = document.querySelector(`[data-subreddit="<%= show %>"].tabSelectWrapper`);
wrapper.classList.remove('border');
wrapper.classList.add('border-2');
document.querySelectorAll('.stats.reloadStats').forEach(el => el.classList.add('hidden'));
document.querySelectorAll('.allStatsToggle').forEach(el => el.classList.add('font-bold', 'no-underline', 'pointer-events-none'));
</script>
<script src="https://cdn.socket.io/3.1.3/socket.io.min.js"
integrity="sha384-cPwlPLvBTa3sKAgddT6krw0cJat7egBga3DJepJyrLl4Q9/5WLra3rrnMcyTyOnh"
crossorigin="anonymous"></script>
<script>
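// Log lines streamed from the server embed the subreddit label in curly braces; this helper
// pulls the label out so each incoming line can be appended to the matching subreddit's log
// panel in addition to the 'All' panel (window.sort decides whether it is prepended or appended).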
const SUBREDDIT_NAME_LOG_REGEX = /{(.+?)}/;
const parseSubredditLogName = (val) => {
const matches = val.match(SUBREDDIT_NAME_LOG_REGEX);
if (matches === null) {
return undefined;
}
return matches[1];
}
let socket = io({
reconnectionAttempts: 5, // bail after 5 attempts
});
socket.on("connect", () => {
document.body.classList.add('connected')
socket.on("log", data => {
const selectors = ['[data-subreddit="All"].logs'];
const sub = parseSubredditLogName(data);
if (sub !== undefined) {
selectors.push(`[data-subreddit="${sub}"].logs`);
}
selectors.forEach(sel => {
const el = document.querySelector(sel);
if (el !== null) {
const currLogs = el.innerHTML;
document.querySelector(sel).innerHTML = window.sort === 'desc' ? data.concat(currLogs) : currLogs.concat(data)
}
});
});
socket.on("logClear", data => {
data.forEach((obj) => {
const n = obj.name === 'all' ? 'All' : obj.name;
document.querySelector(`[data-subreddit="${n}"].logs`).innerHTML = obj.logs;
})
//document.querySelector('.logs').innerHTML = data.join().replaceAll(/<br\s*\/?>\,/g,'<br />');
});
socket.on('opStats', (data) => {
for (const [k, v] of Object.entries(data)) {
document.querySelector(`#${k}`).innerHTML = v;
}
});
});
socket.on('disconnect', () => {
document.body.classList.remove('connected');
});
</script>
</body>
</html>


@@ -1,136 +1,882 @@
import Snoowrap, {Comment, Submission, Subreddit} from "snoowrap";
import Snoowrap, {Comment, Subreddit} from "snoowrap";
import {Logger} from "winston";
import {SubmissionCheck} from "../Check/SubmissionCheck";
import {CommentCheck} from "../Check/CommentCheck";
import {createLabelledLogger, determineNewResults, loggerMetaShuffle, mergeArr, sleep} from "../util";
import {CommentStream, SubmissionStream} from "snoostorm";
import {
cacheStats,
createRetryHandler,
determineNewResults, formatNumber,
mergeArr, parseFromJsonOrYamlToObject, pollingInfo, sleep, totalFromMapStats,
} from "../util";
import {Poll} from "snoostorm";
import pEvent from "p-event";
import {RuleResult} from "../Rule";
import {ConfigBuilder} from "../ConfigBuilder";
import {PollingOptions} from "../Common/interfaces";
import {ConfigBuilder, buildPollingOptions} from "../ConfigBuilder";
import {
DEFAULT_POLLING_INTERVAL,
DEFAULT_POLLING_LIMIT, Invokee,
ManagerOptions, ManagerStateChangeOption, PAUSED,
PollingOptionsStrong, RUNNING, RunState, STOPPED, SYSTEM, USER
} from "../Common/interfaces";
import Submission from "snoowrap/dist/objects/Submission";
import {activityIsRemoved, itemContentPeek} from "../Utils/SnoowrapUtils";
import LoggedError from "../Utils/LoggedError";
import ResourceManager, {
SubredditResourceConfig,
SubredditResources,
SubredditResourceSetOptions
} from "./SubredditResources";
import {SPoll, UnmoderatedStream, ModQueueStream, SubmissionStream, CommentStream} from "./Streams";
import EventEmitter from "events";
import ConfigParseError from "../Utils/ConfigParseError";
import dayjs, {Dayjs as DayjsObj} from "dayjs";
import Action from "../Action";
import {queue, QueueObject} from 'async';
import {JSONConfig} from "../JsonConfig";
import {CheckStructuredJson} from "../Check";
import NotificationManager from "../Notification/NotificationManager";
export interface ManagerOptions {
polling?: PollingOptions
export interface RunningState {
state: RunState,
causedBy: Invokee
}
export interface runCheckOptions {
checkNames?: string[],
delayUntil?: number,
dryRun?: boolean,
}
export interface CheckTask {
checkType: ('Comment' | 'Submission'),
activity: (Submission | Comment),
options?: runCheckOptions
}
export interface RuntimeManagerOptions extends ManagerOptions {
sharedModqueue?: boolean;
}
export class Manager {
subreddit: Subreddit;
client: Snoowrap;
logger: Logger;
pollOptions: PollingOptions;
submissionChecks: SubmissionCheck[];
commentChecks: CommentCheck[];
pollOptions: PollingOptionsStrong[] = [];
submissionChecks!: SubmissionCheck[];
commentChecks!: CommentCheck[];
resources!: SubredditResources;
wikiLocation: string = 'botconfig/contextbot';
lastWikiRevision?: DayjsObj
lastWikiCheck: DayjsObj = dayjs();
//wikiUpdateRunning: boolean = false;
subListedOnce = false;
streamSub?: SubmissionStream;
commentsListedOnce = false;
streamComments?: CommentStream;
streamListedOnce: string[] = [];
streams: SPoll<Snoowrap.Submission | Snoowrap.Comment>[] = [];
modStreamCallbacks: Map<string, any> = new Map();
dryRun?: boolean;
sharedModqueue: boolean;
globalDryRun?: boolean;
emitter: EventEmitter = new EventEmitter();
queue: QueueObject<CheckTask>;
constructor(sub: Subreddit, client: Snoowrap, logger: Logger, sourceData: object, opts: ManagerOptions = {}) {
this.logger = logger.child(loggerMetaShuffle(logger, undefined, [`r/${sub.display_name}`], {truncateLength: 40}), mergeArr);
displayLabel: string;
currentLabels: string[] = [];
const configBuilder = new ConfigBuilder({logger: this.logger});
const [subChecks, commentChecks] = configBuilder.buildFromJson(sourceData);
this.pollOptions = opts.polling || {};
startedAt?: DayjsObj;
validConfigLoaded: boolean = false;
running: boolean = false;
manuallyStopped: boolean = false;
eventsState: RunningState = {
state: STOPPED,
causedBy: SYSTEM
};
queueState: RunningState = {
state: STOPPED,
causedBy: SYSTEM
};
botState: RunningState = {
state: STOPPED,
causedBy: SYSTEM
}
notificationManager: NotificationManager;
// used by the api nanny to slow event consumption
delayBy?: number;
eventsCheckedTotal: number = 0;
eventsCheckedSinceStartTotal: number = 0;
eventsSample: number[] = [];
eventsSampleInterval: any;
eventsRollingAvg: number = 0;
checksRunTotal: number = 0;
checksRunSinceStartTotal: number = 0;
checksTriggered: Map<string, number> = new Map();
checksTriggeredSinceStart: Map<string, number> = new Map();
rulesRunTotal: number = 0;
rulesRunSinceStartTotal: number = 0;
rulesCachedTotal: number = 0;
rulesCachedSinceStartTotal: number = 0;
rulesTriggeredTotal: number = 0;
rulesTriggeredSinceStartTotal: number = 0;
rulesUniqueSample: number[] = [];
rulesUniqueSampleInterval: any;
rulesUniqueRollingAvg: number = 0;
actionsRun: Map<string, number> = new Map();
actionsRunSinceStart: Map<string, number> = new Map();
getStats = async () => {
const data: any = {
eventsCheckedTotal: this.eventsCheckedTotal,
eventsCheckedSinceStartTotal: this.eventsCheckedSinceStartTotal,
eventsAvg: formatNumber(this.eventsRollingAvg),
checksRunTotal: this.checksRunTotal,
checksRunSinceStartTotal: this.checksRunSinceStartTotal,
checksTriggered: this.checksTriggered,
checksTriggeredTotal: totalFromMapStats(this.checksTriggered),
checksTriggeredSinceStart: this.checksTriggeredSinceStart,
checksTriggeredSinceStartTotal: totalFromMapStats(this.checksTriggeredSinceStart),
rulesRunTotal: this.rulesRunTotal,
rulesRunSinceStartTotal: this.rulesRunSinceStartTotal,
rulesCachedTotal: this.rulesCachedTotal,
rulesCachedSinceStartTotal: this.rulesCachedSinceStartTotal,
rulesTriggeredTotal: this.rulesTriggeredTotal,
rulesTriggeredSinceStartTotal: this.rulesTriggeredSinceStartTotal,
rulesAvg: formatNumber(this.rulesUniqueRollingAvg),
actionsRun: this.actionsRun,
actionsRunTotal: totalFromMapStats(this.actionsRun),
actionsRunSinceStart: this.actionsRunSinceStart,
actionsRunSinceStartTotal: totalFromMapStats(this.actionsRunSinceStart),
cache: {
provider: 'none',
currentKeyCount: 0,
isShared: false,
totalRequests: 0,
requestRate: 0,
types: cacheStats()
},
};
if (this.resources !== undefined) {
const resStats = this.resources.getStats();
data.cache = resStats.cache;
data.cache.currentKeyCount = await this.resources.getCacheKeyCount();
data.cache.isShared = this.resources.cacheSettingsHash === 'default';
data.cache.provider = this.resources.cacheType;
}
return data;
}
getCurrentLabels = () => {
return this.currentLabels;
}
getDisplay = () => {
return this.displayLabel;
}
constructor(sub: Subreddit, client: Snoowrap, logger: Logger, opts: RuntimeManagerOptions = {}) {
const {dryRun, sharedModqueue = false} = opts;
this.displayLabel = opts.nickname || `${sub.display_name_prefixed}`;
const getLabels = this.getCurrentLabels;
const getDisplay = this.getDisplay;
// dynamic default meta for winston feasible using function getters
// https://github.com/winstonjs/winston/issues/1626#issuecomment-531142958
this.logger = logger.child({
get labels() {
return getLabels()
},
get subreddit() {
return getDisplay()
}
}, mergeArr);
this.globalDryRun = dryRun;
this.sharedModqueue = sharedModqueue;
this.subreddit = sub;
this.client = client;
this.submissionChecks = subChecks;
this.commentChecks = commentChecks;
const checkSummary = `Found Checks -- Submission: ${this.submissionChecks.length} | Comment: ${this.commentChecks.length}`;
if (subChecks.length === 0 && commentChecks.length === 0) {
this.logger.warn(checkSummary);
} else {
this.logger.info(checkSummary);
this.notificationManager = new NotificationManager(this.logger, this.subreddit, this.displayLabel);
this.queue = queue(async (task: CheckTask, cb) => {
if(this.delayBy !== undefined) {
this.logger.debug(`SOFT API LIMIT MODE: Delaying Event run by ${this.delayBy} seconds`);
await sleep(this.delayBy * 1000);
}
await this.runChecks(task.checkType, task.activity, task.options);
}
// TODO allow concurrency??
, 1);
this.queue.error((err, task) => {
this.logger.error('Encountered unhandled error while processing Activity, processing stopped early');
this.logger.error(err);
});
this.queue.drain(() => {
this.logger.debug('All queued activities have been processed.');
});
this.queue.pause();
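// Rolling-average samplers: every 10 seconds the cumulative totals are pushed onto a short
// sample window (last 8 samples), each 10-second delta is converted to a per-second rate, and
// the rates are averaged. e.g. samples of eventsCheckedTotal [40, 30, 20, 10] taken 10s apart
// (illustrative values) give deltas of 1/s each, so eventsRollingAvg ~= 1 event/sec over ~70s.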
this.eventsSampleInterval = setInterval((function(self) {
return function() {
const rollingSample = self.eventsSample.slice(0, 7)
rollingSample.unshift(self.eventsCheckedTotal)
self.eventsSample = rollingSample;
const diff = self.eventsSample.reduceRight((acc: number[], curr, index) => {
if(self.eventsSample[index + 1] !== undefined) {
const d = curr - self.eventsSample[index + 1];
if(d === 0) {
return [...acc, 0];
}
return [...acc, d/10];
}
return acc;
}, []);
self.eventsRollingAvg = diff.reduce((acc, curr) => acc + curr,0) / diff.length;
//self.logger.debug(`Event Rolling Avg: ${formatNumber(self.eventsRollingAvg)}/s`);
}
})(this), 10000);
this.rulesUniqueSampleInterval = setInterval((function(self) {
return function() {
const rollingSample = self.rulesUniqueSample.slice(0, 7)
rollingSample.unshift(self.rulesRunTotal - self.rulesCachedTotal);
self.rulesUniqueSample = rollingSample;
const diff = self.rulesUniqueSample.reduceRight((acc: number[], curr, index) => {
if(self.rulesUniqueSample[index + 1] !== undefined) {
const d = curr - self.rulesUniqueSample[index + 1];
if(d === 0) {
return [...acc, 0];
}
return [...acc, d/10];
}
return acc;
}, []);
self.rulesUniqueRollingAvg = diff.reduce((acc, curr) => acc + curr,0) / diff.length;
//self.logger.debug(`Unique Rules Run Rolling Avg: ${formatNumber(self.rulesUniqueRollingAvg)}/s`);
}
})(this), 10000);
}
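// Takes the already-parsed wiki config object, validates it, rebuilds polling/caching/notification
// options and the subreddit's resources, then constructs the Submission/Comment checks.
// Any failure marks the loaded config as invalid and re-throws.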
protected parseConfigurationFromObject(configObj: object) {
try {
const configBuilder = new ConfigBuilder({logger: this.logger});
const validJson = configBuilder.validateJson(configObj);
const {checks, ...configManagerOpts} = validJson;
const {
polling = [{pollOn: 'unmoderated', limit: DEFAULT_POLLING_LIMIT, interval: DEFAULT_POLLING_INTERVAL}],
caching,
dryRun,
footer,
nickname,
notifications,
} = configManagerOpts || {};
this.pollOptions = buildPollingOptions(polling);
this.dryRun = this.globalDryRun || dryRun;
this.displayLabel = nickname || `${this.subreddit.display_name_prefixed}`;
if (footer !== undefined) {
this.resources.footer = footer;
}
this.logger.info(`Dry Run: ${this.dryRun === true}`);
for (const p of this.pollOptions) {
this.logger.info(`Polling Info => ${pollingInfo(p)}`)
}
this.notificationManager = new NotificationManager(this.logger, this.subreddit, this.displayLabel, notifications);
const {events, notifiers} = this.notificationManager.getStats();
const notifierContent = notifiers.length === 0 ? 'None' : notifiers.join(', ');
const eventContent = events.length === 0 ? 'None' : events.join(', ');
this.logger.info(`Notification Info => Providers: ${notifierContent} | Events: ${eventContent}`);
let resourceConfig: SubredditResourceConfig = {
footer,
logger: this.logger,
subreddit: this.subreddit,
caching
};
this.resources = ResourceManager.set(this.subreddit.display_name, resourceConfig);
this.logger.info('Subreddit-specific options updated');
this.logger.info('Building Checks...');
const commentChecks: Array<CommentCheck> = [];
const subChecks: Array<SubmissionCheck> = [];
const structuredChecks = configBuilder.parseToStructured(validJson);
for (const jCheck of structuredChecks) {
const checkConfig = {
...jCheck,
dryRun: this.dryRun || jCheck.dryRun,
logger: this.logger,
subredditName: this.subreddit.display_name
};
if (jCheck.kind === 'comment') {
commentChecks.push(new CommentCheck(checkConfig));
} else if (jCheck.kind === 'submission') {
subChecks.push(new SubmissionCheck(checkConfig));
}
}
this.submissionChecks = subChecks;
this.commentChecks = commentChecks;
const checkSummary = `Found Checks -- Submission: ${this.submissionChecks.length} | Comment: ${this.commentChecks.length}`;
if (subChecks.length === 0 && commentChecks.length === 0) {
this.logger.warn(checkSummary);
} else {
this.logger.info(checkSummary);
}
this.validConfigLoaded = true;
} catch (err) {
this.validConfigLoaded = false;
throw err;
}
}
async runChecks(checkType: ('Comment' | 'Submission'), item: (Submission | Comment)): Promise<void> {
const checks = checkType === 'Comment' ? this.commentChecks : this.submissionChecks;
const itemId = await item.id;
let allRuleResults: RuleResult[] = [];
async parseConfiguration(causedBy: Invokee = 'system', force: boolean = false, options?: ManagerStateChangeOption) {
const {reason, suppressNotification = false} = options || {};
//this.wikiUpdateRunning = true;
this.lastWikiCheck = dayjs();
for (const check of checks) {
this.logger.debug(`Running Check ${check.name} on ${checkType} (ID ${itemId})`);
let triggered = false;
try {
let sourceData: string;
try {
const [checkTriggered, checkResults] = await check.run(item, allRuleResults);
allRuleResults = allRuleResults.concat(determineNewResults(allRuleResults, checkResults));
triggered = checkTriggered;
const invokedRules = checkResults.map(x => x.name || x.premise.kind).join(' | ');
if (checkTriggered) {
this.logger.debug(`Check ${check.name} was triggered with invoked Rules: ${invokedRules}`);
} else {
this.logger.debug(`Check ${check.name} was not triggered using invoked Rule(s): ${invokedRules}`);
// @ts-ignore
const wiki = await this.subreddit.getWikiPage(this.wikiLocation).fetch();
const revisionDate = dayjs.unix(wiki.revision_date);
if (!force && this.validConfigLoaded && (this.lastWikiRevision !== undefined && this.lastWikiRevision.isSame(revisionDate))) {
// nothing to do, we already have this revision
//this.wikiUpdateRunning = false;
if (force) {
this.logger.info('Config is up to date');
}
return false;
}
} catch (e) {
this.logger.warn(`Check ${check.name} on Submission (ID ${itemId}) failed with error: ${e.message}`, e);
if (force) {
this.logger.info('Config update was forced');
} else if (!this.validConfigLoaded) {
this.logger.info('Trying to load (new?) config now since there is no valid config loaded');
} else if (this.lastWikiRevision !== undefined) {
this.logger.info(`Updating config due to stale wiki page (${dayjs.duration(dayjs().diff(revisionDate)).humanize()} old)`)
}
if(this.queueState.state === RUNNING) {
this.logger.verbose('Waiting for activity processing queue to pause before continuing config update');
await this.pauseQueue(causedBy);
}
this.lastWikiRevision = revisionDate;
sourceData = await wiki.content_md;
} catch (err) {
const msg = `Could not read wiki configuration. Please ensure the page https://reddit.com${this.subreddit.url}wiki/${this.wikiLocation} exists and is readable -- error: ${err.message}`;
this.logger.error(msg);
throw new ConfigParseError(msg);
}
if (triggered) {
// TODO give actions a name
await check.runActions(item, this.client);
this.logger.debug(`Ran actions for Check ${check.name}`);
break;
if (sourceData === '') {
this.logger.error(`Wiki page contents was empty`);
throw new ConfigParseError('Wiki page contents was empty');
}
const [configObj, jsonErr, yamlErr] = parseFromJsonOrYamlToObject(sourceData);
if (configObj === undefined) {
this.logger.error(`Could not parse wiki page contents as JSON or YAML:`);
this.logger.error(jsonErr);
this.logger.error(yamlErr);
throw new ConfigParseError('Could not parse wiki page contents as JSON or YAML')
}
this.parseConfigurationFromObject(configObj);
this.logger.info('Checks updated');
if(!suppressNotification) {
this.notificationManager.handle('configUpdated', 'Configuration Updated', reason, causedBy)
}
return true;
} catch (err) {
this.validConfigLoaded = false;
throw err;
}
}
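// Processes a single queued Activity: runs each configured check in order, stops at the first
// check whose rules trigger and runs that check's actions, then records run/trigger/cache stats
// and Reddit API usage in the finally block.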
async runChecks(checkType: ('Comment' | 'Submission'), activity: (Submission | Comment), options?: runCheckOptions): Promise<void> {
const checks = checkType === 'Comment' ? this.commentChecks : this.submissionChecks;
let item = activity;
this.eventsCheckedTotal++;
this.eventsCheckedSinceStartTotal++;
const itemId = await item.id;
let allRuleResults: RuleResult[] = [];
const itemIdentifier = `${checkType === 'Submission' ? 'SUB' : 'COM'} ${itemId}`;
this.currentLabels = [itemIdentifier];
let ePeek = '';
try {
const [peek, _] = await itemContentPeek(item);
ePeek = peek;
this.logger.info(`<EVENT> ${peek}`);
} catch (err) {
this.logger.error(`Error occurred while generating item peek for ${checkType} Activity ${itemId}`, err);
}
const {
checkNames = [],
delayUntil,
dryRun,
} = options || {};
if (delayUntil !== undefined) {
const created = dayjs.unix(item.created_utc);
const diff = dayjs().diff(created, 's');
if (diff < delayUntil) {
this.logger.verbose(`Delaying processing until Activity is ${delayUntil} seconds old (${delayUntil - diff}s)`);
await sleep(delayUntil - diff);
// @ts-ignore
item = await activity.refresh();
}
}
const startingApiLimit = this.client.ratelimitRemaining;
if (item instanceof Submission) {
if (await item.removed_by_category === 'deleted') {
this.logger.warn('Submission was deleted, cannot process.');
return;
}
} else if (item.author.name === '[deleted]') {
this.logger.warn('Comment was deleted, cannot process.');
return;
}
let checksRun = 0;
let actionsRun = 0;
let totalRulesRun = 0;
let runActions: Action[] = [];
try {
let triggered = false;
for (const check of checks) {
if (checkNames.length > 0 && !checkNames.map(x => x.toLowerCase()).some(x => x === check.name.toLowerCase())) {
this.logger.warn(`Check ${check.name} not in array of requested checks to run, skipping`);
continue;
}
checksRun++;
triggered = false;
let currentResults: RuleResult[] = [];
try {
const [checkTriggered, checkResults] = await check.runRules(item, allRuleResults);
currentResults = checkResults;
totalRulesRun += checkResults.length;
allRuleResults = allRuleResults.concat(determineNewResults(allRuleResults, checkResults));
triggered = checkTriggered;
} catch (e) {
if (e.logged !== true) {
this.logger.warn(`Running rules for Check ${check.name} failed due to uncaught exception`, e);
}
}
if (triggered) {
this.checksTriggered.set(check.name, (this.checksTriggered.get(check.name) || 0) + 1);
this.checksTriggeredSinceStart.set(check.name, (this.checksTriggeredSinceStart.get(check.name) || 0) + 1);
runActions = await check.runActions(item, currentResults.filter(x => x.triggered), dryRun);
actionsRun = runActions.length;
if(check.notifyOnTrigger) {
const ar = runActions.map(x => x.getActionUniqueName()).join(', ');
this.notificationManager.handle('eventActioned', 'Check Triggered', `Check "${check.name}" was triggered on Event: \n ${ePeek} \n\n with the following actions run: ${ar}`);
}
break;
}
}
if (!triggered) {
this.logger.info('No checks triggered');
}
} catch (err) {
if (!(err instanceof LoggedError) && err.logged !== true) {
this.logger.error('An unhandled error occurred while running checks', err);
}
} finally {
try {
const cachedTotal = totalRulesRun - allRuleResults.length;
const triggeredRulesTotal = allRuleResults.filter(x => x.triggered).length;
this.checksRunTotal += checksRun;
this.checksRunSinceStartTotal += checksRun;
this.rulesRunTotal += totalRulesRun;
this.rulesRunSinceStartTotal += totalRulesRun;
this.rulesCachedTotal += cachedTotal;
this.rulesCachedSinceStartTotal += cachedTotal;
this.rulesTriggeredTotal += triggeredRulesTotal;
this.rulesTriggeredSinceStartTotal += triggeredRulesTotal;
for (const a of runActions) {
const name = a.getActionUniqueName();
this.actionsRun.set(name, (this.actionsRun.get(name) || 0) + 1);
this.actionsRunSinceStart.set(name, (this.actionsRunSinceStart.get(name) || 0) + 1)
}
this.logger.verbose(`Run Stats: Checks ${checksRun} | Rules => Total: ${totalRulesRun} Unique: ${allRuleResults.length} Cached: ${totalRulesRun - allRuleResults.length} Rolling Avg: ~${formatNumber(this.rulesUniqueRollingAvg)}/s | Actions ${actionsRun}`);
this.logger.verbose(`Reddit API Stats: Initial ${startingApiLimit} | Current ${this.client.ratelimitRemaining} | Used ~${startingApiLimit - this.client.ratelimitRemaining} | Events ~${formatNumber(this.eventsRollingAvg)}/s`);
this.currentLabels = [];
} catch (err) {
this.logger.error('Error occurred while cleaning up Activity check and generating stats', err);
}
}
}
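// Builds one polling stream per configured source. When a source uses the default limit/interval
// the app-wide shared mod streams from ResourceManager can be reused; otherwise a dedicated SPoll
// stream is created for this subreddit, wired into the activity queue, with retry/pause handling on errors.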
async buildPolling() {
// give current handle() time to stop
//await sleep(1000);
const retryHandler = createRetryHandler({maxRequestRetry: 5, maxOtherRetry: 1}, this.logger);
const subName = this.subreddit.display_name;
for (const pollOpt of this.pollOptions) {
const {
pollOn,
limit,
interval,
delayUntil
} = pollOpt;
let stream: SPoll<Snoowrap.Submission | Snoowrap.Comment>;
let modStreamType: string | undefined;
switch (pollOn) {
case 'unmoderated':
if (limit === DEFAULT_POLLING_LIMIT && interval === DEFAULT_POLLING_INTERVAL && this.sharedModqueue) {
modStreamType = 'unmoderated';
// use default mod stream from resources
stream = ResourceManager.modStreams.get('unmoderated') as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
} else {
stream = new UnmoderatedStream(this.client, {
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
});
}
break;
case 'modqueue':
if (limit === DEFAULT_POLLING_LIMIT && interval === DEFAULT_POLLING_INTERVAL) {
modStreamType = 'modqueue';
// use default mod stream from resources
stream = ResourceManager.modStreams.get('modqueue') as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
} else {
stream = new ModQueueStream(this.client, {
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
});
}
break;
case 'newSub':
stream = new SubmissionStream(this.client, {
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
});
break;
case 'newComm':
stream = new CommentStream(this.client, {
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
});
break;
}
stream.once('listing', async (listing) => {
if (!this.streamListedOnce.includes(pollOn)) {
// warn if this polling source may return Activities that no configured checks will process
if (this.commentChecks.length === 0 && ['unmoderated', 'modqueue', 'newComm'].some(x => x === pollOn)) {
this.logger.warn(`Polling '${pollOn}' may return Comments but no comment checks were configured.`);
}
if (this.submissionChecks.length === 0 && ['unmoderated', 'modqueue', 'newSub'].some(x => x === pollOn)) {
this.logger.warn(`Polling '${pollOn}' may return Submissions but no submission checks were configured.`);
}
this.streamListedOnce.push(pollOn);
}
});
const onItem = async (item: Comment | Submission) => {
if (!this.streamListedOnce.includes(pollOn)) {
return;
}
if (item.subreddit.display_name !== subName || this.eventsState.state !== RUNNING) {
return;
}
let checkType: 'Submission' | 'Comment' | undefined;
if (item instanceof Submission) {
if (this.submissionChecks.length > 0) {
checkType = 'Submission';
}
} else if (this.commentChecks.length > 0) {
checkType = 'Comment';
}
if (checkType !== undefined) {
this.queue.push({checkType, activity: item, options: {delayUntil}})
}
};
stream.on('item', onItem);
if (modStreamType !== undefined) {
this.modStreamCallbacks.set(pollOn, onItem);
} else {
// @ts-ignore
stream.on('error', async (err: any) => {
this.logger.error('Polling error occurred', err);
const shouldRetry = await retryHandler(err);
if (shouldRetry) {
stream.startInterval();
} else {
this.logger.warn('Pausing event polling due to too many errors');
await this.pauseEvents();
}
});
this.streams.push(stream);
}
}
}
async handle(): Promise<void> {
if (this.submissionChecks.length > 0) {
const {
submissions: {
limit = 10,
interval = 10000,
} = {}
} = this.pollOptions
this.streamSub = new SubmissionStream(this.client, {
subreddit: this.subreddit.display_name,
limit,
pollTime: interval,
});
this.streamSub.once('listing', async (listing) => {
this.subListedOnce = true;
// for debugging
// await this.runChecks('Submission', listing[0]);
});
this.streamSub.on('item', async (item) => {
if (!this.subListedOnce) {
return;
}
await this.runChecks('Submission', item)
});
if (this.submissionChecks.length === 0 && this.commentChecks.length === 0) {
this.logger.warn('No submission or comment checks to run! Bot will not run.');
return;
}
if (this.commentChecks.length > 0) {
const {
comments: {
limit = 10,
interval = 10000,
} = {}
} = this.pollOptions
this.streamComments = new CommentStream(this.client, {
subreddit: this.subreddit.display_name,
limit,
pollTime: interval,
});
this.streamComments.once('listing', () => this.commentsListedOnce = true);
this.streamComments.on('item', async (item) => {
if (!this.commentsListedOnce) {
return;
}
await this.runChecks('Comment', item)
});
}
try {
for (const s of this.streams) {
s.startInterval();
}
this.startedAt = dayjs();
this.running = true;
this.manuallyStopped = false;
this.logger.info('Bot Running');
if (this.streamSub !== undefined) {
await pEvent(this.streamSub, 'end');
} else if (this.streamComments !== undefined) {
await pEvent(this.streamComments, 'end');
await pEvent(this.emitter, 'end');
} catch (err) {
this.logger.error('Too many request errors occurred or an unhandled error was encountered, manager is stopping');
} finally {
this.stop();
}
}
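// The start/pause/stop methods below move the queue, event polling, and the overall bot between
// RUNNING/PAUSED/STOPPED, record whether the change was caused by the system or a user (Invokee),
// and emit a 'runStateChanged' notification unless suppressed.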
startQueue(causedBy: Invokee = 'system', options?: ManagerStateChangeOption) {
const {reason, suppressNotification = false} = options || {};
if(this.queueState.state === RUNNING) {
this.logger.info(`Activity processing queue is already RUNNING with (${this.queue.length()} queued activities)`);
} else if (!this.validConfigLoaded) {
this.logger.warn('Cannot start activity processing queue while manager has an invalid configuration');
} else {
this.logger.warn('No submission or comment checks to run!');
this.queue.resume();
this.logger.info(`Activity processing queue started RUNNING with ${this.queue.length()} queued activities`);
this.queueState = {
state: RUNNING,
causedBy
}
if(!suppressNotification) {
this.notificationManager.handle('runStateChanged', 'Queue Started', reason, causedBy)
}
}
}
async pauseQueue(causedBy: Invokee = 'system', options?: ManagerStateChangeOption) {
const {reason, suppressNotification = false} = options || {};
if(this.queueState.state === PAUSED) {
if(this.queueState.causedBy !== causedBy) {
this.logger.info(`Activity processing queue state set to PAUSED by ${causedBy}`);
this.queueState = {
state: PAUSED,
causedBy
}
} else {
this.logger.info('Activity processing queue already PAUSED');
}
} else if(this.queueState.state === STOPPED) {
this.logger.info(`Activity processing queue must be in RUNNING state to pause`);
} else {
this.queue.pause();
if(this.queue.running() === 0) {
this.logger.info('Paused activity processing queue');
} else {
const pauseWaitStart = dayjs();
this.logger.info(`Activity processing queue is pausing...waiting for ${this.queue.running()} activities to finish processing`);
while (this.queue.running() > 0) {
await sleep(1500);
this.logger.verbose(`Activity processing queue is pausing...waiting for ${this.queue.running()} activities to finish processing`);
}
this.logger.info(`Activity processing queue paused (waited ${dayjs().diff(pauseWaitStart, 's')} seconds while activity processing finished)`);
}
this.queueState = {
state: PAUSED,
causedBy
}
if(!suppressNotification) {
this.notificationManager.handle('runStateChanged', 'Queue Paused', reason, causedBy)
}
}
}
async stopQueue(causedBy: Invokee = 'system', options?: ManagerStateChangeOption) {
const {reason, suppressNotification = false} = options || {};
if(this.queueState.state === STOPPED) {
if(this.queueState.causedBy !== causedBy) {
this.logger.info(`Activity processing queue state set to STOPPED by ${causedBy}`);
} else {
this.logger.info(`Activity processing queue is already STOPPED`);
}
} else {
this.queue.pause();
if(this.queue.running() === 0) {
this.logger.info('Stopped activity processing queue');
} else {
const pauseWaitStart = dayjs();
this.logger.info(`Activity processing queue is stopping...waiting for ${this.queue.running()} activities to finish processing`);
while (this.queue.running() > 0) {
await sleep(1500);
this.logger.verbose(`Activity processing queue is stopping...waiting for ${this.queue.running()} activities to finish processing`);
}
this.logger.info(`Activity processing queue stopped by ${causedBy} and ${this.queue.length()} queued activities cleared (waited ${dayjs().diff(pauseWaitStart, 's')} seconds while activity processing finished)`);
this.queue.kill();
}
this.queueState = {
state: STOPPED,
causedBy
}
if(!suppressNotification) {
this.notificationManager.handle('runStateChanged', 'Queue Stopped', reason, causedBy)
}
}
}
async startEvents(causedBy: Invokee = 'system', options?: ManagerStateChangeOption) {
const {reason, suppressNotification = false} = options || {};
if(!this.validConfigLoaded) {
this.logger.warn('Cannot start event polling while manager has an invalid configuration');
return;
}
if(this.eventsState.state === RUNNING) {
this.logger.info('Event polling already running');
} else {
if(this.eventsState.state === STOPPED) {
await this.buildPolling();
}
if (this.submissionChecks.length === 0 && this.commentChecks.length === 0) {
this.logger.warn('No submission or comment checks found!');
}
for (const s of this.streams) {
s.startInterval();
}
this.startedAt = dayjs();
}
this.logger.info('Event polling STARTED');
this.eventsState = {
state: RUNNING,
causedBy
}
if(!suppressNotification) {
this.notificationManager.handle('runStateChanged', 'Events Polling Started', reason, causedBy)
}
}
pauseEvents(causedBy: Invokee = 'system', options?: ManagerStateChangeOption) {
const {reason, suppressNotification = false} = options || {};
if(this.eventsState.state !== RUNNING) {
this.logger.warn('Events must be in RUNNING state in order to be paused.');
} else {
this.eventsState = {
state: PAUSED,
causedBy
};
for(const s of this.streams) {
s.end();
}
if(causedBy === USER) {
this.logger.info('Event polling is PAUSED. (Note: To change polling behavior you must first STOP event polling)')
} else {
this.logger.info('Event polling is PAUSED.');
}
if(!suppressNotification) {
this.notificationManager.handle('runStateChanged', 'Events Polling Paused', reason, causedBy)
}
}
}
stopEvents(causedBy: Invokee = 'system', options?: ManagerStateChangeOption) {
const {reason, suppressNotification = false} = options || {};
if(this.eventsState.state !== STOPPED) {
for (const s of this.streams) {
s.end();
}
this.streams = [];
for (const [k, v] of this.modStreamCallbacks) {
const stream = ResourceManager.modStreams.get(k) as Poll<Snoowrap.Submission | Snoowrap.Comment>;
stream.removeListener('item', v);
}
this.startedAt = undefined;
this.eventsCheckedSinceStartTotal = 0;
this.checksRunSinceStartTotal = 0;
this.rulesRunSinceStartTotal = 0;
this.rulesCachedSinceStartTotal = 0;
this.rulesTriggeredSinceStartTotal = 0;
this.checksTriggeredSinceStart = new Map();
this.actionsRunSinceStart = new Map();
this.logger.info(`Events STOPPED by ${causedBy}`);
this.eventsState = {
state: STOPPED,
causedBy
}
this.logger.info('Note: Polling behavior will be re-built from configuration when next started');
if(!suppressNotification) {
this.notificationManager.handle('runStateChanged', 'Events Polling Stopped', reason, causedBy)
}
} else if(causedBy !== this.eventsState.causedBy) {
this.logger.info(`Events STOPPED by ${causedBy}`);
this.logger.info('Note: Polling behavior will be re-built from configuration when next started');
this.eventsState.causedBy = causedBy;
} else {
this.logger.info('Events already STOPPED');
}
}
async start(causedBy: Invokee = 'system', options?: ManagerStateChangeOption) {
const {reason, suppressNotification = false} = options || {};
if(!this.validConfigLoaded) {
this.logger.warn('Cannot put bot in RUNNING state while manager has an invalid configuration');
return;
}
await this.startEvents(causedBy, {suppressNotification: true});
this.startQueue(causedBy, {suppressNotification: true});
this.botState = {
state: RUNNING,
causedBy
}
if(!suppressNotification) {
this.notificationManager.handle('runStateChanged', 'Bot Started', reason, causedBy)
}
}
async stop(causedBy: Invokee = 'system', options?: ManagerStateChangeOption) {
const {reason, suppressNotification = false} = options || {};
this.stopEvents(causedBy, {suppressNotification: true});
await this.stopQueue(causedBy, {suppressNotification: true});
this.botState = {
state: STOPPED,
causedBy
}
if(!suppressNotification) {
this.notificationManager.handle('runStateChanged', 'Bot Stopped', reason, causedBy)
}
}
}

src/Subreddit/Streams.ts

@@ -0,0 +1,100 @@
import {Poll, SnooStormOptions} from "snoostorm"
import Snoowrap from "snoowrap";
import {EventEmitter} from "events";
import {PollConfiguration} from "snoostorm/out/util/Poll";
import {DEFAULT_POLLING_INTERVAL} from "../Common/interfaces";
type Awaitable<T> = Promise<T> | T;
export class SPoll<T extends object> extends Poll<T> {
identifier: keyof T;
getter: () => Awaitable<T[]>;
frequency;
running: boolean = false;
constructor(options: PollConfiguration<T>) {
super(options);
this.identifier = options.identifier;
this.getter = options.get;
this.frequency = options.frequency;
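// the base Poll constructor starts its own interval immediately; clear it so polling only begins when startInterval() is called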
clearInterval(this.interval);
}
startInterval = () => {
this.running = true;
this.interval = setInterval(async () => {
try {
const batch = await this.getter();
const newItems: T[] = [];
for (const item of batch) {
const id = item[this.identifier];
if (this.processed.has(id)) continue;
// Emit for new items and add it to the list
newItems.push(item);
this.processed.add(id);
this.emit("item", item);
}
// Emit the new listing of all new items
this.emit("listing", newItems);
} catch (err) {
this.emit('error', err);
this.end();
}
}, this.frequency);
}
end = () => {
this.running = false;
super.end();
}
}
export class UnmoderatedStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
constructor(
client: Snoowrap,
options: SnooStormOptions & { subreddit: string }) {
super({
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
get: async () => client.getSubreddit(options.subreddit).getUnmoderated(options),
identifier: "id",
});
}
}
export class ModQueueStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
constructor(
client: Snoowrap,
options: SnooStormOptions & { subreddit: string }) {
super({
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
get: async () => client.getSubreddit(options.subreddit).getModqueue(options),
identifier: "id",
});
}
}
export class SubmissionStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
constructor(
client: Snoowrap,
options: SnooStormOptions & { subreddit: string }) {
super({
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
get: async () => client.getNew(options.subreddit, options),
identifier: "id",
});
}
}
export class CommentStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
constructor(
client: Snoowrap,
options: SnooStormOptions & { subreddit: string }) {
super({
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
get: async () => client.getNewComments(options.subreddit, options),
identifier: "id",
});
}
}
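A rough usage sketch for these stream classes, assuming `client` is an authenticated Snoowrap instance (the subreddit name and option values are illustrative):
const stream = new CommentStream(client, { subreddit: 'mysubreddit', pollTime: 10000, limit: 50 });
stream.on('item', (comment) => {
    // queue the new comment for processing
});
stream.on('error', (err) => {
    // a polling error ends the stream (see the catch above); decide here whether to restart it
});
stream.startInterval();
// ...later, to stop polling:
stream.end();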


@@ -0,0 +1,362 @@
import Snoowrap, {RedditUser, Comment, Submission} from "snoowrap";
import objectHash from 'object-hash';
import {
AuthorActivitiesOptions,
AuthorTypedActivitiesOptions, BOT_LINK,
getAuthorActivities,
testAuthorCriteria
} from "../Utils/SnoowrapUtils";
import Subreddit from 'snoowrap/dist/objects/Subreddit';
import winston, {Logger} from "winston";
import fetch from 'node-fetch';
import {
buildCacheOptionsFromProvider,
cacheStats, createCacheManager,
formatNumber,
mergeArr,
parseExternalUrl,
parseWikiContext
} from "../util";
import LoggedError from "../Utils/LoggedError";
import {
CacheOptions,
Footer, OperatorConfig, ResourceStats,
SubredditCacheConfig, TTLConfig
} from "../Common/interfaces";
import UserNotes from "./UserNotes";
import Mustache from "mustache";
import he from "he";
import {AuthorCriteria} from "../Author/Author";
import {SPoll} from "./Streams";
import {Cache} from 'cache-manager';
export const DEFAULT_FOOTER = '\r\n*****\r\nThis action was performed by [a bot.]({{botLink}}) Mention a moderator or [send a modmail]({{modmailLink}}) if you have any ideas, questions, or concerns about this action.';
export interface SubredditResourceConfig extends Footer {
caching?: SubredditCacheConfig,
subreddit: Subreddit,
logger: Logger;
}
interface SubredditResourceOptions extends Footer {
ttl: Required<TTLConfig>
cache?: Cache
cacheType: string;
cacheSettingsHash: string
subreddit: Subreddit,
logger: Logger;
}
export interface SubredditResourceSetOptions extends SubredditCacheConfig, Footer {
}
export class SubredditResources {
//enabled!: boolean;
protected authorTTL!: number;
protected useSubredditAuthorCache!: boolean;
protected wikiTTL!: number;
name: string;
protected logger: Logger;
userNotes: UserNotes;
footer: false | string = DEFAULT_FOOTER;
subreddit: Subreddit
cache?: Cache
cacheType: string
cacheSettingsHash?: string;
stats: { cache: ResourceStats };
constructor(name: string, options: SubredditResourceOptions) {
const {
subreddit,
logger,
ttl: {
userNotesTTL,
authorTTL,
wikiTTL,
},
cache,
cacheType,
cacheSettingsHash,
} = options || {};
this.cacheSettingsHash = cacheSettingsHash;
this.cache = cache;
this.cacheType = cacheType;
this.authorTTL = authorTTL;
this.wikiTTL = wikiTTL;
this.subreddit = subreddit;
this.name = name;
if (logger === undefined) {
const alogger = winston.loggers.get('default')
this.logger = alogger.child({labels: [this.name, 'Resource Cache']}, mergeArr);
} else {
this.logger = logger.child({labels: ['Resource Cache']}, mergeArr);
}
this.stats = {
cache: cacheStats()
};
const cacheUseCB = (miss: boolean) => {
this.stats.cache.userNotes.requests++;
this.stats.cache.userNotes.miss += miss ? 1 : 0;
}
this.userNotes = new UserNotes(userNotesTTL, this.subreddit, this.logger, this.cache, cacheUseCB)
}
async getCacheKeyCount() {
if (this.cache !== undefined && this.cache.store.keys !== undefined) {
return (await this.cache.store.keys()).length;
}
return 0;
}
getStats() {
return {
cache: {
// TODO could probably combine these two
totalRequests: Object.values(this.stats.cache).reduce((acc, curr) => acc + curr.requests, 0),
types: Object.keys(this.stats.cache).reduce((acc, curr) => {
const per = acc[curr].miss === 0 ? 0 : formatNumber(acc[curr].miss / acc[curr].requests) * 100;
// @ts-ignore
acc[curr].missPercent = `${formatNumber(per, {toFixed: 0})}%`;
return acc;
}, this.stats.cache)
}
}
}
async getAuthorActivities(user: RedditUser, options: AuthorTypedActivitiesOptions): Promise<Array<Submission | Comment>> {
if (this.cache !== undefined && this.authorTTL > 0) {
const userName = user.name;
const hashObj: any = {...options, userName};
if (this.useSubredditAuthorCache) {
hashObj.subreddit = this.name;
}
const hash = objectHash.sha1(hashObj);
this.stats.cache.author.requests++;
let miss = false;
const cacheVal = await this.cache.wrap(hash, async () => {
miss = true;
return await getAuthorActivities(user, options);
}, {ttl: this.authorTTL});
if (!miss) {
this.logger.debug(`Cache Hit: ${userName} (${options.type || 'overview'})`);
} else {
this.stats.cache.author.miss++;
}
return cacheVal as Array<Submission | Comment>;
}
return await getAuthorActivities(user, options);
}
async getAuthorComments(user: RedditUser, options: AuthorActivitiesOptions): Promise<Comment[]> {
return await this.getAuthorActivities(user, {...options, type: 'comment'}) as unknown as Promise<Comment[]>;
}
async getAuthorSubmissions(user: RedditUser, options: AuthorActivitiesOptions): Promise<Submission[]> {
return await this.getAuthorActivities(user, {
...options,
type: 'submission'
}) as unknown as Promise<Submission[]>;
}
async getContent(val: string, subredditArg?: Subreddit): Promise<string> {
const subreddit = subredditArg || this.subreddit;
let cacheKey;
const wikiContext = parseWikiContext(val);
if (wikiContext !== undefined) {
cacheKey = `${wikiContext.wiki}${wikiContext.subreddit !== undefined ? `|${wikiContext.subreddit}` : ''}`;
}
const extUrl = wikiContext === undefined ? parseExternalUrl(val) : undefined;
if (extUrl !== undefined) {
cacheKey = extUrl;
}
if (cacheKey === undefined) {
return val;
}
// try to get cached value first
let hash = `${subreddit.display_name}-${cacheKey}`;
if (this.cache !== undefined && this.wikiTTL > 0) {
this.stats.cache.content.requests++;
const cachedContent = await this.cache.get(hash);
if (cachedContent !== undefined) {
this.logger.debug(`Cache Hit: ${cacheKey}`);
return cachedContent as string;
} else {
this.stats.cache.content.miss++;
}
}
let wikiContent: string;
// no cache hit, get from source
if (wikiContext !== undefined) {
let sub;
if (wikiContext.subreddit === undefined || wikiContext.subreddit.toLowerCase() === subreddit.display_name) {
sub = subreddit;
} else {
// @ts-ignore
const client = subreddit._r as Snoowrap;
sub = client.getSubreddit(wikiContext.subreddit);
}
try {
const wikiPage = sub.getWikiPage(wikiContext.wiki);
wikiContent = await wikiPage.content_md;
} catch (err) {
const msg = `Could not read wiki page. Please ensure the page 'https://reddit.com/${sub.display_name_prefixed}/wiki/${wikiContext.wiki}' exists and is readable`;
this.logger.error(msg, err);
throw new LoggedError(msg);
}
} else {
try {
const response = await fetch(extUrl as string);
wikiContent = await response.text();
} catch (err) {
const msg = `Error occurred while trying to fetch the url ${extUrl}`;
this.logger.error(msg, err);
throw new LoggedError(msg);
}
}
if (this.cache !== undefined && this.wikiTTL > 0) {
this.cache.set(hash, wikiContent, this.wikiTTL);
}
return wikiContent;
}
async testAuthorCriteria(item: (Comment | Submission), authorOpts: AuthorCriteria, include = true) {
if (this.cache !== undefined && this.authorTTL > 0) {
const hashObj = {itemId: item.id, ...authorOpts, include};
const hash = `authorCrit-${objectHash.sha1(hashObj)}`;
this.stats.cache.authorCrit.requests++;
let miss = false;
const cachedAuthorTest = await this.cache.wrap(hash, async () => {
miss = true;
return await testAuthorCriteria(item, authorOpts, include, this.userNotes);
}, {ttl: this.authorTTL});
if (!miss) {
this.logger.debug(`Cache Hit: Author Check on ${item.id}`);
} else {
this.stats.cache.authorCrit.miss++;
}
return cachedAuthorTest;
}
return await testAuthorCriteria(item, authorOpts, include, this.userNotes);
}
async generateFooter(item: Submission | Comment, actionFooter?: false | string) {
let footer = actionFooter !== undefined ? actionFooter : this.footer;
if (footer === false) {
return '';
}
const subName = await item.subreddit.display_name;
const permaLink = `https://reddit.com${await item.permalink}`
const modmailLink = `https://www.reddit.com/message/compose?to=%2Fr%2F${subName}&message=${encodeURIComponent(permaLink)}`
const footerRawContent = await this.getContent(footer, item.subreddit);
return he.decode(Mustache.render(footerRawContent, {subName, permaLink, modmailLink, botLink: BOT_LINK}));
}
}
class SubredditResourcesManager {
resources: Map<string, SubredditResources> = new Map();
authorTTL: number = 10000;
enabled: boolean = true;
modStreams: Map<string, SPoll<Snoowrap.Submission | Snoowrap.Comment>> = new Map();
defaultCache?: Cache;
cacheType: string = 'none';
ttlDefaults!: Required<TTLConfig>;
setDefaultsFromConfig(config: OperatorConfig) {
const {
caching: {
authorTTL,
userNotesTTL,
wikiTTL,
provider,
},
} = config;
this.setDefaultCache(provider);
this.setTTLDefaults({authorTTL, userNotesTTL, wikiTTL});
}
setDefaultCache(options: CacheOptions) {
this.cacheType = options.store;
this.defaultCache = createCacheManager(options);
}
setTTLDefaults(def: Required<TTLConfig>) {
this.ttlDefaults = def;
}
get(subName: string): SubredditResources | undefined {
if (this.resources.has(subName)) {
return this.resources.get(subName) as SubredditResources;
}
return undefined;
}
set(subName: string, initOptions: SubredditResourceConfig): SubredditResources {
let hash = 'default';
const { caching, ...init } = initOptions;
let opts: SubredditResourceOptions;
if(caching !== undefined) {
const {provider = 'memory', ...rest} = caching;
let cacheConfig = {
provider: buildCacheOptionsFromProvider(provider),
ttl: {
...this.ttlDefaults,
...rest
},
}
hash = objectHash.sha1(cacheConfig);
const {provider: trueProvider, ...trueRest} = cacheConfig;
opts = {
cache: createCacheManager(trueProvider),
cacheType: trueProvider.store,
cacheSettingsHash: hash,
...init,
...trueRest,
};
} else {
opts = {
cache: this.defaultCache,
cacheType: this.cacheType,
cacheSettingsHash: hash,
ttl: this.ttlDefaults,
...init,
}
}
let resource: SubredditResources;
const res = this.get(subName);
if(res === undefined || res.cacheSettingsHash !== hash) {
if(res !== undefined && res.cache !== undefined) {
res.cache.reset();
}
resource = new SubredditResources(subName, opts);
this.resources.set(subName, resource);
} else {
// just set non-cache related settings
resource = res;
if(opts.footer !== resource.footer) {
resource.footer = opts.footer || DEFAULT_FOOTER;
}
// reset cache stats when configuration is reloaded
resource.stats.cache = cacheStats();
}
return resource;
}
}
const manager = new SubredditResourcesManager();
export default manager;
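A rough usage sketch for the manager, assuming setDefaultsFromConfig has already run and that `subreddit` (a Snoowrap Subreddit) and `logger` (a winston Logger) are in scope:
const resources = manager.set(subreddit.display_name, { subreddit, logger });
// later lookups reuse the same instance unless the cache settings hash changed
const existing = manager.get(subreddit.display_name);
// hit/miss counters surfaced in the web UI
const stats = resources.getStats();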

src/Subreddit/UserNotes.ts

@@ -0,0 +1,268 @@
import dayjs, {Dayjs} from "dayjs";
import {Comment, RedditUser, WikiPage} from "snoowrap";
import {COMMENT_URL_ID, deflateUserNotes, inflateUserNotes, parseLinkIdentifier, SUBMISSION_URL_ID} from "../util";
import Subreddit from "snoowrap/dist/objects/Subreddit";
import {Logger} from "winston";
import LoggedError from "../Utils/LoggedError";
import Submission from "snoowrap/dist/objects/Submission";
import {RichContent} from "../Common/interfaces";
import {Cache} from 'cache-manager';
interface RawUserNotesPayload {
ver: number,
constants: UserNotesConstants,
blob: RawBlobPayload
}
interface RawBlobPayload {
[username: string]: RawUserNoteRoot
}
interface RawUserNoteRoot {
ns: RawNote[]
}
export interface RawNote {
/**
* Note Text
* */
n: string;
/**
* Unix epoch in seconds
* */
t: number;
/**
* Moderator index from constants.users
* */
m: number;
/**
* Link shorthand
* */
l: string;
/**
* type/color index from constants.warnings
* */
w: number;
}
export type UserNotesConstants = Pick<any, "users" | "warnings">;
export class UserNotes {
notesTTL: number;
subreddit: Subreddit;
wiki: WikiPage;
moderators?: RedditUser[];
logger: Logger;
identifier: string;
cache?: Cache
cacheCB: Function;
users: Map<string, UserNote[]> = new Map();
constructor(ttl: number, subreddit: Subreddit, logger: Logger, cache: Cache | undefined, cacheCB: Function) {
this.notesTTL = ttl;
this.subreddit = subreddit;
this.logger = logger;
this.wiki = subreddit.getWikiPage('usernotes');
this.identifier = `${this.subreddit.display_name}-usernotes`;
this.cache = cache;
this.cacheCB = cacheCB;
}
async getUserNotes(user: RedditUser): Promise<UserNote[]> {
let notes: UserNote[] | undefined = [];
if (this.users !== undefined) {
notes = this.users.get(user.name);
if (notes !== undefined) {
this.logger.debug('Returned cached notes');
return notes;
}
}
const payload = await this.retrieveData();
const rawNotes = payload.blob[user.name];
if (rawNotes !== undefined) {
if (this.moderators === undefined) {
this.moderators = await this.subreddit.getModerators();
}
const notes = rawNotes.ns.map(x => UserNote.fromRaw(x, payload.constants, this.moderators as RedditUser[]));
// sort in ascending order by time
notes.sort((a, b) => a.time.isBefore(b.time) ? -1 : 1);
if (this.notesTTL > 0 && this.cache !== undefined) {
this.users.set(user.name, notes);
}
return notes;
} else {
return [];
}
}
async addUserNote(item: (Submission|Comment), type: string | number, text: string = ''): Promise<UserNote>
{
const payload = await this.retrieveData();
// idgaf
// @ts-ignore
const mod = await this.subreddit._r.getMe();
if(!payload.constants.users.includes(mod.name)) {
this.logger.info(`Mod ${mod.name} does not exist in UserNote constants, adding them`);
payload.constants.users.push(mod.name);
}
if(!payload.constants.warnings.find((x: string) => x === type)) {
this.logger.warn(`UserNote type '${type}' does not exist, adding it, but make sure the spelling and letter case are correct`);
payload.constants.warnings.push(type);
//throw new LoggedError(`UserNote type '${type}' does not exist. If you meant to use this please add it through Toolbox first.`);
}
const newNote = new UserNote(dayjs(), text, mod, type, `https://reddit.com${item.permalink}`);
if(payload.blob[item.author.name] === undefined) {
payload.blob[item.author.name] = {ns: []};
}
payload.blob[item.author.name].ns.push(newNote.toRaw(payload.constants));
await this.saveData(payload);
if(this.notesTTL > 0 && this.cache !== undefined) {
const currNotes = this.users.get(item.author.name) || [];
currNotes.push(newNote);
this.users.set(item.author.name, currNotes);
}
return newNote;
}
async warningExists(type: string): Promise<boolean>
{
const payload = await this.retrieveData();
return payload.constants.warnings.some((x: string) => x === type);
}
async retrieveData(): Promise<RawUserNotesPayload> {
if (this.notesTTL > 0 && this.cache !== undefined) {
const cachedPayload = await this.cache.get(this.identifier);
if (cachedPayload !== undefined) {
this.cacheCB(false);
return cachedPayload as unknown as RawUserNotesPayload;
}
this.cacheCB(true);
}
try {
// @ts-ignore
this.wiki = await this.subreddit.getWikiPage('usernotes').fetch();
const wikiContent = this.wiki.content_md;
// TODO don't handle for versions lower than 6
const userNotes = JSON.parse(wikiContent);
userNotes.blob = inflateUserNotes(userNotes.blob);
if (this.notesTTL > 0 && this.cache !== undefined) {
await this.cache.set(`${this.subreddit.display_name}-usernotes`, userNotes, this.notesTTL);
this.users = new Map();
}
return userNotes as RawUserNotesPayload;
} catch (err) {
const msg = `Could not read usernotes. Make sure at least one moderator has used toolbox and usernotes before.`;
this.logger.error(msg, err);
throw new LoggedError(msg);
}
}
async saveData(payload: RawUserNotesPayload): Promise<RawUserNotesPayload> {
const blob = deflateUserNotes(payload.blob);
const wikiPayload = {...payload, blob};
try {
// @ts-ignore
//this.wiki = await this.wiki.refresh();
// @ts-ignore
this.wiki = await this.subreddit.getWikiPage('usernotes').edit({text: JSON.stringify(wikiPayload), reason: 'ContextBot edited usernotes'});
if (this.notesTTL > 0 && this.cache !== undefined) {
await this.cache.set(this.identifier, payload, this.notesTTL);
this.users = new Map();
}
return payload as RawUserNotesPayload;
} catch (err) {
const msg = `Could not edit usernotes. Make sure at least one moderator has used toolbox and usernotes before and that this account has editing permissions`;
this.logger.error(msg, err);
throw new LoggedError(msg);
}
}
}
export interface UserNoteJson extends RichContent {
/**
* User Note type key
* @examples ["spamwarn"]
* */
type: string,
}
export class UserNote {
//time: Dayjs;
// text?: string;
// moderator: RedditUser;
// noteTypeIndex: number;
// noteType: string | null;
// link: string;
constructor(public time: Dayjs, public text: string, public moderator: RedditUser, public noteType: string | number, public link: string) {
}
public toRaw(constants: UserNotesConstants): RawNote {
return {
t: this.time.unix(),
n: this.text,
m: constants.users.findIndex((x: string) => x === this.moderator.name),
w: typeof this.noteType === 'number' ? this.noteType : constants.warnings.findIndex((x: string) => x === this.noteType),
l: usernoteLinkShorthand(this.link)
}
}
public static fromRaw(obj: RawNote, constants: UserNotesConstants, mods: RedditUser[]) {
const mod = mods.find(x => x.name === constants.users[obj.m]);
if (mod === undefined) {
throw new Error('Could not find moderator for Usernote');
}
return new UserNote(dayjs.unix(obj.t), obj.n, mod, constants.warnings[obj.w] === null ? obj.w : constants.warnings[obj.w], usernoteLinkExpand(obj.l))
}
}
// https://github.com/toolbox-team/reddit-moderator-toolbox/wiki/Subreddit-Wikis%3A-usernotes#link-string-formats
export const usernoteLinkExpand = (link: string) => {
if (link.charAt(0) === 'l') {
const pieces = link.split(',');
if (pieces.length === 3) {
// it's a comment
return `https://www.reddit.com/comments/${pieces[1]}/_/${pieces[2]}`;
}
// it's a submission
return `https://redd.it/${pieces[1]}`;
} else {
// it's an old modmail thread
return `https://www.reddit.com/message/messages/${link.split(',')[1]}`;
}
}
export const usernoteLinkShorthand = (link: string) => {
const commentReg = parseLinkIdentifier([COMMENT_URL_ID]);
const submissionReg = parseLinkIdentifier([SUBMISSION_URL_ID]);
let commentId = commentReg(link);
let submissionId = submissionReg(link);
if (commentId !== undefined) {
return `l,${submissionId},${commentId}`;
} else if (submissionId !== undefined) {
return `l,${submissionId}`;
}
// aren't dealing with messages at this point so just store whole thing if we didn't get a shorthand
return link;
}
export default UserNotes;
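For reference, a few illustrative calls to the link helpers above (the ids are made up):
usernoteLinkExpand('l,abc12');          // submission => 'https://redd.it/abc12'
usernoteLinkExpand('l,abc12,def34');    // comment => 'https://www.reddit.com/comments/abc12/_/def34'
usernoteLinkExpand('x,51298');          // anything not starting with 'l' is treated as an old modmail thread => 'https://www.reddit.com/message/messages/51298'
usernoteLinkShorthand performs the inverse, falling back to storing the full link when neither a submission nor a comment id can be parsed from it.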

src/Utils/CommandConfig.ts

@@ -0,0 +1,122 @@
import commander, {InvalidOptionArgumentError} from "commander";
import {argParseInt, parseBool, parseBoolWithDefault} from "../util";
export const clientId = new commander.Option('-i, --clientId <id>', 'Client ID for your Reddit application (default: process.env.CLIENT_ID)');
export const clientSecret = new commander.Option('-e, --clientSecret <secret>', 'Client Secret for your Reddit application (default: process.env.CLIENT_SECRET)');
export const redirectUri = new commander.Option('-u, --redirectUri <uri>', 'Redirect URI for your Reddit application (default: process.env.REDIRECT_URI)');
export const sessionSecret = new commander.Option('-t, --sessionSecret <secret>', 'Secret use to encrypt session id/data (default: process.env.SESSION_SECRET || a random string)');
export const createAccessTokenOption = () => new commander.Option('-a, --accessToken <token>', 'Access token retrieved from authenticating an account with your Reddit Application (default: process.env.ACCESS_TOKEN)');
export const createRefreshTokenOption = () => new commander.Option('-r, --refreshToken <token>', 'Refresh token retrieved from authenticating an account with your Reddit Application (default: process.env.REFRESH_TOKEN)');
export const subreddits = new commander.Option('-s, --subreddits <list...>', 'List of subreddits to run on. Bot will run on all subs it has access to if not defined (default: process.env.SUBREDDITS)');
export const logDir = new commander.Option('-d, --logDir [dir]', 'Absolute path to directory to store rotated logs in. Leaving undefined disables rotating logs (default: process.env.LOG_DIR)');
export const logLevel = new commander.Option('-l, --logLevel <level>', 'Minimum level to log at (default: process.env.LOG_LEVEL || verbose)');
export const wikiConfig = new commander.Option('-w, --wikiConfig <path>', `Relative url to contextbot wiki page EX https://reddit.com/r/subreddit/wiki/<path> (default: process.env.WIKI_CONFIG || 'botconfig/contextbot')`);
export const snooDebug = new commander.Option('--snooDebug', `Set Snoowrap to debug. If undefined will be on if logLevel='debug' (default: process.env.SNOO_DEBUG)`)
.argParser(parseBool);
export const authorTTL = new commander.Option('--authorTTL <ms>', 'Set the TTL (ms) for the Author Activities shared cache (default: process.env.AUTHOR_TTL || 60000)')
.argParser(argParseInt);
export const caching = new commander.Option('--caching <provider>', `Set the caching provider to use. Options 'memory', 'redis', or 'none' to disable (default: process.env.CACHING || memory)`);
export const heartbeat = new commander.Option('--heartbeat <s>', 'Interval, in seconds, between heartbeat checks. (default: process.env.HEARTBEAT || 300)')
.argParser(argParseInt);
export const softLimit = new commander.Option('--softLimit <limit>', 'When API limit remaining (600/10min) is lower than this subreddits will have SLOW MODE enabled (default: process.env.SOFT_LIMIT || 250)')
.argParser(argParseInt);
export const hardLimit = new commander.Option('--hardLimit <limit>', 'When API limit remaining (600/10min) is lower than this all subreddit polling will be paused until api limit reset (default: process.env.SOFT_LIMIT || 250)')
.argParser(argParseInt);
export const dryRun = new commander.Option('--dryRun', 'Set all subreddits in dry run mode, overriding configurations (default: process.env.DRYRUN || false)')
.argParser(parseBoolWithDefault(undefined));
export const checks = new commander.Option('-h, --checks <checkNames...>', 'An optional list of Checks, by name, that should be run. If none are specified all Checks for the Subreddit the Activity is in will be run');
export const proxy = new commander.Option('--proxy <proxyEndpoint>', 'Proxy Snoowrap requests through this endpoint (default: process.env.PROXY)');
export const operator = new commander.Option('--operator <name>', 'Username of the reddit user operating this application, used for displaying OP level info/actions in UI (default: process.env.OPERATOR)');
export const operatorDisplay = new commander.Option('--operatorDisplay <name>', 'An optional name to display who is operating this application in the UI (default: process.env.OPERATOR_DISPLAY || Anonymous)');
export const port = new commander.Option('-p, --port <port>', 'Port for web server to listen on (default: process.env.PORT || 8085)')
.argParser(argParseInt);
export const sharedMod = new commander.Option('-q, --shareMod', `If enabled then all subreddits using the default settings to poll "unmoderated" or "modqueue" will retrieve results from a shared request to /r/mod (default: process.env.SHARE_MOD || false)`)
.argParser(parseBool);
export const operatorConfig = new commander.Option('-c, --operatorConfig <path>', 'An absolute path to a JSON file to load all parameters from (default: process.env.OPERATOR_CONFIG)');
export const getUniversalWebOptions = (): commander.Option[] => {
return [
clientId,
clientSecret,
createAccessTokenOption(),
createRefreshTokenOption(),
redirectUri,
sessionSecret,
subreddits,
logDir,
logLevel,
wikiConfig,
snooDebug,
authorTTL,
heartbeat,
softLimit,
hardLimit,
dryRun,
proxy,
operator,
operatorDisplay,
port,
sharedMod,
operatorConfig,
];
}
export const getUniversalCLIOptions = (): commander.Option[] => {
const at = createAccessTokenOption();
at.required = true;
const rt = createRefreshTokenOption();
rt.required = true;
return [
clientId,
clientSecret,
at,
rt,
subreddits,
logDir,
logLevel,
wikiConfig,
snooDebug,
authorTTL,
heartbeat,
softLimit,
hardLimit,
dryRun,
proxy,
sharedMod,
operatorConfig,
]
}
export const addOptions = (com: commander.Command, options: commander.Option[]): commander.Command => {
return options.reduce((c, opt) => c.addOption(opt), com);
}
// TODO
export const subredditConfig = new commander.Option('--subredditsConfig <path>', 'An absolute path to a JSON file to load subreddit configs from');


@@ -0,0 +1,7 @@
import LoggedError from "./LoggedError";
class ConfigParseError extends LoggedError {
}
export default ConfigParseError


@@ -0,0 +1,22 @@
import ExtendableError from "es6-error";
class InvalidRegexError extends ExtendableError {
constructor(regex: RegExp | RegExp[], val?: string, url?: string) {
const msgParts = [
'Regex(es) did not match the value given.',
];
let regArr = Array.isArray(regex) ? regex : [regex];
for(const r of regArr) {
msgParts.push(`Regex: ${r}`)
}
if (val !== undefined) {
msgParts.push(`Value: ${val}`);
}
if (url !== undefined) {
msgParts.push(`Sample regex: ${url}`);
}
super(msgParts.join('\r\n'));
}
}
export default InvalidRegexError;

src/Utils/SimpleError.ts

@@ -0,0 +1,7 @@
import ExtendableError from "es6-error";
class SimpleError extends ExtendableError {
}
export default SimpleError;


@@ -0,0 +1,44 @@
import Snoowrap from "snoowrap";
// const proxyFactory = (endpoint: string) => {
// return class ProxiedSnoowrap extends Snoowrap {
// rawRequest(options: any) {
// // send all requests through a proxy
// return super.rawRequest(Object.assign(options, {
// proxy: endpoint,
// tunnel: false
// }))
// }
// }
// }
export class RequestTrackingSnoowrap extends Snoowrap {
requestCount: number = 0;
oauthRequest(...args: any) {
// track the number of requests made (only the initial attempt of each request is counted)
if(args[1] === undefined || args[1] === 1) {
this.requestCount++;
}
// @ts-ignore
return super.oauthRequest(...args);
}
}
export class ProxiedSnoowrap extends RequestTrackingSnoowrap {
proxyEndpoint: string;
constructor(args: any) {
super(args);
const {proxy} = args;
this.proxyEndpoint = proxy;
}
rawRequest(options: any) {
// send all requests through a proxy
return super.rawRequest(Object.assign(options, {
proxy: this.proxyEndpoint,
tunnel: false
}))
}
}
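A minimal construction sketch, assuming the credential values are in scope (the proxy endpoint is illustrative):
const proxiedClient = new ProxiedSnoowrap({
    userAgent: 'web:contextBot:dev',
    clientId,
    clientSecret,
    refreshToken,
    proxy: 'http://localhost:8118'
});
// rawRequest routes every call through the proxy endpoint while the oauthRequest override keeps counting requests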


@@ -1,10 +1,32 @@
import {Comment, RedditUser} from "snoowrap";
import Snoowrap, {RedditUser} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import Comment from "snoowrap/dist/objects/Comment";
import {Duration, DurationUnitsObjectType} from "dayjs/plugin/duration";
import dayjs, {Dayjs} from "dayjs";
import Mustache from "mustache";
import {AuthorOptions, AuthorCriteria} from "../Rule";
import {ActivityWindowCriteria, ActivityWindowType} from "../Common/interfaces";
import he from "he";
import {RuleResult, UserNoteCriteria} from "../Rule";
import {
ActivityWindowType, CommentState, DomainInfo,
DurationVal,
SubmissionState,
TypedActivityStates
} from "../Common/interfaces";
import {
compareDurationValue, comparisonTextOp,
isActivityWindowCriteria,
normalizeName, parseDuration,
parseDurationComparison, parseGenericValueComparison, parseGenericValueOrPercentComparison, parseSubredditName,
truncateStringToLength
} from "../util";
import UserNotes from "../Subreddit/UserNotes";
import {Logger} from "winston";
import InvalidRegexError from "./InvalidRegexError";
import SimpleError from "./SimpleError";
import {AuthorCriteria} from "../Author/Author";
import { URL } from "url";
export const BOT_LINK = 'https://www.reddit.com/r/ContextModBot/comments/o1dugk/introduction_to_contextmodbot_and_rcb';
export interface AuthorTypedActivitiesOptions extends AuthorActivitiesOptions {
type?: 'comment' | 'submission',
@@ -12,33 +34,90 @@ export interface AuthorTypedActivitiesOptions extends AuthorActivitiesOptions {
export interface AuthorActivitiesOptions {
window: ActivityWindowType | Duration
chunkSize?: number,
// TODO maybe move this into window
keepRemoved?: boolean,
}
export async function getAuthorActivities(user: RedditUser, options: AuthorTypedActivitiesOptions): Promise<Array<Submission | Comment>> {
let window: number | Dayjs,
chunkSize = 30;
if (typeof options.window !== 'number') {
const endTime = dayjs();
let d;
if (dayjs.isDuration(options.window)) {
d = options.window;
} else {
// @ts-ignore
d = dayjs.duration(options.window);
const {
chunkSize: cs = 100,
window: optWindow,
keepRemoved = true,
} = options;
let satisfiedCount: number | undefined,
satisfiedEndtime: Dayjs | undefined,
chunkSize = Math.min(cs, 100),
satisfy = 'any';
let durVal: DurationVal | undefined;
let duration: Duration | undefined;
let includes: string[] = [];
let excludes: string[] = [];
if(isActivityWindowCriteria(optWindow)) {
const {
satisfyOn = 'any',
count,
duration,
subreddits: {
include = [],
exclude = [],
} = {},
} = optWindow;
includes = include.map(x => parseSubredditName(x).toLowerCase());
excludes = exclude.map(x => parseSubredditName(x).toLowerCase());
if(includes.length > 0 && excludes.length > 0) {
// TODO add logger so this can be logged...
// this.logger.warn('include and exclude both specified, exclude will be ignored');
}
if (!dayjs.isDuration(d)) {
// TODO print object
throw new Error('window given was not a number, a valid ISO8601 duration, a Day.js duration, or well-formed Duration options');
}
window = endTime.subtract(d.asMilliseconds(), 'milliseconds');
satisfiedCount = count;
durVal = duration;
satisfy = satisfyOn
} else if(typeof optWindow === 'number') {
satisfiedCount = optWindow;
} else {
window = options.window;
// use whichever is smaller so we only do one api request if window is smaller than default chunk size
chunkSize = Math.min(chunkSize, window);
durVal = optWindow as DurationVal;
}
// if count is less than max limit (100) go ahead and just get that many. may result in faster response time for low numbers
if(satisfiedCount !== undefined) {
chunkSize = Math.min(chunkSize, satisfiedCount);
}
if(durVal !== undefined) {
const endTime = dayjs();
if (typeof durVal === 'object') {
duration = dayjs.duration(durVal);
if (!dayjs.isDuration(duration)) {
throw new Error('window value given was not a well-formed Duration object');
}
} else {
try {
duration = parseDuration(durVal);
} catch (e) {
if (e instanceof InvalidRegexError) {
throw new Error(`window value of '${durVal}' could not be parsed as a valid ISO8601 duration or DayJS duration shorthand (see Schema)`);
}
throw e;
}
}
satisfiedEndtime = endTime.subtract(duration.asMilliseconds(), 'milliseconds');
}
if(satisfiedCount === undefined && satisfiedEndtime === undefined) {
throw new Error('window value was not valid');
} else if(satisfy === 'all' && !(satisfiedCount !== undefined && satisfiedEndtime !== undefined)) {
// even though 'all' was requested we don't have two criteria so it's really 'any' logic
satisfy = 'any';
}
let items: Array<Submission | Comment> = [];
let lastItemDate;
//let count = 1;
let listing;
switch (options.type) {
@@ -53,23 +132,78 @@ export async function getAuthorActivities(user: RedditUser, options: AuthorTyped
break;
}
let hitEnd = false;
let offset = chunkSize;
while (!hitEnd) {
items = items.concat(listing);
if (typeof window === 'number') {
hitEnd = items.length >= window
} else {
const lastItem = listing[listing.length - 1];
const lastUtc = await lastItem.created_utc
lastItemDate = dayjs(lastUtc);
if (lastItemDate.isBefore(window)) {
hitEnd = true;
let countOk = false,
timeOk = false;
let listSlice = listing.slice(offset - chunkSize)
// TODO partition list by filtered so we can log a debug statement with count of filtered out activities
if (includes.length > 0) {
listSlice = listSlice.filter(x => {
const actSub = x.subreddit.display_name.toLowerCase();
return includes.includes(actSub);
});
} else if (excludes.length > 0) {
listSlice = listSlice.filter(x => {
const actSub = x.subreddit.display_name.toLowerCase();
return !excludes.includes(actSub);
});
}
if(!keepRemoved) {
// snoowrap typings think 'removed' property does not exist on submission
// @ts-ignore
listSlice = listSlice.filter(x => !activityIsRemoved(x));
}
if (satisfiedCount !== undefined && items.length + listSlice.length >= satisfiedCount) {
// satisfied count
if(satisfy === 'any') {
items = items.concat(listSlice).slice(0, satisfiedCount);
break;
}
countOk = true;
}
let truncatedItems: Array<Submission | Comment> = [];
if(satisfiedEndtime !== undefined) {
truncatedItems = listSlice.filter((x) => {
const utc = x.created_utc * 1000;
const itemDate = dayjs(utc);
// @ts-ignore
return satisfiedEndtime.isBefore(itemDate);
});
if (truncatedItems.length !== listSlice.length) {
if(satisfy === 'any') {
// satisfied duration
items = items.concat(truncatedItems);
break;
}
timeOk = true;
}
}
if (!hitEnd) {
hitEnd = listing.isFinished;
// if we've satisfied everything take whichever is bigger
if(satisfy === 'all' && countOk && timeOk) {
if(satisfiedCount as number > items.length + truncatedItems.length) {
items = items.concat(listSlice).slice(0, satisfiedCount);
} else {
items = items.concat(truncatedItems);
}
break;
}
// if we got this far neither count nor time was satisfied (or both) so just add all items from listing and fetch more if possible
items = items.concat(listSlice);
hitEnd = listing.isFinished;
if (!hitEnd) {
listing.fetchMore({amount: chunkSize});
offset += chunkSize;
listing = await listing.fetchMore({amount: chunkSize});
}
}
return Promise.resolve(items);
@@ -83,77 +217,478 @@ export const getAuthorSubmissions = async (user: RedditUser, options: AuthorActi
return await getAuthorActivities(user, {...options, type: 'submission'}) as unknown as Promise<Submission[]>;
}
export const renderContent = async (content: string, data: (Submission | Comment), additionalData = {}) => {
export const renderContent = async (template: string, data: (Submission | Comment), ruleResults: RuleResult[] = [], usernotes: UserNotes) => {
const templateData: any = {
kind: data instanceof Submission ? 'submission' : 'comment',
author: await data.author.name,
// make this a getter so that we don't load notes (an api call) if we don't need to
// didn't work either for some reason
// tried to get too fancy :(
// get notes() {
// return usernotes.getUserNotes(data.author).then((notesData) => {
// // return usable notes data with some stats
// const current = notesData.length > 0 ? notesData[notesData.length -1] : undefined;
// // group by type
// const grouped = notesData.reduce((acc: any, x) => {
// const {[x.noteType]: nt = []} = acc;
// return Object.assign(acc, {[x.noteType]: nt.concat(x)});
// }, {});
// return {
// data: notesData,
// current,
// ...grouped,
// };
// });
// },
// when i was trying to use mustache-async (didn't work)
// notes: async () => {
// const notesData = await usernotes.getUserNotes(data.author);
// // return usable notes data with some stats
// const current = notesData.length > 0 ? notesData[notesData.length -1] : undefined;
// // group by type
// const grouped = notesData.reduce((acc: any, x) => {
// const {[x.noteType]: nt = []} = acc;
// return Object.assign(acc, {[x.noteType]: nt.concat(x)});
// }, {});
// return {
// data: notesData,
// current,
// ...grouped,
// };
// },
permalink: data.permalink,
botLink: BOT_LINK,
}
if(template.includes('{{item.notes')) {
// we need to get notes
const notesData = await usernotes.getUserNotes(data.author);
// return usable notes data with some stats
const current = notesData.length > 0 ? notesData[notesData.length -1] : undefined;
// group by type
const grouped = notesData.reduce((acc: any, x) => {
const {[x.noteType]: nt = []} = acc;
return Object.assign(acc, {[x.noteType]: nt.concat(x)});
}, {});
templateData.notes = {
data: notesData,
current,
...grouped,
};
}
if (data instanceof Submission) {
templateData.url = data.url;
templateData.title = data.title;
}
// normalize rule names and map context data
// NOTE: we are relying on users to use unique names for rules. If they don't only the last rule run of kind X will have its results here
const normalizedRuleResults = ruleResults.reduce((acc: object, ruleResult) => {
const {
name, triggered,
data = {},
result,
premise: {
kind
}
} = ruleResult;
// remove all non-alphanumeric characters (spaces, dashes, underscore) and set to lowercase
// we will set this as the rule property name to make it easy to access results from mustache template
const normalName = normalizeName(name);
return {
...acc, [normalName]: {
kind,
triggered,
result,
...data,
}
};
}, {});
return Mustache.render(content, {...templateData, ...additionalData});
const view = {item: templateData, rules: normalizedRuleResults};
const rendered = Mustache.render(template, view) as string;
return he.decode(rendered);
}
export const testAuthorCriteria = async (item: (Comment|Submission), authorOpts: AuthorCriteria, include = true) => {
export const testAuthorCriteria = async (item: (Comment | Submission), authorOpts: AuthorCriteria, include = true, userNotes: UserNotes) => {
// @ts-ignore
const author: RedditUser = await item.author;
for(const k of Object.keys(authorOpts)) {
switch(k) {
case 'name':
const authPass = () => {
// @ts-ignore
for (const n of authorOpts[k]) {
if (n.toLowerCase() === author.name.toLowerCase()) {
return true;
}
}
return false;
}
if((include && !authPass) || (!include && authPass)) {
return false;
}
break;
case 'flairCssClass':
const css = await item.author_flair_css_class;
const cssPass = () => {
// @ts-ignore
for(const c of authorOpts[k]) {
if(c === css) {
return;
for (const k of Object.keys(authorOpts)) {
// @ts-ignore
if (authorOpts[k] !== undefined) {
switch (k) {
case 'name':
const authPass = () => {
// @ts-ignore
for (const n of authorOpts[k]) {
if (n.toLowerCase() === author.name.toLowerCase()) {
return true;
}
}
return false;
}
return false;
}
if((include && !cssPass) || (!include && cssPass)) {
return false;
}
break;
case 'flairText':
const text = await item.author_flair_text;
const textPass = () => {
// @ts-ignore
for(const c of authorOpts[k]) {
if(c === text) {
return
const authResult = authPass();
if ((include && !authResult) || (!include && authResult)) {
return false;
}
break;
case 'flairCssClass':
const css = await item.author_flair_css_class;
const cssPass = () => {
// @ts-ignore
for (const c of authorOpts[k]) {
if (c === css) {
return;
}
}
return false;
}
return false;
}
if((include && !textPass) || (!include && textPass)) {
return false;
}
break;
case 'isMod':
const mods: RedditUser[] = await item.subreddit.getModerators();
const isModerator = mods.some(x => x.name === item.author.name);
const modMatch = authorOpts.isMod === isModerator;
if((include && !modMatch) || (!include && !modMatch)) {
return false;
}
const cssResult = cssPass();
if ((include && !cssResult) || (!include && cssResult)) {
return false;
}
break;
case 'flairText':
const text = await item.author_flair_text;
const textPass = () => {
// @ts-ignore
for (const c of authorOpts[k]) {
if (c === text) {
return
}
}
return false;
};
const textResult = textPass();
if ((include && !textResult) || (!include && textResult)) {
return false;
}
break;
case 'isMod':
const mods: RedditUser[] = await item.subreddit.getModerators();
const isModerator = mods.some(x => x.name === item.author.name);
const modMatch = authorOpts.isMod === isModerator;
if ((include && !modMatch) || (!include && modMatch)) {
return false;
}
break;
case 'age':
const ageTest = compareDurationValue(parseDurationComparison(await authorOpts.age as string), dayjs.unix(await item.author.created));
if ((include && !ageTest) || (!include && ageTest)) {
return false;
}
break;
case 'linkKarma':
const lkCompare = parseGenericValueOrPercentComparison(await authorOpts.linkKarma as string);
let lkMatch;
if(lkCompare.isPercent) {
// @ts-ignore
const tk = author.total_karma as number;
lkMatch = comparisonTextOp(author.link_karma / tk, lkCompare.operator, lkCompare.value/100);
} else {
lkMatch = comparisonTextOp(author.link_karma, lkCompare.operator, lkCompare.value);
}
if ((include && !lkMatch) || (!include && lkMatch)) {
return false;
}
break;
case 'commentKarma':
const ckCompare = parseGenericValueOrPercentComparison(await authorOpts.commentKarma as string);
let ckMatch;
if(ckCompare.isPercent) {
// @ts-ignore
const ck = author.total_karma as number;
ckMatch = comparisonTextOp(author.comment_karma / ck, ckCompare.operator, ckCompare.value/100);
} else {
ckMatch = comparisonTextOp(author.comment_karma, ckCompare.operator, ckCompare.value);
}
if ((include && !ckMatch) || (!include && ckMatch)) {
return false;
}
break;
case 'totalKarma':
const tkCompare = parseGenericValueComparison(await authorOpts.totalKarma as string);
if(tkCompare.isPercent) {
throw new SimpleError(`'totalKarma' value on AuthorCriteria cannot be a percentage`);
}
// @ts-ignore
const totalKarma = author.total_karma as number;
const tkMatch = comparisonTextOp(totalKarma, tkCompare.operator, tkCompare.value);
if ((include && !tkMatch) || (!include && tkMatch)) {
return false;
}
break;
case 'verified':
const vMatch = await author.has_verified_mail === authorOpts.verified as boolean;
if ((include && !vMatch) || (!include && vMatch)) {
return false;
}
break;
case 'userNotes':
const notes = await userNotes.getUserNotes(item.author);
const notePass = () => {
for (const noteCriteria of authorOpts[k] as UserNoteCriteria[]) {
const {count = '>= 1', search = 'current', type} = noteCriteria;
const {value, operator, isPercent, extra = ''} = parseGenericValueOrPercentComparison(count);
const order = extra.includes('asc') ? 'ascending' : 'descending';
switch (search) {
case 'current':
if (notes.length > 0 && notes[notes.length - 1].noteType === type) {
return true;
}
break;
case 'consecutive':
let orderedNotes = notes;
if (order === 'descending') {
orderedNotes = [...notes];
orderedNotes.reverse();
}
let currCount = 0;
for (const note of orderedNotes) {
if (note.noteType === type) {
currCount++;
} else {
currCount = 0;
}
if(isPercent) {
throw new SimpleError(`When comparing UserNotes with 'consecutive' search 'count' cannot be a percentage. Given: ${count}`);
}
if (comparisonTextOp(currCount, operator, value)) {
return true;
}
}
break;
case 'total':
if(isPercent) {
if(comparisonTextOp(notes.filter(x => x.noteType === type).length / notes.length, operator, value/100)) {
return true;
}
} else if(comparisonTextOp(notes.filter(x => x.noteType === type).length, operator, value)) {
return true;
}
}
}
return false;
}
const noteResult = notePass();
if ((include && !noteResult) || (!include && noteResult)) {
return false;
}
break;
}
}
}
return true;
}
export interface ItemContent {
submissionTitle: string,
content: string,
author: string,
permalink: string,
}
export const itemContentPeek = async (item: (Comment | Submission), peekLength = 200): Promise<[string, ItemContent]> => {
const truncatePeek = truncateStringToLength(peekLength);
let content = '';
let submissionTitle = '';
let peek = '';
const author = item.author.name;
if (item instanceof Submission) {
submissionTitle = item.title;
peek = `${truncatePeek(item.title)} by ${author} https://reddit.com${item.permalink}`;
} else if (item instanceof Comment) {
content = truncatePeek(item.body);
peek = `${truncatePeek(content)} by ${author} in https://reddit.com${item.permalink}`;
}
return [peek, {submissionTitle, content, author, permalink: item.permalink}];
}
const SPOTIFY_PODCAST_AUTHOR_REGEX: RegExp = /this episode from (?<author>.*?) on Spotify./;
const SPOTIFY_PODCAST_AUTHOR_REGEX_URL = 'https://regexr.com/61c2f';
const SPOTIFY_MUSIC_AUTHOR_REGEX: RegExp = /Listen to .*? on Spotify.\s(?<author>.+?)\s·\s(?<mediaType>.+?)\s/;
const SPOTIFY_MUSIC_AUTHOR_REGEX_URL = 'https://regexr.com/61c2r';
const ANCHOR_AUTHOR_REGEX: RegExp = /by (?<author>.+?)$/;
const ANCHOR_AUTHOR_REGEX_URL = 'https://regexr.com/61c31';
export const getAttributionIdentifier = (sub: Submission, useParentMediaDomain = false): DomainInfo => {
let domain: string = '';
let displayDomain: string = '';
let domainIdents: string[] = useParentMediaDomain ? [sub.domain] : [];
let provider: string | undefined;
let mediaType: string | undefined;
if (!useParentMediaDomain && sub.secure_media?.oembed !== undefined) {
const {
author_url,
author_name,
description,
provider_name,
} = sub.secure_media?.oembed;
switch(provider_name) {
case 'Spotify':
if(description !== undefined) {
let match = description.match(SPOTIFY_PODCAST_AUTHOR_REGEX);
if(match !== null) {
const {author} = match.groups as any;
displayDomain = author;
domainIdents.push(author);
mediaType = 'Podcast';
} else {
match = description.match(SPOTIFY_MUSIC_AUTHOR_REGEX);
if(match !== null) {
const {author, mediaType: mt} = match.groups as any;
displayDomain = author;
domainIdents.push(author);
mediaType = mt.toLowerCase();
}
}
}
break;
case 'Anchor FM Inc.':
if(author_name !== undefined) {
let match = author_name.match(ANCHOR_AUTHOR_REGEX);
if(match !== null) {
const {author} = match.groups as any;
displayDomain = author;
domainIdents.push(author);
mediaType = 'podcast';
}
}
break;
case 'YouTube':
mediaType = 'Video/Audio';
break;
default:
// nah
}
// handles yt, vimeo, twitter fine
if(displayDomain === '') {
if (author_name !== undefined) {
domainIdents.push(author_name);
if (displayDomain === '') {
displayDomain = author_name;
}
}
if (author_url !== undefined) {
domainIdents.push(author_url);
domain = author_url;
if (displayDomain === '') {
displayDomain = author_url;
}
}
}
if(displayDomain === '') {
// we have media but could not parse stuff for some reason just use url
const u = new URL(sub.url);
displayDomain = u.pathname;
domainIdents.push(u.pathname);
}
provider = provider_name;
} else if(sub.secure_media?.type !== undefined) {
domainIdents.push(sub.secure_media?.type);
domain = sub.secure_media?.type;
} else {
domain = sub.domain;
}
if(domain === '') {
domain = sub.domain;
}
if (displayDomain === '') {
displayDomain = domain;
}
return {display: displayDomain, domain, aliases: domainIdents, provider, mediaType};
}
export const isItem = (item: Submission | Comment, stateCriteria: TypedActivityStates, logger: Logger): [boolean, SubmissionState|CommentState|undefined] => {
if (stateCriteria.length === 0) {
return [true, undefined];
}
const log = logger.child({leaf: 'Item Check'});
for (const crit of stateCriteria) {
const [pass, passCrit] = (() => {
for (const k of Object.keys(crit)) {
// @ts-ignore
if (crit[k] !== undefined) {
switch(k) {
case 'removed':
const removed = activityIsRemoved(item);
if (removed !== crit['removed']) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${removed}`)
return [false, crit];
}
break;
case 'deleted':
const deleted = activityIsDeleted(item);
if (deleted !== crit['deleted']) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${deleted}`)
return [false, crit];
}
break;
case 'filtered':
const filtered = activityIsFiltered(item);
if (filtered !== crit['filtered']) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${filtered}`)
return [false, crit];
}
break;
default:
// @ts-ignore
if (item[k] !== undefined) {
// @ts-ignore
if (item[k] !== crit[k]) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${item[k]}`)
return [false, crit];
}
} else {
log.warn(`Tried to test for Item property '${k}' but it did not exist`);
}
break;
}
}
}
log.debug(`Passed: ${JSON.stringify(crit)}`);
return [true, crit];
})() as [boolean, SubmissionState|CommentState|undefined];
if (pass) {
return [true, passCrit];
}
}
return [false, undefined];
}
export const activityIsRemoved = (item: Submission|Comment): boolean => {
if(item instanceof Submission) {
// when automod filters a post it gets this category
return item.banned_at_utc !== null && item.removed_by_category !== 'automod_filtered';
}
// when automod filters a comment item.removed === false
// so if we want to process filtered comments we need to check for this
return item.banned_at_utc !== null && item.removed;
}
export const activityIsFiltered = (item: Submission|Comment): boolean => {
if(item instanceof Submission) {
// when automod filters a post it gets this category
return item.banned_at_utc !== null && item.removed_by_category === 'automod_filtered';
}
// when automod filters a comment item.removed === false
// so if we want to process filtered comments we need to check for this
return item.banned_at_utc !== null && !item.removed;
}
export const activityIsDeleted = (item: Submission|Comment): boolean => {
if(item instanceof Submission) {
return item.removed_by_category === 'deleted';
}
return item.author.name === '[deleted]'
}


@@ -0,0 +1,55 @@
import {labelledFormat, logLevels} from "../util";
import winston, {Logger} from "winston";
const {transports} = winston;
export const getLogger = (options: any, name = 'default'): Logger => {
if(!winston.loggers.has(name)) {
const {
path,
level,
additionalTransports = [],
} = options || {};
const consoleTransport = new transports.Console();
const myTransports = [
consoleTransport,
];
let errorTransports = [consoleTransport];
for (const a of additionalTransports) {
myTransports.push(a);
errorTransports.push(a);
}
if (path !== undefined && path !== '') {
const rotateTransport = new winston.transports.DailyRotateFile({
dirname: path,
createSymlink: true,
symlinkName: 'contextBot-current.log',
filename: 'contextBot-%DATE%.log',
datePattern: 'YYYY-MM-DD',
maxSize: '5m'
});
// @ts-ignore
myTransports.push(rotateTransport);
// @ts-ignore
errorTransports.push(rotateTransport);
}
const loggerOptions = {
level: level || 'info',
format: labelledFormat(),
transports: myTransports,
levels: logLevels,
exceptionHandlers: errorTransports,
rejectionHandlers: errorTransports,
};
winston.loggers.add(name, loggerOptions);
}
return winston.loggers.get(name);
}
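A minimal call sketch (the level, path, and logger name are illustrative):
const logger = getLogger({ level: 'debug', path: '/opt/contextbot/logs' }, 'app');
logger.info('web server listening');
// a second call with the same name returns the already-registered logger rather than building a new one
const same = getLogger({}, 'app');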


@@ -1,158 +1,201 @@
import snoowrap from "snoowrap";
import minimist from 'minimist';
import winston from 'winston';
import 'winston-daily-rotate-file';
import dayjs from 'dayjs';
import utc from 'dayjs/plugin/utc.js';
import dduration from 'dayjs/plugin/duration.js';
import {labelledFormat} from "./util";
import EventEmitter from "events";
import relTime from 'dayjs/plugin/relativeTime.js';
import sameafter from 'dayjs/plugin/isSameOrAfter.js';
import samebefore from 'dayjs/plugin/isSameOrBefore.js';
import {Manager} from "./Subreddit/Manager";
import pEvent from "p-event";
import {Command, Argument} from 'commander';
import {
addOptions,
checks,
getUniversalCLIOptions,
getUniversalWebOptions,
operatorConfig
} from "./Utils/CommandConfig";
import {App} from "./App";
import createWebServer from './Server/server';
import createHelperServer from './Server/helper';
import Submission from "snoowrap/dist/objects/Submission";
import {COMMENT_URL_ID, parseLinkIdentifier, SUBMISSION_URL_ID} from "./util";
import LoggedError from "./Utils/LoggedError";
import {getLogger} from "./Utils/loggerFactory";
import {buildOperatorConfigWithDefaults, parseOperatorConfigFromSources} from "./ConfigBuilder";
dayjs.extend(utc);
dayjs.extend(dduration);
dayjs.extend(relTime);
dayjs.extend(sameafter);
dayjs.extend(samebefore);
const {transports} = winston;
const commentReg = parseLinkIdentifier([COMMENT_URL_ID]);
const submissionReg = parseLinkIdentifier([SUBMISSION_URL_ID]);
const argv = minimist(process.argv.slice(2));
const {
_: subredditsArgs = [],
clientId = process.env.CLIENT_ID,
clientSecret = process.env.CLIENT_SECRET,
accessToken = process.env.ACCESS_TOKEN,
refreshToken = process.env.REFRESH_TOKEN,
logDir = process.env.LOG_DIR,
logLevel = process.env.LOG_LEVEL,
} = argv;
const preRunCmd = new Command();
preRunCmd.addOption(operatorConfig);
preRunCmd.allowUnknownOption();
const logPath = logDir ?? `${process.cwd()}/logs`;
const program = new Command();
// @ts-ignore
const rotateTransport = new winston.transports.DailyRotateFile({
dirname: logPath,
createSymlink: true,
symlinkName: 'contextBot-current.log',
filename: 'contextBot-%DATE%.log',
datePattern: 'YYYY-MM-DD',
maxSize: '5m'
});
const consoleTransport = new transports.Console();
const myTransports = [
consoleTransport,
];
if (typeof logPath === 'string') {
// @ts-ignore
myTransports.push(rotateTransport);
}
const loggerOptions = {
level: logLevel || 'info',
format: labelledFormat(),
transports: myTransports,
};
winston.loggers.add('default', loggerOptions);
const logger = winston.loggers.get('default');
const version = process.env.VERSION || 'dev';
let subredditsArg = subredditsArgs;
if (subredditsArg.length === 0) {
// try to get from comma delim env variable
const subenv = process.env.SUBREDDITS;
if (typeof subenv === 'string') {
subredditsArg = subenv.split(',');
}
}
(async function () {
const creds = {
userAgent: `web:contextBot:${version}`,
clientId,
clientSecret,
refreshToken,
accessToken,
};
try {
const client = new snoowrap(creds);
client.config({warnings: true, retryErrorCodes: [500], maxRetryAttempts: 2, debug: logLevel === 'debug'});
//const me = await client.getMe().name;
// determine which subreddits this account has appropriate access to
let availSubs = [];
for (const sub of await client.getModeratedSubreddits()) {
// TODO don't know a way to check permissions yet
availSubs.push(sub);
// if(sub.user_is_moderator) {
// const modUser = sub.getModerators().find(x => x.name === myName);
// const canMod = modUser.features
// }
}
let subsToRun = [];
// if user specified subs to run on check they are all subs client can mod
if (subredditsArg.length > 0) {
for (const sub of subredditsArg) {
const asub = availSubs.find(x => x.name.toLowerCase() === sub.trim().toLowerCase())
if (asub === undefined) {
logger.error(`Will not run on ${sub} because it is not moderated by this client or the client does not have the appropriate permissions to moderate it.`);
let runCommand = program
.command('run')
.addArgument(new Argument('[interface]', 'Which interface to start the bot with').choices(['web', 'cli']).default(undefined, 'process.env.WEB || true'))
.description('Monitor new activities from configured subreddits.')
.allowUnknownOption();
runCommand = addOptions(runCommand, getUniversalWebOptions());
runCommand.action(async (interfaceVal, opts) => {
const config = buildOperatorConfigWithDefaults(await parseOperatorConfigFromSources({...opts, web: interfaceVal !== undefined ? interfaceVal === 'web': undefined}));
const {
credentials: {
redirectUri,
clientId,
clientSecret,
accessToken,
refreshToken,
},
web: {
enabled: web,
},
logging,
} = config;
const logger = getLogger(logging, 'init');
const hasClient = clientId !== undefined && clientSecret !== undefined;
const hasNoTokens = accessToken === undefined && refreshToken === undefined;
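// First-time setup case: client credentials are present but no access/refresh token exists yet, so the
// helper server is started (presumably to walk the operator through OAuth) instead of the full web interface.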
try {
if (web) {
if (hasClient && hasNoTokens) {
// run web helper
const server = createHelperServer(config);
await server;
} else {
if (redirectUri === undefined) {
logger.warn(`No 'redirectUri' found in arg/env. Bot will still run but web interface will not be accessible.`);
}
const server = createWebServer(config);
await server;
}
} else {
subsToRun.push(asub);
const app = new App(config);
await app.buildManagers();
await app.runManagers();
}
} catch (err) {
throw err;
}
});
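// Illustrative invocations of the 'run' command above (the entrypoint name is an assumption):
//   node index.js run        -> interface falls back to the configured default (process.env.WEB || true, per the argument description)
//   node index.js run cli    -> run headless: build and run the subreddit managers without the web server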
let checkCommand = program
.command('check <activityIdentifier> [type]')
.allowUnknownOption()
.description('Run check(s) on a specific activity', {
activityIdentifier: 'Either a permalink URL or the ID of the Comment or Submission',
type: `If activityIdentifier is not a permalink URL then the type of activity ('comment' or 'submission'). May also specify 'submission' type when using a permalink to a comment to get the Submission`,
});
checkCommand = addOptions(checkCommand, getUniversalCLIOptions());
checkCommand
.addOption(checks)
.action(async (activityIdentifier, type, commandOptions = {}) => {
const config = buildOperatorConfigWithDefaults(await parseOperatorConfigFromSources(commandOptions));
const {checks = []} = commandOptions;
const app = new App(config);
let a;
const commentId = commentReg(activityIdentifier);
if (commentId !== undefined) {
if (type !== 'submission') {
// @ts-ignore
a = await app.client.getComment(commentId);
} else {
// @ts-ignore
a = await app.client.getSubmission(submissionReg(activityIdentifier) as string);
}
}
if (a === undefined) {
const submissionId = submissionReg(activityIdentifier);
if (submissionId !== undefined) {
if (type === 'comment') {
throw new Error(`Detected URL was for a submission but type was 'comment', cannot get activity`);
} else {
// @ts-ignore
a = await app.client.getSubmission(submissionId);
}
}
}
if (a === undefined) {
// if we get this far then probably not a URL
if (type === undefined) {
throw new Error(`activityIdentifier was not a valid Reddit URL and type was not specified`);
}
if (type === 'comment') {
// @ts-ignore
a = await app.client.getComment(activityIdentifier);
} else {
// @ts-ignore
a = await app.client.getSubmission(activityIdentifier);
}
}
// @ts-ignore
const activity = await a.fetch();
const sub = await activity.subreddit.display_name;
await app.buildManagers([sub]);
if (app.subManagers.length > 0) {
const manager = app.subManagers.find(x => x.subreddit.display_name === sub) as Manager;
await manager.runChecks(type === 'comment' ? 'Comment' : 'Submission', activity, {checkNames: checks});
}
});
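// Illustrative invocations of the 'check' command above (identifiers are hypothetical):
//   node index.js check https://reddit.com/r/mySub/comments/abc123/a_title/def456
//     -> the permalink is parsed, the comment is fetched, and its subreddit's checks are run against it
//   node index.js check def456 comment
//     -> a bare id, so the [type] argument is required to know whether to fetch a comment or a submission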
let unmodCommand = program.command('unmoderated <subreddits...>')
.description('Run checks on all unmoderated activity in the modqueue', {
subreddits: 'The list of subreddits to run on. If not specified will run on all subreddits the account has moderation access to.'
})
.allowUnknownOption();
unmodCommand = addOptions(unmodCommand, getUniversalCLIOptions());
unmodCommand
.addOption(checks)
.action(async (subreddits = [], opts = {}) => {
const config = buildOperatorConfigWithDefaults(await parseOperatorConfigFromSources(opts));
const {checks = []} = opts;
const {subreddits: {names}} = config;
const app = new App(config);
await app.buildManagers(names);
for (const manager of app.subManagers) {
const activities = await manager.subreddit.getUnmoderated();
for (const a of activities.reverse()) {
manager.queue.push({
checkType: a instanceof Submission ? 'Submission' : 'Comment',
activity: a,
options: {checkNames: checks}
});
}
}
});
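// Illustrative invocation of the 'unmoderated' command above (subreddit names are hypothetical):
//   node index.js unmoderated mySubA mySubB
// Each manager's unmoderated listing is queued in reverse so older items are processed first
// (assuming the listing is returned newest-first).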
await program.parseAsync();
} catch (err) {
if (!err.logged && !(err instanceof LoggedError)) {
const logger = winston.loggers.get('default');
if (err.name === 'StatusCodeError' && err.response !== undefined) {
const authHeader = err.response.headers['www-authenticate'];
if (authHeader !== undefined && authHeader.includes('insufficient_scope')) {
logger.error('Reddit responded with a 403 insufficient_scope, did you choose the correct scopes?');
}
}
} else {
// otherwise assume all moddable subs from client should be run on
subsToRun = availSubs;
console.log(err);
}
let subSchedule: Manager[] = [];
// get configs for subs we want to run on and build/validate them
for (const sub of subsToRun) {
let content = undefined;
let json = undefined;
try {
const wiki = sub.getWikiPage('contextbot');
content = await wiki.content_md;
} catch (err) {
logger.error(`Could not read wiki configuration for ${sub.display_name}. Please ensure the page 'contextbot' exists and is readable -- error: ${err.message}`);
continue;
}
try {
json = JSON.parse(content);
} catch (err) {
logger.error(`Wiki page contents for ${sub.display_name} were not valid JSON -- error: ${err.message}`);
continue;
}
try {
subSchedule.push(new Manager(sub, client, logger, json));
} catch (err) {
logger.error(`Config for ${sub.display_name} was not valid, will not run for this subreddit`);
}
}
const emitter = new EventEmitter();
for (const manager of subSchedule) {
manager.handle();
}
// the 'end' event is never emitted, so awaiting it keeps the process running indefinitely
await pEvent(emitter, 'end');
} catch (err) {
if(err.name === 'StatusCodeError' && err.response !== undefined) {
const authHeader = err.response.headers['www-authenticate'];
if(authHeader !== undefined && authHeader.includes('insufficient_scope')) {
logger.error('Reddit responded with a 403 insufficient_scope, did you choose the correct scopes?');
}
}
debugger;
console.log(err);
process.kill(process.pid, 'SIGTERM');
}
}());
export {Author} from "./Author/Author";
export {AuthorCriteria} from "./Author/Author";
export {AuthorOptions} from "./Author/Author";
