Compare commits

..

89 Commits
0.4.2 ... 0.5.0

Author SHA1 Message Date
FoxxMD
fb3047ca82 Remove error wording (may be normal shutdown) 2021-07-27 13:08:56 -04:00
FoxxMD
193ecfba2f Fix truthy condition for notifyOnTrigger 2021-07-27 12:56:53 -04:00
FoxxMD
ef3475e519 Add a blank notification manager for initial manager creation 2021-07-27 12:42:31 -04:00
FoxxMD
a003e18360 Add log statement when sending notification 2021-07-27 12:33:27 -04:00
FoxxMD
6b6124d76e Implement event hooks for notifications
* Implemented a generic notification manager with several event types
* Implemented discord notification provider
2021-07-27 12:24:36 -04:00
FoxxMD
e4f18e8f06 Fix cache call request rate when over 10 minutes 2021-07-26 18:45:53 -04:00
FoxxMD
24963ec333 More op doc fixes 2021-07-26 17:10:12 -04:00
FoxxMD
2ab6ee3715 Typo fix 2021-07-26 17:07:07 -04:00
FoxxMD
802884f686 Add operator configuration documentation 2021-07-26 17:06:02 -04:00
FoxxMD
67ed8ab4ee Add more caching info and tooltips for major components 2021-07-26 14:47:58 -04:00
FoxxMD
1e6d61ac31 Add 401 http status to retry handler
Seen this occasionally on polling and is actually a reddit issue, not client, so should be able to retry on it
2021-07-26 12:20:10 -04:00
FoxxMD
7cda47183f Simplify cli options and improve web server checks
* Check for in-use web port before starting so we can display a useful error message
* Remove 'web' command and instead make it an optional argument for `run` (default to web)
* Update dockerfile to use run command
* Remove remaining commander option defaults since they are defined in config builder now
2021-07-26 12:18:51 -04:00
FoxxMD
a9edd4b998 Fix miss percent formatting 2021-07-26 09:55:03 -04:00
FoxxMD
9e1d5f1dd4 Ui polish
* Add more caching information
* Add more visual distinction for selected subreddit
* Add querystring based on shown subreddit so view persists between reload
2021-07-23 17:03:30 -04:00
FoxxMD
4617e06459 Fix missing await on usernote cache call 2021-07-23 16:44:21 -04:00
FoxxMD
b2b4988246 Consolidate all caching
* Use cache manager instance in UserNotes and callback to report stats
* Simplify sub resource config interface and setter
* Use cache manager for express session
* Add session cache provider to operator config
2021-07-23 14:48:33 -04:00
FoxxMD
e1c24133dd Use name instead of user id for banning 2021-07-23 13:26:53 -04:00
FoxxMD
12a4e0436e Simplify resource stats generation 2021-07-23 11:46:37 -04:00
FoxxMD
484931d8b5 Fix bool middleware running on wrong route 2021-07-23 11:10:53 -04:00
FoxxMD
abf2674f80 Detect noisy html errors from reddit 2021-07-22 21:37:43 -04:00
FoxxMD
1f3cfbeef9 Fix cache miss value comparison 2021-07-22 21:37:08 -04:00
FoxxMD
2b21885a55 Fix default cache stuff when subreddit manager isn't built 2021-07-22 17:59:45 -04:00
FoxxMD
232925e691 Maybe fix error on ban 2021-07-22 17:53:47 -04:00
FoxxMD
a91b9ab146 Use more robust caching implementation
* use node-cache-manager so operator has a choice of memory or redis
* BC update TTL values to be in seconds instead of milliseconds
* Count requests and misses for cache
* display cache stats in ui
2021-07-22 17:47:19 -04:00
FoxxMD
73c3052c69 Refactor how configuration is parsed
* Implement a weak/strong interface for operator (app) config
* Consolidate config parsing code into ConfigBuilder
* Implement parsing configuration from a json file
* Refactor configuration parsing so there is a clear hierarchy to where and how config is overridden
2021-07-22 15:17:41 -04:00
FoxxMD
4fbb3edf8b Lots of regex fixes 2021-07-21 16:50:49 -04:00
FoxxMD
c69d66c001 Initial implementation of Regex Rule 2021-07-21 15:47:26 -04:00
FoxxMD
9b53974152 Fix formatting for rolling average 2021-07-21 12:14:28 -04:00
FoxxMD
13d3ed2314 Improve repeat pattern detection for repeat activity rule
* Fuzzy match on all identifiers to enable detecting small variations in title/body
* For reddit media submissions (image/video) check title instead of url
* For submissions with external url also check title
2021-07-21 12:09:51 -04:00
FoxxMD
9d7505fa38 More UI improvements
* Move theme toggle to title to make room in the subreddit tabs when there are many, many subreddits
* Implement check permalink with subreddit context -- can run any permalink on a subreddit's config
2021-07-21 12:07:37 -04:00
FoxxMD
1b94316987 Fix log level for api nanny 2021-07-20 20:30:06 -04:00
FoxxMD
9316019b01 Implement api nanny mode to help with heavy api usage 2021-07-20 20:15:15 -04:00
FoxxMD
4642f67104 Implement loading operator arguments (main program) from file 2021-07-20 16:38:35 -04:00
FoxxMD
a78692d7a0 Even more polish 2021-07-20 13:11:37 -04:00
FoxxMD
3627a5f60a More UI polish 2021-07-20 12:48:32 -04:00
FoxxMD
6b04ea0a9d Compact stats/info 2021-07-20 00:57:47 -04:00
FoxxMD
f6217547ae Some ui cleanup 2021-07-20 00:23:48 -04:00
FoxxMD
f1b24eb4a2 Add unmoderated/modqueue check actions to ui 2021-07-19 23:16:27 -04:00
FoxxMD
c9bdae66dd Implement configurable shared modqueue for operator convenience
If operator is running subreddits for lop-sided traffic burdens then /r/mod may be over the 100 activity limit, all from subreddit X while sub Y and Z have few activities. In this scenario Y and Z would never run since X would take up all results. Therefore default to individual modqueues and make shared modqueue opt-in by operator
2021-07-19 23:04:47 -04:00
FoxxMD
6ab162888b try-catch all the things 2021-07-19 22:37:50 -04:00
FoxxMD
dd202ac790 Add console to error/rejection transports 2021-07-19 22:37:39 -04:00
FoxxMD
eead88c9a7 Remove secondary client usage which was causing bug
For some reason using client *again* here was causing the primary client usage to lose response data. I think perhaps it needs to be cloned or need to create a new instance all-together rather than trying to re-use.
2021-07-19 22:37:22 -04:00
FoxxMD
f548be8060 Fix "All" actions type 2021-07-19 16:44:03 -04:00
FoxxMD
5df4fd5ccc Separate event polling and activity processing systems to improve robustness
* Use async/queue for handling activity processing on each manager
* Refactor polling to push activities into activity queue
* Refactor manager state to control activity processing queue and event polling independently
* Pause activity processing and wait until no work is being done to do config update
* Add way more logging for new systems
* Add basic ui controls/view for new systems
2021-07-19 16:32:41 -04:00
FoxxMD
b25001b7af Reduce false positives for repeat self/comment activities
* For self submissions use "title + body" to identify repeats instead of just body -- body could potentially be empty
* Add "minWordCount" with default of 1 so that blank comments are ignored
2021-07-18 21:30:44 -04:00
FoxxMD
8733717cda Remove submission title on comment item peek
Erroneously returning proxy object instead of actual submission. Don't really need the title since it's in the permalink so remove it entirely to avoid another api call
2021-07-18 21:05:56 -04:00
FoxxMD
6167d45e49 Refactor configuration loading to be independent of manager initialization
* Makes it easier to deal with never-successful subs (from startup) and invalid configs during run
* Paves the way for adding managers in-situ
* Add validConfig property to manager to track this and in UI
* Track if user manually stopped manager so we don't try to restart on heartbeat
2021-07-16 16:57:25 -04:00
FoxxMD
694842226b Cover case where user declines oauth 2021-07-16 15:35:25 -04:00
FoxxMD
02ad661226 Cover case where outside user logs in 2021-07-16 15:18:38 -04:00
FoxxMD
3be62f5560 Use partials for rendering head/title 2021-07-16 15:18:01 -04:00
FoxxMD
eb84df5133 Set dark theme as default 2021-07-16 12:12:17 -04:00
FoxxMD
4b11e348ba Update some logging to be more subreddit specific 2021-07-16 12:08:19 -04:00
FoxxMD
9edacf29fa Refactor log tracking to handle limits per subreddit
Makes the 'limit' option in UI more true -- keep track of $max number of logs per subreddit and then combine at render time
2021-07-16 11:50:34 -04:00
FoxxMD
56c13474d9 Cover edge case where logs don't exist for a subreddit 2021-07-16 10:15:47 -04:00
FoxxMD
66a4144b7b retry on all stream types 2021-07-16 10:11:17 -04:00
FoxxMD
16880775fb Increase max polling retries
If reddit has a general outage this is where we want to wait the longest before giving up entirely since polling is infrequent (relative to running checks)
2021-07-16 10:11:03 -04:00
FoxxMD
d69d0e972c End stream when error occurs
Stream should also stop polling. It's up to the retry handler and individual usage of stream in app/manager to restart it
2021-07-16 10:10:18 -04:00
FoxxMD
a9350c2828 Fix retry handler and broaden request conditions
* fix other retry compare statement
* check request for status code missing (timeout, socket timeout, address unavailable, etc.) or valid status code
* clarify wording in retry handler logging
2021-07-16 10:09:38 -04:00
FoxxMD
2fe06f21d9 Fix stats margin 2021-07-15 23:04:09 -04:00
FoxxMD
42d71a918f Implement URL checker 2021-07-15 22:58:20 -04:00
FoxxMD
0aa2b24f39 Implement actions 2021-07-15 20:22:27 -04:00
FoxxMD
4771efa32a Add status indicator to subreddit tab name 2021-07-15 20:01:07 -04:00
FoxxMD
1d9f4f32b8 Implement logout and reduce footprint of light dark toggle 2021-07-15 19:54:12 -04:00
FoxxMD
d84e6f1905 Implement oauth helper ui 2021-07-15 19:34:01 -04:00
FoxxMD
ae19d1c9a1 Fix log check 2021-07-15 17:10:31 -04:00
FoxxMD
f9c7cf433e Switch to web as default run command and provide fallback
* Use 'web' as run command for dockerfile
* If on 'web' there is no redirect uri fallback to cli (doesn't break compatibility this way)
2021-07-15 15:44:47 -04:00
FoxxMD
2917233728 More startup logging and better logging on invalid/missing credentials 2021-07-15 15:31:56 -04:00
FoxxMD
6dfb5823ba Add web ui port as configurable 2021-07-15 15:02:41 -04:00
FoxxMD
14e7275f64 Implement operator-specific UI components
* Add api limit, reset, and heartbeat interval to All overview when operator is viewing
* Stream op stats on log emit
* Add env/arg for setting "Operated by"
2021-07-15 14:57:05 -04:00
FoxxMD
1fbe6b708d More layout improvements 2021-07-15 13:37:05 -04:00
FoxxMD
495213bba9 Improvements to web ui
* Implement stats tracking per manager
* Render stats per sub and overall in ui
* Clean up ui layout
2021-07-15 12:54:32 -04:00
FoxxMD
15e031d448 Implement web ui
* Refactor program options to allow running as web
* Implement authentication using reddit oauth
* Use session in memory to store identification and accessible subreddits
* Implement socket.io with shared session to enable streaming logs
* Implement log streaming with per-subreddit views
2021-07-15 01:18:21 -04:00
FoxxMD
6994bbe770 Remove erroneous delayUntil default 2021-07-13 14:32:46 -04:00
FoxxMD
a3c923bda6 Fix passing dryrun arg 2021-07-13 10:31:29 -04:00
FoxxMD
a40c4c5e58 Fix erroneous debug change for heartbeat interval 2021-07-12 13:45:19 -04:00
FoxxMD
be9dcdee1c Refactor polling streams to reduce usage and improve error catching
* Re-implement all polling streams with extended poll class for catching errors
* Increase default polling interval to 30 seconds and limit to 50
* When using default interval/limit for mod polling use a shared stream between all managers and poll to /r/mod -- reduces api calls for polling mod streams to 1 regardless of how many subreddits are managed
2021-07-12 13:44:07 -04:00
FoxxMD
07b34caffb Add proxy options and handle polling errors
* Implement proxied snoowrap requests
* Extend snoostorm class so timeout/errors during polling can be caught
2021-07-12 10:01:53 -04:00
FoxxMD
c5a3404242 Remove erroneous debugging addition 2021-07-08 14:13:02 -04:00
FoxxMD
1e03b38f0a Add polling info on options build 2021-07-07 17:19:56 -04:00
FoxxMD
f64be77e70 Change log level of processing delay statement 2021-07-07 17:02:00 -04:00
FoxxMD
a3da77874b Ignore doc updates for docker 2021-07-07 16:42:06 -04:00
FoxxMD
a9f740c9fa Fix example links 2021-07-07 16:40:45 -04:00
FoxxMD
00e6346cdb Fix wording 2021-07-07 16:38:55 -04:00
FoxxMD
951359ac39 Add tuning documentation for repeat activity 2021-07-07 16:10:24 -04:00
FoxxMD
15824e5d0f Implement delay before processing
Using criteria for Activity being N seconds old allows for delay with possibility of immediate processing to avoid api call

Closes #23
2021-07-07 15:23:59 -04:00
FoxxMD
e7c794ec85 Fix activities example 2021-07-07 13:16:04 -04:00
FoxxMD
70e426de7e Fix dayjs link 2021-07-07 13:14:27 -04:00
FoxxMD
cc2518d086 Refactor documentation and ENHANCE 2021-07-07 13:06:51 -04:00
FoxxMD
5517c75d4c Update examples 2021-07-07 09:44:12 -04:00
83 changed files with 9957 additions and 1040 deletions

View File

@@ -4,3 +4,4 @@ Dockerfile
.gitignore
.git
src/logs
/docs

View File

@@ -24,4 +24,8 @@ RUN mkdir -p $log_dir
VOLUME $log_dir
ENV LOG_DIR=$log_dir
ARG webPort=8085
ENV PORT=$webPort
EXPOSE $PORT
CMD [ "node", "src/index.js", "run" ]

View File

@@ -26,7 +26,7 @@ Some feature highlights:
* author criteria (name, css flair/text, moderator status, and [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes))
* Activity state (removed, locked, distinguished, etc.)
* Rules and Actions support named references so you write rules/actions once and reference them anywhere
* User-configurable global/subreddit-level API caching
* User-configurable global/subreddit-level API caching with optional redis-backend
* Support for [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) as criteria or Actions (writing notes)
* Docker container support
@@ -34,13 +34,12 @@ Some feature highlights:
* [How It Works](#how-it-works)
* [Installation](#installation)
* [Configuration](#configuration)
* [Examples](#examples)
* [Configuration And Docs](#configuration)
* [Usage](#usage)
### How It Works
Context Bot's configuration is made up of an array of **Checks**. Each **Check** consists of :
Context Bot's configuration is made up of a list of **Checks**. Each **Check** consists of :
#### Kind
@@ -48,7 +47,7 @@ Is this check for a submission or comment?
#### Rules
A list of **Rule** objects to run against the activity. If **any** Rule object is triggered by the activity then the Check runs its **Actions**
A list of **Rule** objects to run against the activity. Triggered Rules can cause the whole Check to trigger and run its **Actions**
#### Actions
@@ -90,14 +89,12 @@ docker run -e "CLIENT_ID=myId" ... foxxmd/reddit-context-bot
## Configuration
[**Check the docs for in-depth explanations of all concepts and examples**](/docs)
Context Bot's configuration can be written in JSON, [JSON5](https://json5.org/) or YAML. It's [schema](/src/Schema/App.json) conforms to [JSON Schema Draft 7](https://json-schema.org/).
I suggest using [Atlassian JSON Schema Viewer](https://json-schema.app/start) ([direct link](https://json-schema.app/view/%23?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json)) so you can view all documentation while also interactively writing and validating your config! From there you can drill down into any object, see its requirements, view an example JSON document, and live-edit your configuration on the right-hand side.
### Examples
Read through the [Examples](/examples) section for a thorough introduction to all the **Rules**, in-depth concepts, and sample configuration files.
### Action Templating
Actions that can submit text (Report, Comment) will have their `content` values run through a [Mustache Template](https://mustache.github.io/). This means you can insert data generated by Rules into your text before the Action is performed.
@@ -173,28 +170,39 @@ const content = "My content will render the property {{item.aProperty}} like thi
Usage: index [options] [command]
Options:
-c, --clientId <id> Client ID for your Reddit application (default: process.env.CLIENT_ID)
-e, --clientSecret <secret> Client Secret for your Reddit application (default: process.env.CLIENT_SECRET)
-a, --accessToken <token> Access token retrieved from authenticating an account with your Reddit Application (default: process.env.ACCESS_TOKEN)
-r, --refreshToken <token> Refresh token retrieved from authenticating an account with your Reddit Application (default: process.env.REFRESH_TOKEN)
-s, --subreddits <list...> List of subreddits to run on. Bot will run on all subs it has access to if not defined (default: process.env.SUBREDDITS (comma-seperated))
-d, --logDir <dir> Absolute path to directory to store rotated logs in (default: process.env.LOG_DIR || process.cwd()/logs)
-l, --logLevel <level> Log level (default: process.env.LOG_LEVEL || info)
-w, --wikiConfig <path> Relative url to contextbot wiki page EX https://reddit.com/r/subreddit/wiki/<path> (default: process.env.WIKI_CONFIG || 'botconfig/contextbot')
--snooDebug Set Snoowrap to debug (default: process.env.SNOO_DEBUG || false)
--authorTTL <ms> Set the TTL (ms) for the Author Activities shared cache (default: process.env.AUTHOR_TTL || 10000)
--heartbeat <s> Interval, in seconds, between heartbeat logs. Set to 0 to disable (default: process.env.HEARTBEAT || 300)
--apiLimitWarning <remaining> When API limit remaining (600/10min) is lower than this value log statements for limit will be raised to WARN level (default: process.env.API_REMAINING || 250)
--dryRun Set dryRun=true for all checks/actions on all subreddits (overrides any existing) (default: process.env.DRYRUN)
--disableCache Disable caching for all subreddits (default: process.env.DISABLE_CACHE || false)
-h, --help display help for command
Commands:
run Runs bot normally
run [options] [interface] Monitor new activities from configured subreddits.
check [options] <activityIdentifier> [type] Run check(s) on a specific activity
unmoderated [options] <subreddits...> Run checks on all unmoderated activity in the modqueue
help [command] display help for command
Options:
-c, --operatorConfig <path> An absolute path to a JSON file to load all parameters from (default: process.env.OPERATOR_CONFIG)
-i, --clientId <id> Client ID for your Reddit application (default: process.env.CLIENT_ID)
-e, --clientSecret <secret> Client Secret for your Reddit application (default: process.env.CLIENT_SECRET)
-a, --accessToken <token> Access token retrieved from authenticating an account with your Reddit Application (default: process.env.ACCESS_TOKEN)
-r, --refreshToken <token> Refresh token retrieved from authenticating an account with your Reddit Application (default: process.env.REFRESH_TOKEN)
-u, --redirectUri <uri> Redirect URI for your Reddit application (default: process.env.REDIRECT_URI)
-t, --sessionSecret <secret> Secret use to encrypt session id/data (default: process.env.SESSION_SECRET || a random string)
-s, --subreddits <list...> List of subreddits to run on. Bot will run on all subs it has access to if not defined (default: process.env.SUBREDDITS)
-d, --logDir [dir] Absolute path to directory to store rotated logs in. Leaving undefined disables rotating logs (default: process.env.LOG_DIR)
-l, --logLevel <level> Minimum level to log at (default: process.env.LOG_LEVEL || verbose)
-w, --wikiConfig <path> Relative url to contextbot wiki page EX https://reddit.com/r/subreddit/wiki/<path> (default: process.env.WIKI_CONFIG || 'botconfig/contextbot')
--snooDebug Set Snoowrap to debug. If undefined will be on if logLevel='debug' (default: process.env.SNOO_DEBUG)
--authorTTL <ms> Set the TTL (ms) for the Author Activities shared cache (default: process.env.AUTHOR_TTL || 60000)
--heartbeat <s> Interval, in seconds, between heartbeat checks. (default: process.env.HEARTBEAT || 300)
--softLimit <limit> When API limit remaining (600/10min) is lower than this subreddits will have SLOW MODE enabled (default: process.env.SOFT_LIMIT || 250)
--hardLimit <limit> When API limit remaining (600/10min) is lower than this all subreddit polling will be paused until api limit reset (default: process.env.SOFT_LIMIT || 250)
--dryRun Set all subreddits in dry run mode, overriding configurations (default: process.env.DRYRUN || false)
--proxy <proxyEndpoint> Proxy Snoowrap requests through this endpoint (default: process.env.PROXY)
--operator <name> Username of the reddit user operating this application, used for displaying OP level info/actions in UI (default: process.env.OPERATOR)
--operatorDisplay <name> An optional name to display who is operating this application in the UI (default: process.env.OPERATOR_DISPLAY || Anonymous)
-p, --port <port> Port for web server to listen on (default: process.env.PORT || 8085)
-q, --shareMod If enabled then all subreddits using the default settings to poll "unmoderated" or "modqueue" will retrieve results from a shared request to /r/mod (default: process.env.SHARE_MOD || false)
-h, --help display help for command
```
### Logging

211
docs/README.md Normal file
View File

@@ -0,0 +1,211 @@
# Documentation
# Table of Contents
* [Getting Started](#getting-started)
* [How It Works](#how-it-works)
* [Concepts](#concepts)
* [Rule](#rule)
* [Examples](#available-rules)
* [Rule Set](#rule-set)
* [Examples](#rule-set-examples)
* [Action](#action)
* [Examples](#available-actions)
* [Filters](#filters)
* [Configuration](#configuration)
* [Common Resources](#common-resources)
* [Activities `window`](#activities-window)
* [Comparisons](#thresholds-and-comparisons)
* [Best Practices](#best-practices)
* [Subreddit-ready Configurations](#subreddit-ready-configurations)
* FAQ
## Getting Started
Review **at least** the **How It Works** and **Concepts** below and then head to the [**Getting Started documentation.**](/docs/gettingStarted.md)
## How It Works
Where possible Reddit Context Bot (RCB) uses the same terminology as, and emulates the behavior of, **automoderator** so if you are familiar with that, much of this may seem familiar to you.
RCB's lifecycle looks like this:
#### 1) A new event in your subreddit is received by RCB
The events RCB watches for are configured by you. These can be new modqueue items, submissions, or comments.
#### 2) RCB sequentially processes each Check in your configuration
A **Check** is a set of:
* One or more **Rules** that define what conditions should **trigger** this Check
* One or more **Actions** that define what the bot should do once the Check is **triggered**
#### 3) Each Check is processed, *in order*, until a Check is triggered
Once a Check is **triggered** no more Checks will be processed. This means all subsequent Checks in your configuration (in the order you listed them) are basically skipped.
#### 4) All Actions from that Check are executed
After all Actions are executed RCB returns to waiting for the next Event.
## Concepts
Core, high-level concepts regarding how RCB works.
### Checks
TODO
### Rule
A **Rule** is some set of **criteria** (conditions) that are tested against an Activity (comment/submission), a User, or a User's history. A Rule is considered **triggered** when the **criteria** for that rule are found to be **true** for whatever is being tested against.
There are generally three main properties for a Rule:
* **Criteria** -- The conditions/values you want to test for.
* **Activities Window** -- If applicable, the range of activities that the **criteria** will be tested against.
* **Rule-specific options** -- Any number of options that modify how the **criteria** are tested.
RCB has different **Rules** that can test against different types of behavior and aspects of a User, their history, and the Activity (submission/comment) being checked.
#### Available Rules
Find detailed descriptions of all the Rules, with examples, below:
* [Attribution](/docs/examples/attribution)
* [Recent Activity](/docs/examples/recentActivity)
* [Repeat Activity](/docs/examples/repeatActivity)
* [History](/docs/examples/history)
* [Author](/docs/examples/author)
### Rule Set
A **Rule Set** is a "grouped" set of `Rules` with a **trigger condition** specified.
Rule Sets can be used interchangeably with other **Rules** and **Rule Sets** in the `rules` list of a **Check**.
They allow you to create more complex trigger behavior by combining multiple rules into one "parent rule".
It consists of:
* **condition** -- Under what condition should the Rule Set be considered triggered?
* `AND` -- ALL Rules in the Rule Set must **trigger** in order for the Rule Set to **trigger.**
* `OR` -- ANY Rule in the Rule Set that is **triggered** will trigger the whole Rule Set.
* **rules** -- The **Rules** for the Rule Set.
Example
```json5
{
"condition": "AND",
"rules": [
// all the rules go here
]
}
```
#### Rule Set Examples
* [**Detailed Example**](/docs/examples/advancedConcepts/ruleSets.json5)
### Action
An **Action** is some action the bot can take against the checked Activity (comment/submission) or Author of the Activity. RCB has Actions for most things a normal reddit user or moderator can do.
### Available Actions
* Remove (Comment/Submission)
* Flair (Submission)
* Ban (User)
* Approve (Comment/Submission)
* Comment (Reply to Comment/Submission)
* Lock (Comment/Submission)
* Report (Comment/Submission)
* [UserNote](/docs/examples/userNotes) (User, when /r/Toolbox is used)
For detailed explanation and options of what individual Actions can do [see the links in the `actions` property in the schema.](https://json-schema.app/view/%23/%23%2Fdefinitions%2FSubmissionCheckJson?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json)
### Filters
TODO
## Configuration
* For **Operator/Bot maintainers** see **[Operator Configuration](/docs/operatorConfiguration.md)**
* For **Moderators** see the [App Schema](https://json-schema.app/view/%23?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) and [examples](/docs/examples)
## Common Resources
Technical information on recurring, common data/patterns used in RCB.
### Activities `window`
Most **Rules** must define the **range of Activities (submissions and/or comments)** that will be used to check the criteria of the Rule. This range is defined wherever you see a `window` property in configuration.
Refer to the [Activities Window](/docs/activitiesWindow.md) documentation for a technical explanation with examples.
### Thresholds and Comparisons
TODO
## Best Practices
### Named Rules
All **Rules** in a subreddit's configuration can be assigned a **name** that can then be referenced from any other Check.
Create general-use rules so they can be reused and de-clutter your configuration. Additionally RCB will automatically cache the result of a rule so there is a performance and api usage benefit to re-using Rules.
See [ruleNameReuse.json5](/docs/examples/advancedConcepts/ruleNameReuse.json5) for a detailed configuration with annotations.
### Check Order
Checks are run in the order they appear in your configuration, therefore you should place your highest requirement/severe action checks at the top and lowest requirement/moderate actions at the bottom.
This is so that if an Activity warrants a more serious reaction that Check is triggered first rather than having a lower requirement check with less severe actions triggered and causing all subsequent Checks to be skipped.
* Attribution >50% AND Repeat Activity 8x AND Recent Activity in 2 subs => remove submission + ban
* Attribution >20% AND Repeat Activity 4x AND Recent Activity in 5 subs => remove submission + flair user restricted
* Attribution >20% AND Repeat Activity 2x => remove submission
* Attribution >20% AND History comments <30% => remove submission
* Attribution >15% => report
* Repeat Activity 2x => report
* Recent Activity in 3 subs => report
* Author not vetted => flair new user submission
### Rule Order
The ordering of your Rules within a Check/RuleSet can have an impact on Check performance (speed) as well as API usage.
Consider these three rules:
* Rule A -- Recent Activity => 3 subreddits => last 15 submissions
* Rule B -- Repeat Activity => last 3 days
* Rule C -- Attribution => >10% => last 90 days or 300 submissions
The first two rules are lightweight in their requirements -- Rule A can be completed in 1 API call, Rule B potentially completed in 1 API call.
However, depending on how active the Author is, Rule C will take *at least* 3 API calls just to get all activities (Reddit limit 100 items per call).
If the Check is using `AND` condition for its rules (default) then if either Rule A or Rule B fail then Rule C will never run. This means 3 API calls never made plus the time waiting for each to return.
**It is therefore advantageous to list your lightweight Rules first in each Check.**
### API Caching
Context bot implements some basic caching functionality for **Author Activities** and wiki pages (on Comment/Report Actions).
**Author Activities** are cached for a subreddit-configurable amount of time (10 seconds by default). A cached activities set can be re-used if the **window on a Rule is identical to the window on another Rule**.
This means that when possible you should re-use window values.
IE If you want to check an Author's Activities for a time range try to always use **7 Days** or always use **50 Items** for absolute counts.
Re-use will result in fewer API calls and faster Check times.
## Subreddit-ready Configurations
TODO
## FAQ
TODO

217
docs/activitiesWindow.md Normal file
View File

@@ -0,0 +1,217 @@
# Activity Window
Most **Rules** have a `window` property somewhere within their configuration. This property defines the range of **Activities** (submission and/or comments) that should be retrieved for checking the criteria of the Rule.
As an example, if you want to run a **Recent Activity Rule** to check if a user has had activity in /r/mealtimevideos you also need to define what range of activities you want to look at from that user's history.
## `window` property overview (tldr)
The value of `window` can be any of these types:
* `number` count of activities
* `string` [duration](#duration-string-recommended) or [iso 8601](#an-iso-8601-duration-string)
* [duration `object`](#duration-object)
* [ActivityWindowCriteria `object`](#activitywindowcriteria)
Examples of all of the above
<details>
```
// count, last 100 activities
{
"window": 100
}
// duration string, last 10 days
{
"window": "10 days"
}
// duration object, last 2 months and 5 days
{
"window": {
"months": 2,
"days": 5,
}
}
// iso 8601 string, last 15 minutes
{
"window": "PT15M"
}
// ActivityWindowCriteria, last 100 activities or 6 weeks of activities (whichever is found first)
{
"window": {
"count": 100,
"duration": "6 weeks"
}
}
```
</details>
## Types of Ranges
There are two types of values that can be used when defining a range:
### Count
This is the **number** of activities you want to retrieve. It's straightforward -- if you want to look at the last 100 activities for a user you can use `100` as the value.
### Duration
A **duration of time** between which all activities will be retrieved. This is a **relative value** that calculates the actual range based on **the duration of time subtracted from when the rule is run.**
For example:
* Today is **July 15th**
* You define a duration of **10 days**
Then the range of activities to be retrieved will be between **July 5th and July 15th** (10 days).
#### Duration Values
The value used to define the duration can be **any of these three types**:
##### Duration String (recommended)
A string consisting of
* A [Dayjs unit of time](https://day.js.org/docs/en/durations/creating#list-of-all-available-units)
* The value of that unit of time
Examples:
* `9 days`
* `14 hours`
* `80 seconds`
You can ensure your string is valid by testing it [here.](https://regexr.com/61em3)
##### Duration Object
If you need to specify multiple units of time for your duration you can instead provide a [Dayjs duration **object**](https://day.js.org/docs/en/durations/creating#list-of-all-available-units) consisting of Dayjs unit-values.
Example
```json
{
"days": 4,
"hours": 6,
"minutes": 20
}
```
##### An ISO 8601 duration string
If you're a real nerd you can also use a [standard duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) string.
Examples
* `PT15M` (15 minutes)
Ensure your string is valid by testing it [here.](https://regexr.com/61em9)
## ActivityWindowCriteria
This is an object that lets you specify more granular conditions for your range.
The full object looks like this:
```json
{
"count": 100,
"duration": "10 days",
"satisfyOn": "any",
"subreddits": {
"include": ["mealtimevideos","pooptimevideos"],
"exclude": ["videos"]
}
}
```
### Specifying Range
You may use **one or both range properties.**
If both range properties are specified then the value `satisfyOn` determines how the final range is determined
#### Using `"satisfyOn": "any"` (default)
If **any** then Activities will be retrieved until one of the range properties is met, **whichever occurs first.**
Example
```json
{
"count": 80,
"duration": "90 days",
"satisfyOn": "any"
}
```
Activities are retrieved in chunks of 100 (or `count`, whichever is smaller)
* If 90 days of activities returns only 40 activities => returns 40 activities
* If 80 activities is only 20 days of range => 80 activities
#### Using `"satisfyOn": "all"`
If **all** then both ranges must be satisfied. Effectively, whichever range produces the most Activities will be the one that is used.
Example
```json
{
"count": 100,
"duration": "90 days",
"satisfyOn": "all"
}
```
Activities are retrieved in chunks of 100 (or `count`, whichever is smaller)
* If at 90 days of activities => 40 activities retrieved
* continue retrieving results until 100 activities
* so range is >90 days of activities
* If at 100 activities => 20 days of activities retrieved
* continue retrieving results until 90 days of range
* so results in >100 activities
### Filtering Activities
You may filter retrieved Activities using an array of subreddits.
**Note:** Activities are filtered **before** range check is made so you will always end up with specified range (but may require more api calls if many activities are filtered out)
#### Include
Use **include** to specify which subreddits should be included in results
Example where only activities from /r/mealtimevideos and /r/modsupport will be returned
```json
{
"count": 100,
"duration": "90 days",
"satisfyOn": "any",
"subreddits": {
"include": ["mealtimevideos","modsupport"]
}
}
```
#### Exclude
Use **exclude** to specify which subreddits should NOT be in the results
Example where activities from /r/mealtimevideos and /r/modsupport will not be returned in results
```json
{
"count": 100,
"duration": "90 days",
"satisfyOn": "any",
"subreddits": {
"exclude": ["mealtimevideos","modsupport"]
}
}
```
**Note:** `exclude` will be ignored if `include` is also present.

25
docs/examples/README.md Normal file
View File

@@ -0,0 +1,25 @@
# Examples
This directory contains examples of valid, ready-to-go configurations for Context Bot for the purpose of:
* showcasing what the bot can do
* providing best practices for writing your configuration
* providing generally useful configurations **that can be used immediately** or as a jumping-off point for your configuration
### Examples Overview
* Rules
* [Attribution](/docs/examples/attribution)
* [Recent Activity](/docs/examples/recentActivity)
* [Repeat Activity](/docs/examples/repeatActivity)
* [History](/docs/examples/history)
* [Author](/docs/examples/author)
* [Toolbox User Notes](/docs/examples/userNotes)
* [Advanced Concepts](/docs/examples/advancedConcepts)
* [Rule Sets](/docs/examples/advancedConcepts/ruleSets.json5)
* [Name Rules](/docs/examples/advancedConcepts/ruleNameReuse.json5)
* [Check Ordering](/docs/examples/advancedConcepts)
* Subreddit-ready examples
* Coming soon...

View File

@@ -1,6 +1,6 @@
### Named Rules
See [ruleNameReuse.json5](/examples/advancedConcepts/ruleNameReuse.json5)
See [ruleNameReuse.json5](/docs/examples/advancedConcepts/ruleNameReuse.json5)
### Check Order
@@ -23,7 +23,7 @@ The `rules` array on a `Checks` can contain both `Rule` objects and `RuleSet` ob
A **Rule Set** is a "nested" set of `Rule` objects with a passing condition specified. These allow you to create more complex trigger behavior by combining multiple rules.
See **[ruleSets.json5](/examples/advancedConcepts/ruleSets.json5)** for a complete example as well as consulting the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FRuleSetJson?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json).
See **[ruleSets.json5](/docs/examples/advancedConcepts/ruleSets.json5)** for a complete example as well as consulting the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FRuleSetJson?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json).
### Rule Order

View File

@@ -14,13 +14,11 @@
"kind": "attribution",
"criteria": [
{
"threshold": "10%",
"window": {
"days": 90
}
"threshold": "> 10%",
"window": "90 days"
},
{
"threshold": "10%",
"threshold": "> 10%",
"window": 100
}
],
@@ -54,7 +52,7 @@
"useSubmissionAsReference":true,
"thresholds": [
{
"totalCount": 1,
"threshold": ">= 1",
"subreddits": [
"DeFreeKarma",
"FreeKarma4U",
@@ -63,9 +61,7 @@
]
}
],
"window": {
"days": 7
}
"window": "7 days"
}
],
"actions": [

View File

@@ -13,13 +13,11 @@
"kind": "attribution",
"criteria": [
{
"threshold": "10%",
"window": {
"days": 90
}
"threshold": "> 10%",
"window": "90 days"
},
{
"threshold": "10%",
"threshold": "> 10%",
"window": 100
}
],
@@ -42,14 +40,12 @@
"kind": "attribution",
"criteria": [
{
"threshold": "10%",
"threshold": "> 10%",
"thresholdOn": "submissions",
"window": {
"days": 90
}
"window": "90 days"
},
{
"threshold": "10%",
"threshold": "> 10%",
"thresholdOn": "submissions",
"window": 100
}
@@ -62,23 +58,12 @@
"criteriaJoin": "OR",
"criteria": [
{
"window": {
"days": 90
},
"comment": {
"threshold": "50%",
"condition": "<"
}
"window": "90 days",
"comment": "< 50%"
},
{
"window": {
"days": 90
},
"comment": {
"asOp": true,
"threshold": "40%",
"condition": ">"
}
"window": "90 days",
"comment": "> 40% OP"
}
]
}
@@ -95,5 +80,5 @@
}
]
},
]
],
}

View File

@@ -10,5 +10,5 @@ Consult the [schema](https://json-schema.app/view/%23/%23%2Fdefinitions%2FCheckJ
### Examples
* [Self Promotion as percentage of all Activities](/examples/attribution/redditSelfPromoAll.json5) - Check if Author is submitting much more than they comment.
* [Self Promotion as percentage of Submissions](/examples/attribution/redditSelfPromoSubmissionsOnly.json5) - Check if any of Author's aggregated submission origins are >10% of their submissions
* [Self Promotion as percentage of all Activities](/docs/examples/attribution/redditSelfPromoAll.json5) - Check if Author is submitting much more than they comment.
* [Self Promotion as percentage of Submissions](/docs/examples/attribution/redditSelfPromoSubmissionsOnly.json5) - Check if any of Author's aggregated submission origins are >10% of their submissions

View File

@@ -13,17 +13,15 @@
"criteria": [
{
// threshold can be a percent or an absolute number
"threshold": "10%",
"threshold": "> 10%",
// The default is "all" -- calculate percentage of entire history (submissions & comments)
// "thresholdOn": "all",
// look at last 90 days of Author's activities (comments and submissions)
"window": {
"days": 90
}
"window": "90 days"
},
{
"threshold": "10%",
"threshold": "> 10%",
// look at Author's last 100 activities (comments and submissions)
"window": 100
}

View File

@@ -13,17 +13,15 @@
"criteria": [
{
// threshold can be a percent or an absolute number
"threshold": "10%",
"threshold": "> 10%",
// calculate percentage of submissions, rather than entire history (submissions & comments)
"thresholdOn": "submissions",
// look at last 90 days of Author's activities (comments and submissions)
"window": {
"days": 90
}
"window": "90 days"
},
{
"threshold": "10%",
"threshold": "> 10%",
"thresholdOn": "submissions",
// look at Author's last 100 activities (comments and submissions)
"window": 100

View File

@@ -9,7 +9,7 @@ The **Author** rule triggers if any [AuthorCriteria](https://json-schema.app/vie
* author's subreddit flair text
* author's subreddit flair css
* author's subreddit mod status
* [Toolbox User Notes](/examples/userNotes)
* [Toolbox User Notes](/docs/examples/userNotes)
The Author **Rule** is best used in conjunction with other Rules to short-circuit a Check based on who the Author is. It is easier to use a Rule to do this then to write **author filters** for every Rule (and makes Rules more re-useable).
@@ -18,10 +18,10 @@ Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FAuthorRule
### Examples
* Basic examples
* [Flair new user Submission](/examples/author/flairNewUserSubmission.json5) - If the Author does not have the `vet` flair then flair the Submission with `New User`
* [Flair vetted user Submission](/examples/author/flairNewUserSubmission.json5) - If the Author does have the `vet` flair then flair the Submission with `Vetted`
* [Flair new user Submission](/docs/examples/author/flairNewUserSubmission.json5) - If the Author does not have the `vet` flair then flair the Submission with `New User`
* [Flair vetted user Submission](/docs/examples/author/flairNewUserSubmission.json5) - If the Author does have the `vet` flair then flair the Submission with `Vetted`
* Used with other Rules
* [Ignore vetted user](/examples/author/flairNewUserSubmission.json5) - Short-circuit the Check if the Author has the `vet` flair
* [Ignore vetted user](/docs/examples/author/flairNewUserSubmission.json5) - Short-circuit the Check if the Author has the `vet` flair
## Filter
@@ -35,4 +35,4 @@ All **Rules** and **Checks** have an optional `authorIs` property that takes an
### Examples
* [Skip recent activity check based on author](/examples/author/authorFilter.json5) - Skip a Recent Activity check for a set of subreddits if the Author of the Submission has any set of flairs.
* [Skip recent activity check based on author](/docs/examples/author/authorFilter.json5) - Skip a Recent Activity check for a set of subreddits if the Author of the Submission has any set of flairs.

View File

@@ -12,16 +12,14 @@
"lookAt": "submissions",
"thresholds": [
{
"totalCount": 1,
"threshold": ">= 1",
"subreddits": [
"DeFreeKarma",
"FreeKarma4U",
]
}
],
"window": {
"days": 7
}
"window": "7 days"
},
{
"name": "noobmemer",
@@ -51,15 +49,13 @@
"lookAt": "submissions",
"thresholds": [
{
"totalCount": 1,
"threshold": ">= 1",
"subreddits": [
"dankmemes",
]
}
],
"window": {
"days": 7
}
"window": "7 days"
}
],
"actions": [

View File

@@ -24,13 +24,11 @@
"kind": "attribution",
"criteria": [
{
"threshold": "10%",
"window": {
"days": 90
}
"threshold": "> 10%",
"window": "90 days"
},
{
"threshold": "10%",
"threshold": "> 10%",
"window": 100
}
],
@@ -41,16 +39,14 @@
"lookAt": "submissions",
"thresholds": [
{
"totalCount": 1,
"threshold": ">= 1",
"subreddits": [
"DeFreeKarma",
"FreeKarma4U",
]
}
],
"window": {
"days": 7
}
"window": "7 days"
},
{
"name": "memes",
@@ -58,15 +54,13 @@
"lookAt": "submissions",
"thresholds": [
{
"totalCount": 3,
"threshold": ">= 3",
"subreddits": [
"dankmemes",
]
}
],
"window": {
"days": 7
}
"window": "7 days"
}
],
// will NOT run if the Author for this Submission has the flair "vet"

View File

@@ -9,5 +9,5 @@ Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FHistoryJSO
### Examples
* [Low Comment Engagement](/examples/history/lowEngagement.json5) - Check if Author is submitting much more than they comment.
* [OP Comment Engagement](/examples/history/opOnlyEngagement.json5) - Check if Author is mostly engaging only in their own content
* [Low Comment Engagement](/docs/examples/history/lowEngagement.json5) - Check if Author is submitting much more than they comment.
* [OP Comment Engagement](/docs/examples/history/opOnlyEngagement.json5) - Check if Author is mostly engaging only in their own content

View File

@@ -12,14 +12,9 @@
"criteria": [
{
// look at last 90 days of Author's activities
"window": {
"days": 90
},
"window": "90 days",
// trigger if less than 30% of their activities in this time period are comments
"comment": {
"threshold": "30%",
"condition": "<"
}
"comment": "< 30%"
},
]
}

View File

@@ -12,15 +12,9 @@
"criteria": [
{
// look at last 90 days of Author's activities
"window": {
"days": 90
},
// trigger if less than 30% of their activities in this time period are comments
"comment": {
"asOp": true,
"threshold": "60%",
"condition": ">"
}
"window": "90 days",
// trigger if more than 60% of their activities in this time period are comments as OP
"comment": "> 60% OP"
},
]
}

View File

@@ -6,5 +6,5 @@ Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FRecentActi
### Examples
* [Free Karma Subreddits](/examples/recentActivity/freeKarma.json5) - Check if the Author has recently posted in any "free karma" subreddits
* [Submission in Free Karma Subreddits](/examples/recentActivity/freeKarmaOnSubmission.json5) - Check if the Author has posted the Submission this check is running on in any "free karma" subreddits recently
* [Free Karma Subreddits](/docs/examples/recentActivity/freeKarma.json5) - Check if the Author has recently posted in any "free karma" subreddits
* [Submission in Free Karma Subreddits](/docs/examples/recentActivity/freeKarmaOnSubmission.json5) - Check if the Author has posted the Submission this check is running on in any "free karma" subreddits recently

View File

@@ -9,13 +9,14 @@
{
"name": "freekarma",
"kind": "recentActivity",
"useSubmissionAsReference": false,
// when `lookAt` is not present this rule will look for submissions and comments
// lookAt: "submissions"
// lookAt: "comments"
"thresholds": [
{
// for all subreddits, if the number of activities (sub/comment) is equal to or greater than 1 then the rule is triggered
"totalCount": 1,
"threshold": ">= 1",
"subreddits": [
"DeFreeKarma",
"FreeKarma4U",
@@ -25,9 +26,7 @@
}
],
// will look at all of the Author's activities in the last 7 days
"window": {
"days": 7
}
"window": "7 days"
}
],
"actions": [

View File

@@ -17,7 +17,7 @@
"thresholds": [
{
// for all subreddits, if the number of activities (sub/comment) is equal to or greater than 1 then the rule is triggered
"totalCount": 1,
"threshold": ">= 1",
"subreddits": [
"DeFreeKarma",
"FreeKarma4U",
@@ -27,9 +27,7 @@
}
],
// look at all of the Author's submissions in the last 7 days
"window": {
"days": 7
}
"window": "7 days"
}
],
"actions": [

View File

@@ -0,0 +1,49 @@
# Repeat Activity
The **Repeat Activity** rule will check for patterns of repetition in an Author's Submission/Comment history. Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FRepeatActivityJSONConfig?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the rule's properties.
## Tuning
The most critical properties for this Rule are **gapAllowance** and **lookAt**.
### `lookAt`
Determines which Activities from a User's history are checked when looking for repeats.
Can be either:
* `all` -- All of a user's submissions and comments are considered
* `submissions` -- Only a user's submissions are considered
Defaults to `all`
### `gapAllowance`
`gapAllowance` determines how many **non-repeat Activities** are "allowed" between "in a row" submissions. `N` number of non-repeat activities will be thrown away during the count which allows checking for patterns with a bit of "fuzziness".
By default `gapAllowance: 0` so all repeats must be truly consecutive.
___
Consider the following example in a user's history:
* crossposts 2 times
* 1 comment
* crossposts 2 times
* 2 comments
* crossposts 4 times
Your goal is to remove a submission if it has been crossposted **5 times.**
With defaults for lookAt and gapAllowance this rule **would not be triggered** because no set of consecutive submissions was repeated 5 times.
With only `lookAt: "submissions"` this rule **would trigger** because all the comments would be ignored resulting in 8 repeats.
With only `gapAllowance: 1` this rule **would not trigger** because the 2 comment non-repeat would break the "in a row" count.
With only `gapAllowance: 2` this rule **would trigger** because the 1 and 2 comment non-repeats would be thrown out resulting in 8 repeats.
**Note:** `lookAt: "submissions"` should be used with caution because all comments are thrown away. This isn't indicative of real repeat behavior if the user is a heavy commenter. For this reason the default is `all`.
## Examples
* [Crosspost Spamming](/docs/examples/repeatActivity/crosspostSpamming.json5) - Check if an Author is spamming their Submissions across multiple subreddits
* [Burst-posting](/docs/examples/repeatActivity/burstPosting.json5) - Check if Author is crossposting their Submissions in short bursts

View File

@@ -14,11 +14,9 @@
// the number of non-repeat activities (submissions or comments) to ignore between repeat submissions
"gapAllowance": 3,
// if the Author has posted this Submission 6 times, ignoring 3 non-repeat activities between each repeat, then this rule will trigger
"threshold": 6,
"threshold": ">= 6",
// look at all of the Author's submissions in the last 7 days
"window": {
"days": 7
}
"window": "7 days"
}
],
"actions": [

View File

@@ -12,11 +12,9 @@
// will only look at Submissions in Author's history that contain the same content (link) as the Submission this check was initiated by
"useSubmissionAsReference": true,
// if the Author has posted this Submission 5 times consecutively then this rule will trigger
"threshold": 5,
"threshold": ">= 5",
// look at all of the Author's submissions in the last 7 days
"window": {
"days": 7
}
"window": "7 days"
}
],
"actions": [

View File

@@ -8,13 +8,13 @@ Context Bot supports reading and writing [User Notes](https://www.reddit.com/r/t
## Filter
User Notes are an additional criteria on [AuthorCriteria](https://json-schema.app/view/%23%2Fdefinitions%2FAuthorCriteria?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) that can be used alongside other Author properties for both [filtering rules and in the AuthorRule.](/examples/author/)
User Notes are an additional criteria on [AuthorCriteria](https://json-schema.app/view/%23%2Fdefinitions%2FAuthorCriteria?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) that can be used alongside other Author properties for both [filtering rules and in the AuthorRule.](/docs/examples/author/)
Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FUserNoteCriteria?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the **UserNoteCriteria** object that can be used in AuthorCriteria.
### Examples
* [Do not tag user with Good User note](/examples/userNotes/usernoteFilter.json5)
* [Do not tag user with Good User note](/docs/examples/userNotes/usernoteFilter.json5)
## Action
@@ -23,4 +23,4 @@ A User Note can also be added to the Author of a Submission or Comment with the
### Examples
* [Add note on user doing self promotion](/examples/userNotes/usernoteSP.json5)
* [Add note on user doing self promotion](/docs/examples/userNotes/usernoteSP.json5)

View File

@@ -20,13 +20,11 @@
},
"criteria": [
{
"threshold": "10%",
"window": {
"days": 90
}
"threshold": "> 10%",
"window": "90 days"
},
{
"threshold": "10%",
"threshold": "> 10%",
"window": 100
}
],
@@ -39,7 +37,7 @@
// https://github.com/toolbox-team/reddit-moderator-toolbox/wiki/Subreddit-Wikis%3A-usernotes#working-with-note-types
"type": "spamwarn",
// content is mustache templated as usual
"content": "Self Promotion: {{rules.attr10all.refDomainTitle}} {{rules.attr10sub.largestPercent}}%"
"content": "Self Promotion: {{rules.attr10all.titlesDelim}} {{rules.attr10sub.largestPercent}}%"
}
]
}

View File

@@ -11,13 +11,11 @@
"kind": "attribution",
"criteria": [
{
"threshold": "10%",
"window": {
"days": 90
}
"threshold": "> 10%",
"window": "90 days"
},
{
"threshold": "10%",
"threshold": "> 10%",
"window": 100
}
],
@@ -30,7 +28,7 @@
// https://github.com/toolbox-team/reddit-moderator-toolbox/wiki/Subreddit-Wikis%3A-usernotes#working-with-note-types
"type": "spamwarn",
// content is mustache templated as usual
"content": "Self Promotion: {{rules.attr10all.refDomainTitle}} {{rules.attr10sub.largestPercent}}%"
"content": "Self Promotion: {{rules.attr10all.titlesDelim}} {{rules.attr10sub.largestPercent}}%"
}
]
}

19
docs/gettingStarted.md Normal file
View File

@@ -0,0 +1,19 @@
### Creating Your Configuration
#### Get the raw contents of the configuration
* In a new tab open the github page for the configuration you want ([example](/docs/examples/repeatActivity/crosspostSpamming.json5))
* Click the **Raw** button...keep this tab open and move on to the next step
#### Edit your wiki configuration
* Visit the wiki page of the subreddit you want the bot to moderate
* Using default bot settings this will be `https://old.reddit.com/r/YOURSUBREDDIT/wiki/botconfig/contextbot`
* If the page does not exist create it, otherwise click **Edit**
* Copy-paste the configuration into the wiki text box
* In the previous tab you opened (for the configuration) **Select All** (Ctrl+A), then **Copy**
* On the wiki page **Paste** into the text box
* Save the edited wiki page
* Ensure the wiki page visibility is restricted
* On the wiki page click **settings** (**Page settings** in new reddit)
* Check the box for **Only mods may edit and view** and then **save**

View File

@@ -0,0 +1,335 @@
The **Operator** configuration refers to configuration used to configure the actual application/bot. This is different
from the **Subreddit** configuration that is defined in each Subreddit's wiki and determines the rules/actions for
activities the Bot runs on.
# Table of Contents
* [Minimum Required Configuration](#minimum-required-configuration)
* [Defining Configuration](#defining-configuration)
* [Examples](#example-configurations)
* [Minimum Config](#minimum-config)
* [Using Config Overrides](#using-config-overrides)
* [Cache Configuration](#cache-configuration)
# Minimum Required Configuration
The minimum required configuration variables to run the bot on subreddits are:
* clientId
* clientSecret
* refreshToken
* accessToken
However, only **clientId** and **clientSecret** are required to run the **oauth helper** mode to generate the last two
configuration variables.
# Defining Configuration
RCB can be configured using **any or all** of the approaches below. **At each level ALL configuration values are
optional** but some are required depending on the mode of operation for the application.
Any values defined at a **lower-listed** level of configuration will override any values from a higher-listed
configuration.
* **ENV** -- Environment variables loaded from an [`.env`](https://github.com/toddbluhm/env-cmd) file (path may be
specified with `--file` cli argument)
* **ENV** -- Any already existing environment variables (exported on command line/terminal profile/etc.)
* **FILE** -- Values specified in a JSON configuration file using the structure shown below (TODO example json file)
* **ARG** -- Values specified as CLI arguments to the program (see [Usage](/README.md#usage)
or `node src/index.js run help` for details)
In the below configuration, if the variable is available at a level of configuration other than **FILE** it will be
noted with the same symbol as above. The value shown is the default.
**NOTE:** To load a JSON configuration (for **FILE**) use the `-c` cli argument EX: `node src/index.js -c /path/to/JSON/config.json`
```js
const config = {
operator: {
// Username of the reddit user operating this application, used for displaying OP level info/actions in UI
//
// ENV => OPERATOR
// ARG => --operator <name>
name: undefined,
// An optional name to display who is operating this application in the UI
//
// ENV => OPERATOR_DISPLAY
// ARG => --operator <name>
display: undefined,
},
// Values required to interact with Reddit's API
credentials: {
// Client ID for your Reddit application
//
// ENV => CLIENT_ID
// ARG => --clientId <id>
clientId: undefined,
// Client Secret for your Reddit application
//
// ENV => CLIENT_SECRET
// ARG => --clientSecret <secret>
clientSecret: undefined,
// Redirect URI for your Reddit application
//
// ENV => REDIRECT_URI
// ARG => --redirectUri <uri>
redirectUri: undefined,
// Access token retrieved from authenticating an account with your Reddit Application
//
// ENV => ACCESS_TOKEN
// ARG => --accessToken <token>
accessToken: undefined,
// Refresh token retrieved from authenticating an account with your Reddit Application
//
// ENV => REFRESH_TOKEN
// ARG => --refreshToken <token>
refreshToken: undefined
},
logging: {
// Minimum level to log at.
// Must be one of: error, warn, info, verbose, debug
//
// ENV => LOG_LEVEL
// ARG => --logLevel <level>
level: 'verbose',
// Absolute path to directory to store rotated logs in.
//
// Leaving undefined disables rotating logs
// Use ENV => true or ARG => --logDir to log to the current directory under /logs folder
//
// ENV => LOG_DIR
// ARG => --logDir [dir]
path: undefined,
},
snoowrap: {
// Proxy endpoint to make Snoowrap requests to
//
// ENV => PROXY
// ARG => --proxy <proxyEndpoint>
proxy: undefined,
// Set Snoowrap to log debug statements. If undefined will debug based on current log level
//
// ENV => SNOO_DEBUG
// ARG => --snooDebug
debug: false,
},
subreddits: {
// Names of subreddits for bot to run on
//
    // If undefined bot will run on all subreddits it is a moderator of
//
// ENV => SUBREDDITS (comma-separated)
// ARG => --subreddits <list...>
names: undefined,
// If true set all subreddits in dry run mode, overriding configurations
//
// ENV => DRYRUN
// ARG => --dryRun
dryRun: false,
// The default relative url to contextbot wiki page EX https://reddit.com/r/subreddit/wiki/<path>
//
// ENV => WIKI_CONFIG
// ARG => --wikiConfig <path>
wikiConfig: 'botconfig/contextbot',
// Interval, in seconds, to perform application heartbeat
//
// ENV => HEARTBEAT
// ARG => --heartbeat <sec>
heartbeatInterval: 300,
},
polling: {
// If set to true all subreddits polling unmoderated/modqueue with default polling settings will share a request to "r/mod"
// otherwise each subreddit will poll its own mod view
//
// ENV => SHARE_MOD
// ARG => --shareMod
sharedMod: false,
// Default interval, in seconds, to poll activity sources at
interval: 30,
},
web: {
// Whether the web server interface should be started
// In most cases this does not need to be specified as the application will automatically detect if it is possible to start it --
// use this to specify 'cli' if you encounter errors with port/address or are paranoid
//
// ENV => WEB
// ARG => 'node src/index.js run [interface]' -- interface can be 'web' or 'cli'
enabled: true,
// Set the port for the web interface
//
// ENV => PORT
// ARG => --port <number>
port: 8085,
session: {
// The cache provider for sessions
// can be 'memory', 'redis', or a custom config
provider: 'memory',
// The secret value used to encrypt session data
// If provider is persistent (redis) specifying a value here will ensure sessions are valid between application restarts
//
// If undefined a random string is generated
secret: undefined,
},
// The default log level to filter to in the web interface
// If not specified will be same as application log level
logLevel: undefined,
// Maximum number of log statements to keep in memory for each subreddit
maxLogs: 200,
},
caching: {
// The default maximum age of cached data for an Author's history
//
// ENV => AUTHOR_TTL
// ARG => --authorTTL <sec>
authorTTL: 60,
// The default maximum age of cached usernotes for a subreddit
userNotesTTL: 300,
// The default maximum age of cached content, retrieved from an external URL or subreddit wiki, used for comments/ban/footer
wikiTTL: 300,
// The cache provider used for caching reddit API responses and some internal results
// can be 'memory', 'redis', or a custom config
provider: 'memory'
},
api: {
// The number of API requests remaining at which "slow mode" should be enabled
//
    // ENV => SOFT_LIMIT
// ARG => --softLimit <limit>
softLimit: 250,
    // The number of API requests remaining at which all subreddit event polling should be paused
//
// ENV => HARD_LIMIT
// ARG => --hardLimit <limit>
hardLimit: 50,
}
}
```
# Example Configurations
## Minimum Config
Below are examples of the minimum required config to run the application using all three config approaches independently.
Using **FILE**
<details>
```json
{
"credentials": {
"clientId": "f4b4df1c7b2",
"clientSecret": "34v5q1c56ub",
"refreshToken": "34_f1w1v4",
"accessToken": "p75_1c467b2"
}
}
```
</details>
Using **ENV** (`.env`)
<details>
```
CLIENT_ID=f4b4df1c7b2
CLIENT_SECRET=34v5q1c56ub
REFRESH_TOKEN=34_f1w1v4
ACCESS_TOKEN=p75_1c467b2
```
</details>
Using **ARG**
<details>
```
node src/index.js run --clientId=f4b4df1c7b2 --clientSecret=34v5q1c56ub --refreshToken=34_f1w1v4 --accessToken=p75_1c467b2
```
</details>
## Using Config Overrides
Using all three configs together:
**FILE**
<details>
```json
{
"credentials": {
"clientId": "f4b4df1c7b2",
"refreshToken": "34_f1w1v4",
"accessToken": "p75_1c467b2"
}
}
```
</details>
**ENV** (`.env`)
<details>
```
CLIENT_SECRET=34v5q1c56ub
SUBREDDITS=sub1,sub2,sub3
PORT=9008
LOG_LEVEL=DEBUG
```
</details>
**ARG**
<details>
```
node src/index.js run --subreddits=sub1
```
</details>
Produces these variables at runtime for the application:
```
clientId: f4b4df1c7b2
clientSecret: 34v5q1c56ub
refreshToken: 34_f1w1v4
accessToken: p75_1c467b2
subreddits: sub1
port: 9008
log level: debug
```
# Cache Configuration
RCB implements two caching backend **providers**. By default all providers use `memory`:
* `memory` -- in-memory (non-persistent) backend
* `redis` -- [Redis](https://redis.io/) backend
Each `provider` object in configuration can be specified as:
* one of the above **strings** to use the **defaults settings** or
* an **object** with keys to override default settings
A caching object in the json configuration:
```json5
{
"provider": {
"store": "memory", // one of "memory" or "redis"
"ttl": 60, // the default max age of a key in seconds
"max": 500, // the maximum number of keys in the cache (for "memory" only)
// the below properties only apply to 'redis' provider
"host": 'localhost',
"port": 6379,
"auth_pass": null,
"db": 0,
}
}
```

View File

@@ -1,43 +0,0 @@
# Examples
This directory contains examples of valid, ready-to-go configurations for Context Bot for the purpose of:
* showcasing what the bot can do
* providing best practices for writing your configuration
* providing generally useful configurations **that can be used immediately** or as a jumping-off point for your configuration
### Creating Your Configuration
#### Get the raw contents of the configuration
* In a new tab open the github page for the configuration you want ([example](/examples/repeatActivity/crosspostSpamming.json5))
* Click the **Raw** button...keep this tab open and move on to the next step
#### Edit your wiki configuration
* Visit the wiki page of the subreddit you want the bot to moderate
* Using default bot settings this will be `https://old.reddit.com/r/YOURSUBREDDIT/wiki/botconfig/contextbot`
* If the page does not exist create it, otherwise click **Edit**
* Copy-paste the configuration into the wiki text box
* In the previous tab you opened (for the configuration) **Select All** (Ctrl+A), then **Copy**
* On the wiki page **Paste** into the text box
* Save the edited wiki page
* Ensure the wiki page visibility is restricted
* On the wiki page click **settings** (**Page settings** in new reddit)
* Check the box for **Only mods may edit and view** and then **save**
### Examples Overview
* Rules
* [Attribution](/examples/attribution)
* [Recent Activity](/examples/recentActivity)
* [Repeat Activity](/examples/repeatActivity)
* [History](/examples/history)
* [Author](/examples/author)
* [Toolbox User Notes](/examples/userNotes)
* [Advanced Concepts](/examples/advancedConcepts)
* [Rule Sets](/examples/advancedConcepts/ruleSets.json5)
* [Name Rules](/examples/advancedConcepts/ruleNameReuse.json5)
* [Check Ordering](/examples/advancedConcepts)
* Subreddit-ready examples
* Coming soon...

View File

@@ -1,8 +0,0 @@
# Repeat Activity
The **Repeat Activity** rule will check for patterns of repetition in an Author's Submission/Comment history. Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FRepeatActivityJSONConfig?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the rule's properties.
### Examples
* [Crosspost Spamming](/examples/repeatActivity/crosspostSpamming.json5) - Check if an Author is spamming their Submissions across multiple subreddits
* [Burst-posting](/examples/repeatActivity/burstPosting.json5) - Check if Author is crossposting their Submissions in short bursts

2483
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -8,11 +8,12 @@
"build": "tsc",
"start": "node server.js",
"guard": "ts-auto-guard src/JsonConfig.ts",
"schema": "npm run -s schema-app & npm run -s schema-ruleset & npm run -s schema-rule & npm run -s schema-action",
"schema": "npm run -s schema-app & npm run -s schema-ruleset & npm run -s schema-rule & npm run -s schema-action & npm run -s schema-config",
"schema-app": "typescript-json-schema tsconfig.json JSONConfig --out src/Schema/App.json --required --tsNodeRegister --refs",
"schema-ruleset": "typescript-json-schema tsconfig.json RuleSetJson --out src/Schema/RuleSet.json --required --tsNodeRegister --refs",
"schema-rule": "typescript-json-schema tsconfig.json RuleJson --out src/Schema/Rule.json --required --tsNodeRegister --refs",
"schema-action": "typescript-json-schema tsconfig.json ActionJson --out src/Schema/Action.json --required --tsNodeRegister --refs",
"schema-config": "typescript-json-schema tsconfig.json OperatorJsonConfig --out src/Schema/OperatorConfig.json --required --tsNodeRegister --refs",
"schemaNotWorking": "./node_modules/.bin/ts-json-schema-generator -f tsconfig.json -p src/JsonConfig.ts -t JSONConfig --out src/Schema/vegaSchema.json",
"circular": "madge --circular --extensions ts src/index.ts",
"circular-graph": "madge --image graph.svg --circular --extensions ts src/index.ts"
@@ -24,32 +25,53 @@
"author": "",
"license": "ISC",
"dependencies": {
"@awaitjs/express": "^0.8.0",
"ajv": "^7.2.4",
"commander": "^7.2.0",
"async": "^3.2.0",
"body-parser": "^1.19.0",
"cache-manager": "^3.4.4",
"cache-manager-redis-store": "^2.0.0",
"commander": "^8.0.0",
"dayjs": "^1.10.5",
"deepmerge": "^4.2.2",
"ejs": "^3.1.6",
"env-cmd": "^10.1.0",
"es6-error": "^4.1.1",
"express": "^4.17.1",
"express-session": "^1.17.2",
"express-session-cache-manager": "^1.0.2",
"express-socket.io-session": "^1.3.5",
"fast-deep-equal": "^3.1.3",
"fuse.js": "^6.4.6",
"he": "^1.2.0",
"js-yaml": "^4.1.0",
"json5": "^2.2.0",
"memory-cache": "^0.2.0",
"mustache": "^4.2.0",
"node-fetch": "^2.6.1",
"object-hash": "^2.2.0",
"p-event": "^4.2.0",
"pako": "^0.2.6",
"safe-stable-stringify": "^1.1.1",
"snoostorm": "^1.5.2",
"snoowrap": "^1.23.0",
"socket.io": "^4.1.3",
"tcp-port-used": "^1.0.2",
"typescript": "^4.3.4",
"webhook-discord": "^3.7.7",
"winston": "FoxxMD/winston#fbab8de969ecee578981c77846156c7f43b5f01e",
"winston-daily-rotate-file": "^4.5.5",
"zlib": "^1.0.5"
},
"devDependencies": {
"@tsconfig/node14": "^1.0.0",
"@types/async": "^3.2.7",
"@types/cache-manager": "^3.4.2",
"@types/cache-manager-redis-store": "^2.0.0",
"@types/express": "^4.17.13",
"@types/express-session": "^1.17.4",
"@types/express-socket.io-session": "^1.3.6",
"@types/he": "^1.1.1",
"@types/js-yaml": "^4.0.1",
"@types/lru-cache": "^5.1.1",
"@types/memory-cache": "^0.2.1",
"@types/minimist": "^1.2.1",
"@types/mustache": "^4.1.1",
@@ -57,6 +79,7 @@
"@types/node-fetch": "^2.5.10",
"@types/object-hash": "^2.1.0",
"@types/pako": "^1.0.1",
"@types/tcp-port-used": "^1.0.0",
"ts-auto-guard": "*",
"ts-json-schema-generator": "^0.93.0",
"typescript-json-schema": "^0.50.1"

View File

@@ -8,13 +8,14 @@ export class ApproveAction extends Action {
return 'Approve';
}
async process(item: Comment | Submission, ruleResults: RuleResult[]): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
const dryRun = runtimeDryrun || this.dryRun;
//snoowrap typing issue, thinks comments can't be locked
// @ts-ignore
if (item.approved) {
this.logger.warn('Item is already approved');
}
if (!this.dryRun) {
if (!dryRun) {
// @ts-ignore
await item.approve();
}

View File

@@ -33,7 +33,8 @@ export class BanAction extends Action {
return 'Ban';
}
async process(item: Comment | Submission, ruleResults: RuleResult[]): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
const dryRun = runtimeDryrun || this.dryRun;
const content = this.message === undefined ? undefined : await this.resources.getContent(this.message, item.subreddit);
const renderedBody = content === undefined ? undefined : await renderContent(content, item, ruleResults, this.resources.userNotes);
const renderedContent = renderedBody === undefined ? undefined : `${renderedBody}${await this.resources.generateFooter(item, this.footer)}`;
@@ -45,10 +46,12 @@ export class BanAction extends Action {
const durText = this.duration === undefined ? 'permanently' : `for ${this.duration} days`;
this.logger.info(`Banning ${item.author.name} ${durText}${this.reason !== undefined ? ` (${this.reason})` : ''}`);
this.logger.verbose(`\r\n${banPieces.join('\r\n')}`);
if (!this.dryRun) {
if (!dryRun) {
// @ts-ignore
await item.subreddit.banUser({
name: item.author.id,
const fetchedSub = await item.subreddit.fetch();
const fetchedName = await item.author.name;
await fetchedSub.banUser({
name: fetchedName,
banMessage: renderedContent === undefined ? undefined : renderedContent,
banReason: this.reason,
banNote: this.note,

View File

@@ -32,7 +32,8 @@ export class CommentAction extends Action {
return 'Comment';
}
async process(item: Comment | Submission, ruleResults: RuleResult[]): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
const dryRun = runtimeDryrun || this.dryRun;
const content = await this.resources.getContent(this.content, item.subreddit);
const body = await renderContent(content, item, ruleResults, this.resources.userNotes);
@@ -46,18 +47,18 @@ export class CommentAction extends Action {
return;
}
let reply: Comment;
if(!this.dryRun) {
if(!dryRun) {
// @ts-ignore
reply = await item.reply(renderedContent);
}
if (this.lock) {
if (!this.dryRun) {
if (!dryRun) {
// snoopwrap typing issue, thinks comments can't be locked
// @ts-ignore
await item.lock();
}
}
if (this.distinguish && !this.dryRun) {
if (this.distinguish && !dryRun) {
// @ts-ignore
await reply.distinguish({sticky: this.sticky});
}

View File

@@ -8,13 +8,14 @@ export class LockAction extends Action {
return 'Lock';
}
async process(item: Comment | Submission, ruleResults: RuleResult[]): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
const dryRun = runtimeDryrun || this.dryRun;
//snoowrap typing issue, thinks comments can't be locked
// @ts-ignore
if (item.locked) {
this.logger.warn('Item is already locked');
}
if (!this.dryRun) {
if (!dryRun) {
//snoowrap typing issue, thinks comments can't be locked
// @ts-ignore
await item.lock();

View File

@@ -9,14 +9,15 @@ export class RemoveAction extends Action {
return 'Remove';
}
async process(item: Comment | Submission, ruleResults: RuleResult[]): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
const dryRun = runtimeDryrun || this.dryRun;
// issue with snoowrap typings, doesn't think prop exists on Submission
// @ts-ignore
if (activityIsRemoved(item)) {
this.logger.warn('Item is already removed');
return;
}
if (!this.dryRun) {
if (!dryRun) {
// @ts-ignore
await item.remove();
}

View File

@@ -23,12 +23,13 @@ export class ReportAction extends Action {
return 'Report';
}
async process(item: Comment | Submission, ruleResults: RuleResult[]): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
const dryRun = runtimeDryrun || this.dryRun;
const content = await this.resources.getContent(this.content, item.subreddit);
const renderedContent = await renderContent(content, item, ruleResults, this.resources.userNotes);
this.logger.verbose(`Contents:\r\n${renderedContent}`);
const truncatedContent = reportTrunc(renderedContent);
if(!this.dryRun) {
if(!dryRun) {
// @ts-ignore
await item.report({reason: truncatedContent});
}

View File

@@ -24,7 +24,8 @@ export class UserNoteAction extends Action {
return 'User Note';
}
async process(item: Comment | Submission, ruleResults: RuleResult[]): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
const dryRun = runtimeDryrun || this.dryRun;
const content = await this.resources.getContent(this.content, item.subreddit);
const renderedContent = await renderContent(content, item, ruleResults, this.resources.userNotes);
this.logger.verbose(`Note:\r\n(${this.type}) ${renderedContent}`);
@@ -37,7 +38,7 @@ export class UserNoteAction extends Action {
return;
}
}
if (!this.dryRun) {
if (!dryRun) {
await this.resources.userNotes.addUserNote(item, this.type, renderedContent);
} else if (!await this.resources.userNotes.warningExists(this.type)) {
this.logger.warn(`UserNote type '${this.type}' does not exist. If you meant to use this please add it through Toolbox first.`);

View File

@@ -46,7 +46,8 @@ export abstract class Action {
return this.name === this.getKind() ? this.getKind() : `${this.getKind()} - ${this.name}`;
}
async handle(item: Comment | Submission, ruleResults: RuleResult[]): Promise<void> {
async handle(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
const dryRun = runtimeDryrun || this.dryRun;
let actionRun = false;
const [itemPass, crit] = isItem(item, this.itemIs, this.logger);
if (!itemPass) {
@@ -57,7 +58,7 @@ export abstract class Action {
if (this.authorIs.include !== undefined && this.authorIs.include.length > 0) {
for (const auth of this.authorIs.include) {
if (await this.resources.testAuthorCriteria(item, auth)) {
await this.process(item, ruleResults);
await this.process(item, ruleResults, runtimeDryrun);
return true;
}
}
@@ -67,7 +68,7 @@ export abstract class Action {
if (!actionRun && this.authorIs.exclude !== undefined && this.authorIs.exclude.length > 0) {
for (const auth of this.authorIs.exclude) {
if (await this.resources.testAuthorCriteria(item, auth, false)) {
await this.process(item, ruleResults);
await this.process(item, ruleResults, runtimeDryrun);
return true;
}
}
@@ -78,14 +79,14 @@ export abstract class Action {
}
const authorRunResults = await authorRun();
if (null === authorRunResults) {
await this.process(item, ruleResults);
await this.process(item, ruleResults, runtimeDryrun);
} else if (!authorRunResults) {
return;
}
this.logger.verbose(`${this.dryRun ? 'DRYRUN - ' : ''}Done`);
this.logger.verbose(`${dryRun ? 'DRYRUN - ' : ''}Done`);
}
abstract process(item: Comment | Submission, ruleResults: RuleResult[]): Promise<void>;
abstract process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryun?: boolean): Promise<void>;
}
export interface ActionOptions extends ActionConfig {

View File

@@ -1,14 +1,25 @@
import Snoowrap, { Subreddit } from "snoowrap";
import Snoowrap, {Subreddit} from "snoowrap";
import {Manager} from "./Subreddit/Manager";
import winston, {Logger} from "winston";
import {argParseInt, labelledFormat, parseBool, parseFromJsonOrYamlToObject, parseSubredditName, sleep} from "./util";
import snoowrap from "snoowrap";
import {
argParseInt,
createRetryHandler, formatNumber,
labelledFormat, logLevels,
parseBool, parseDuration,
parseFromJsonOrYamlToObject,
parseSubredditName,
sleep
} from "./util";
import pEvent from "p-event";
import EventEmitter from "events";
import CacheManager from './Subreddit/SubredditResources';
import dayjs, {Dayjs} from "dayjs";
import LoggedError from "./Utils/LoggedError";
import ConfigParseError from "./Utils/ConfigParseError";
import {ProxiedSnoowrap, RequestTrackingSnoowrap} from "./Utils/SnoowrapClients";
import {ModQueueStream, UnmoderatedStream} from "./Subreddit/Streams";
import {getLogger} from "./Utils/loggerFactory";
import {DurationString, OperatorConfig, PAUSED, RUNNING, STOPPED, SYSTEM, USER} from "./Common/interfaces";
import { Duration } from "dayjs/plugin/duration";
const {transports} = winston;
@@ -30,113 +41,163 @@ export class App {
wikiLocation: string;
dryRun?: true | undefined;
heartbeatInterval: number;
apiLimitWarning: number;
nextHeartbeat?: Dayjs;
heartBeating: boolean = false;
//apiLimitWarning: number;
softLimit: number | string = 250;
hardLimit: number | string = 50;
nannyMode?: 'soft' | 'hard';
nextExpiration!: Dayjs;
botName?: string;
startedAt: Dayjs = dayjs();
sharedModqueue: boolean = false;
constructor(options: any = {}) {
apiSample: number[] = [];
interval: any;
apiRollingAvg: number = 0;
apiEstDepletion?: Duration;
depletedInSecs: number = 0;
constructor(config: OperatorConfig) {
const {
subreddits = [],
clientId,
clientSecret,
accessToken,
refreshToken,
logDir = process.env.LOG_DIR || `${process.cwd()}/logs`,
logLevel = process.env.LOG_LEVEL || 'verbose',
wikiConfig = process.env.WIKI_CONFIG || 'botconfig/contextbot',
snooDebug = process.env.SNOO_DEBUG || false,
dryRun = process.env.DRYRUN || false,
heartbeat = process.env.HEARTBEAT || 300,
apiLimitWarning = process.env.API_REMAINING || 250,
version,
authorTTL = process.env.AUTHOR_TTL || 10000,
disableCache = process.env.DISABLE_CACHE || false,
} = options;
subreddits: {
names = [],
wikiConfig,
dryRun,
heartbeatInterval,
},
credentials: {
clientId,
clientSecret,
refreshToken,
accessToken,
},
snoowrap: {
proxy,
debug,
},
polling: {
sharedMod,
},
caching: {
authorTTL,
provider: {
store
}
},
api: {
softLimit,
hardLimit,
}
} = config;
CacheManager.authorTTL = argParseInt(authorTTL);
CacheManager.enabled = !parseBool(disableCache);
CacheManager.setDefaultsFromConfig(config);
this.dryRun = parseBool(dryRun) === true ? true : undefined;
this.heartbeatInterval = argParseInt(heartbeat);
this.apiLimitWarning = argParseInt(apiLimitWarning);
this.heartbeatInterval = heartbeatInterval;
//this.apiLimitWarning = argParseInt(apiLimitWarning);
this.softLimit = softLimit;
this.hardLimit = hardLimit;
this.wikiLocation = wikiConfig;
this.sharedModqueue = sharedMod;
const consoleTransport = new transports.Console();
const myTransports = [
consoleTransport,
];
let errorTransports = [];
if (logDir !== false) {
let logPath = logDir;
if (logPath === true) {
logPath = `${process.cwd()}/logs`;
}
const rotateTransport = new winston.transports.DailyRotateFile({
dirname: logPath,
createSymlink: true,
symlinkName: 'contextBot-current.log',
filename: 'contextBot-%DATE%.log',
datePattern: 'YYYY-MM-DD',
maxSize: '5m'
});
// @ts-ignore
myTransports.push(rotateTransport);
errorTransports.push(rotateTransport);
}
const loggerOptions = {
level: logLevel || 'info',
format: labelledFormat(),
transports: myTransports,
levels: {
error: 0,
warn: 1,
info: 2,
http: 3,
verbose: 4,
debug: 5,
trace: 5,
silly: 6
},
exceptionHandlers: errorTransports,
rejectionHandlers: errorTransports,
};
winston.loggers.add('default', loggerOptions);
this.logger = winston.loggers.get('default');
this.logger = getLogger(config.logging);
if (this.dryRun) {
this.logger.info('Running in DRYRUN mode');
}
let subredditsArg = [];
if (subreddits !== undefined) {
if (Array.isArray(subreddits)) {
subredditsArg = subreddits;
} else {
subredditsArg = subreddits.split(',');
}
}
this.subreddits = subredditsArg.map(parseSubredditName);
this.subreddits = names.map(parseSubredditName);
const creds = {
userAgent: `web:contextBot:${version}`,
userAgent: `web:contextBot:dev`,
clientId,
clientSecret,
refreshToken,
accessToken,
};
this.client = new snoowrap(creds);
const missingCreds = [];
for(const [k,v] of Object.entries(creds)) {
if(v === undefined || v === '' || v === null) {
missingCreds.push(k);
}
}
if(missingCreds.length > 0) {
this.logger.error('There are credentials missing that would prevent initializing the Reddit API Client and subsequently the rest of the application');
this.logger.error(`Missing credentials: ${missingCreds.join(', ')}`)
this.logger.info(`If this is a first-time setup use the 'web' command for a web-based guide to configuring your application`);
this.logger.info(`Or check the USAGE section of the readme for the correct naming of these arguments/environment variables`);
throw new LoggedError(`Missing credentials: ${missingCreds.join(', ')}`);
}
this.client = proxy === undefined ? new Snoowrap(creds) : new ProxiedSnoowrap({...creds, proxy});
this.client.config({
warnings: true,
maxRetryAttempts: 5,
debug: parseBool(snooDebug),
debug,
logger: snooLogWrapper(this.logger.child({labels: ['Snoowrap']})),
continueAfterRatelimitError: true,
});
const retryHandler = createRetryHandler({maxRequestRetry: 8, maxOtherRetry: 1}, this.logger);
const modStreamErrorListener = (name: string) => async (err: any) => {
this.logger.error('Polling error occurred', err);
const shouldRetry = await retryHandler(err);
if(shouldRetry) {
defaultUnmoderatedStream.startInterval();
} else {
for(const m of this.subManagers) {
if(m.modStreamCallbacks.size > 0) {
m.notificationManager.handle('runStateChanged', `${name.toUpperCase()} Polling Stopped`, 'Encountered too many errors from Reddit while polling. Will try to restart on next heartbeat.');
}
}
this.logger.error(`Mod stream ${name.toUpperCase()} encountered too many errors while polling. Will try to restart on next heartbeat.`);
}
}
const defaultUnmoderatedStream = new UnmoderatedStream(this.client, {subreddit: 'mod'});
// @ts-ignore
defaultUnmoderatedStream.on('error', modStreamErrorListener('unmoderated'));
const defaultModqueueStream = new ModQueueStream(this.client, {subreddit: 'mod'});
// @ts-ignore
defaultModqueueStream.on('error', modStreamErrorListener('modqueue'));
CacheManager.modStreams.set('unmoderated', defaultUnmoderatedStream);
CacheManager.modStreams.set('modqueue', defaultModqueueStream);
const onTerm = () => {
for(const m of this.subManagers) {
m.notificationManager.handle('runStateChanged', 'Application Shutdown', 'The application was shutdown');
}
}
process.on('SIGTERM', () => {
onTerm();
});
}
async testClient() {
try {
// @ts-ignore
await this.client.getMe();
this.logger.info('Test API call successful');
} catch (err) {
this.logger.error('An error occurred while trying to initialize the Reddit API Client which would prevent the entire application from running.');
if(err.name === 'StatusCodeError') {
const authHeader = err.response.headers['www-authenticate'];
if (authHeader !== undefined && authHeader.includes('insufficient_scope')) {
this.logger.error('Reddit responded with a 403 insufficient_scope. Please ensure you have chosen the correct scopes when authorizing your account.');
} else if(err.statusCode === 401) {
this.logger.error('It is likely a credential is missing or incorrect. Check clientId, clientSecret, refreshToken, and accessToken');
}
this.logger.error(`Error Message: ${err.message}`);
} else {
this.logger.error(err);
}
err.logged = true;
throw err;
}
}
async buildManagers(subreddits: string[] = []) {
@@ -144,6 +205,7 @@ export class App {
const name = await this.client.getMe().name;
this.logger.info(`Reddit API Limit Remaining: ${this.client.ratelimitRemaining}`);
this.logger.info(`Authenticated Account: /u/${name}`);
this.botName = name;
for (const sub of await this.client.getModeratedSubreddits()) {
// TODO don't know a way to check permissions yet
availSubs.push(sub);
@@ -153,7 +215,7 @@ export class App {
let subsToRun: Subreddit[] = [];
const subsToUse = subreddits.length > 0 ? subreddits.map(parseSubredditName) : this.subreddits;
if (subsToUse.length > 0) {
this.logger.info(`User-defined subreddit constraints detected (CLI argument or environmental variable), will try to run on: ${subsToUse.join(', ')}`);
this.logger.info(`Operator-defined subreddit constraints detected (CLI argument or environmental variable), will try to run on: ${subsToUse.join(', ')}`);
for (const sub of subsToUse) {
const asub = availSubs.find(x => x.display_name.toLowerCase() === sub.toLowerCase())
if (asub === undefined) {
@@ -173,41 +235,16 @@ export class App {
let subSchedule: Manager[] = [];
// get configs for subs we want to run on and build/validate them
for (const sub of subsToRun) {
let content = undefined;
let wiki;
const manager = new Manager(sub, this.client, this.logger, {dryRun: this.dryRun, sharedModqueue: this.sharedModqueue});
try {
// @ts-ignore
wiki = await sub.getWikiPage(this.wikiLocation).fetch();
content = wiki.content_md;
await manager.parseConfiguration('system', true, {suppressNotification: true});
} catch (err) {
this.logger.error(`[${sub.display_name_prefixed}] Could not read wiki configuration. Please ensure the page https://reddit.com${sub.url}wiki/${this.wikiLocation} exists and is readable -- error: ${err.message}`);
continue;
}
if(content === '') {
this.logger.error(`[${sub.display_name_prefixed}] Wiki page contents was empty`);
continue;
}
const [configObj, jsonErr, yamlErr] = parseFromJsonOrYamlToObject(content);
if (configObj === undefined) {
this.logger.error(`[${sub.display_name_prefixed}] Could not parse wiki page contents as JSON or YAML:`);
this.logger.error(jsonErr);
this.logger.error(yamlErr);
continue;
}
try {
const manager = new Manager(sub, this.client, this.logger, configObj, {dryRun: this.dryRun});
manager.lastWikiCheck = dayjs();
manager.lastWikiRevision = dayjs.unix(wiki.revision_date);
subSchedule.push(manager);
} catch (err) {
if(!(err instanceof LoggedError)) {
this.logger.error(`[${sub.display_name_prefixed}] Config was not valid`, err);
if (!(err instanceof LoggedError)) {
this.logger.error(`Config was not valid:`, {subreddit: sub.display_name_prefixed});
this.logger.error(err, {subreddit: sub.display_name_prefixed});
}
}
subSchedule.push(manager);
}
this.subManagers = subSchedule;
}
@@ -216,86 +253,209 @@ export class App {
try {
this.heartBeating = true;
while (true) {
this.nextHeartbeat = dayjs().add(this.heartbeatInterval, 'second');
await sleep(this.heartbeatInterval * 1000);
const heartbeat = `HEARTBEAT -- Reddit API Rate Limit remaining: ${this.client.ratelimitRemaining}`
if (this.apiLimitWarning >= this.client.ratelimitRemaining) {
this.logger.warn(heartbeat);
} else {
this.logger.info(heartbeat);
}
for(const s of this.subManagers) {
const heartbeat = `HEARTBEAT -- API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ~${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion === undefined ? 'N/A' : this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`
this.logger.info(heartbeat);
for (const s of this.subManagers) {
if(s.botState.state === STOPPED && s.botState.causedBy === USER) {
this.logger.debug('Skipping config check/restart on heartbeat due to previously being stopped by user', {subreddit: s.displayLabel});
continue;
}
try {
await s.parseConfiguration();
if(!s.running) {
s.handle();
const newConfig = await s.parseConfiguration();
if(newConfig || (s.queueState.state !== RUNNING && s.queueState.causedBy === SYSTEM))
{
await s.startQueue('system', {reason: newConfig ? 'Config updated on heartbeat triggered reload' : 'Heartbeat detected non-running queue'});
}
if(newConfig || (s.eventsState.state !== RUNNING && s.eventsState.causedBy === SYSTEM))
{
await s.startEvents('system', {reason: newConfig ? 'Config updated on heartbeat triggered reload' : 'Heartbeat detected non-running events'});
}
if(s.botState.state !== RUNNING && s.eventsState.state === RUNNING && s.queueState.state === RUNNING) {
s.botState = {
state: RUNNING,
causedBy: 'system',
}
}
} catch (err) {
s.stop();
this.logger.info('Will retry parsing config on next heartbeat...');
this.logger.info('Stopping event polling to prevent activity processing queue from backing up. Will be restarted when config update succeeds.')
await s.stopEvents('system', {reason: 'Invalid config will cause events to pile up in queue. Will be restarted when config update succeeds (next heartbeat).'});
if(!(err instanceof LoggedError)) {
this.logger.error(err, {subreddit: s.displayLabel});
}
if(this.nextHeartbeat !== undefined) {
this.logger.info(`Will retry parsing config on next heartbeat (in ${dayjs.duration(this.nextHeartbeat.diff(dayjs())).humanize()})`, {subreddit: s.displayLabel});
}
}
}
await this.runModStreams(true);
}
} catch (err) {
this.logger.error('Error occurred during heartbeat', err);
throw err;
} finally {
this.nextHeartbeat = undefined;
this.heartBeating = false;
}
}
async runManagers() {
// basic backoff delay if reddit is under load and not responding
let timeoutCount = 0;
let maxTimeoutCount = 4;
let otherRetryCount = 0;
// not sure should even allow so set to 0 for now
let maxOtherCount = 0;
let keepRunning = true;
let lastErrorAt: Dayjs | undefined;
while (keepRunning) {
try {
for (const manager of this.subManagers) {
if (!manager.running) {
manager.handle();
async runModStreams(notify = false) {
for(const [k,v] of CacheManager.modStreams) {
if(!v.running && v.listeners('item').length > 0) {
v.startInterval();
this.logger.info(`Starting default ${k.toUpperCase()} mod stream`);
if(notify) {
for(const m of this.subManagers) {
if(m.modStreamCallbacks.size > 0) {
m.notificationManager.handle('runStateChanged', `${k.toUpperCase()} Polling Started`, 'Polling was successfully restarted on heartbeat.');
}
}
}
if (this.heartbeatInterval !== 0 && !this.heartBeating) {
this.heartbeat();
}
const emitter = new EventEmitter();
await pEvent(emitter, 'end');
keepRunning = false;
} catch (err) {
if (lastErrorAt !== undefined && dayjs().diff(lastErrorAt, 'minute') >= 5) {
// if its been longer than 5 minutes since last error clear counters
timeoutCount = 0;
otherRetryCount = 0;
}
lastErrorAt = dayjs();
if (err.message.includes('ETIMEDOUT') || (err.code !== undefined && err.code.includes('ETIMEDOUT'))) {
timeoutCount++;
if (timeoutCount > maxTimeoutCount) {
this.logger.error(`Timeouts (${timeoutCount}) exceeded max allowed (${maxTimeoutCount})`);
throw err;
}
// exponential backoff
const ms = (Math.pow(2, timeoutCount - 1) + (Math.random() - 0.3)) * 1000;
this.logger.warn(`Reddit response timed out. Will wait ${ms / 1000} seconds before restarting managers`);
await sleep(ms);
} else {
// linear backoff
otherRetryCount++;
if (maxOtherCount > otherRetryCount) {
throw err;
}
const ms = (3 * 1000) * otherRetryCount;
this.logger.warn(`Non-timeout error occurred. Will wait ${ms / 1000} seconds before restarting managers`);
await sleep(ms);
}
}
}
}
async runManagers() {
if(this.subManagers.every(x => !x.validConfigLoaded)) {
this.logger.warn('All managers have invalid configs!');
}
for (const manager of this.subManagers) {
if (manager.validConfigLoaded && manager.botState.state !== RUNNING) {
await manager.start('system', {reason: 'Caused by application startup'});
}
}
await this.runModStreams();
if (this.heartbeatInterval !== 0 && !this.heartBeating) {
this.heartbeat();
}
this.runApiNanny();
const emitter = new EventEmitter();
await pEvent(emitter, 'end');
}
async runApiNanny() {
while(true) {
await sleep(10000);
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
const nowish = dayjs().add(10, 'second');
if(nowish.isAfter(this.nextExpiration)) {
// it's possible no api calls are being made because of a hard limit
// need to make an api call to update this
// @ts-ignore
await this.client.getMe();
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
}
const rollingSample = this.apiSample.slice(0, 7)
rollingSample.unshift(this.client.ratelimitRemaining);
this.apiSample = rollingSample;
const diff = this.apiSample.reduceRight((acc: number[], curr, index) => {
if(this.apiSample[index + 1] !== undefined) {
const d = Math.abs(curr - this.apiSample[index + 1]);
if(d === 0) {
return [...acc, 0];
}
return [...acc, d/10];
}
return acc;
}, []);
this.apiRollingAvg = diff.reduce((acc, curr) => acc + curr,0) / diff.length; // api requests per second
this.depletedInSecs = this.client.ratelimitRemaining / this.apiRollingAvg; // number of seconds until current remaining limit is 0
this.apiEstDepletion = dayjs.duration({seconds: this.depletedInSecs});
this.logger.debug(`API Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`);
let hardLimitHit = false;
if(typeof this.hardLimit === 'string') {
const hardDur = parseDuration(this.hardLimit);
hardLimitHit = hardDur.asSeconds() > this.apiEstDepletion.asSeconds();
} else {
hardLimitHit = this.hardLimit > this.client.ratelimitRemaining;
}
if(hardLimitHit) {
if(this.nannyMode === 'hard') {
continue;
}
this.logger.info(`Detected HARD LIMIT of ${this.hardLimit} remaining`, {leaf: 'Api Nanny'});
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${this.apiRollingAvg}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
this.logger.info(`All subreddit event polling has been paused`, {leaf: 'Api Nanny'});
for(const m of this.subManagers) {
m.pauseEvents('system');
m.notificationManager.handle('runStateChanged', 'Hard Limit Triggered', `Hard Limit of ${this.hardLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit event polling has been paused.`, 'system', 'warn');
}
this.nannyMode = 'hard';
continue;
}
let softLimitHit = false;
if(typeof this.softLimit === 'string') {
const softDur = parseDuration(this.softLimit);
softLimitHit = softDur.asSeconds() > this.apiEstDepletion.asSeconds();
} else {
softLimitHit = this.softLimit > this.client.ratelimitRemaining;
}
if(softLimitHit) {
if(this.nannyMode === 'soft') {
continue;
}
this.logger.info(`Detected SOFT LIMIT of ${this.softLimit} remaining`, {leaf: 'Api Nanny'});
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
this.logger.info('Trying to detect heavy usage subreddits...', {leaf: 'Api Nanny'});
let threshold = 0.5;
let offenders = this.subManagers.filter(x => {
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
return combinedPerSec > threshold;
});
if(offenders.length === 0) {
threshold = 0.25;
// reduce threshold
offenders = this.subManagers.filter(x => {
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
return combinedPerSec > threshold;
});
}
if(offenders.length > 0) {
this.logger.info(`Slowing subreddits using >- ${threshold}req/s:`, {leaf: 'Api Nanny'});
for(const m of offenders) {
m.delayBy = 1.5;
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
}
} else {
this.logger.info(`Couldn't detect specific offenders, slowing all...`, {leaf: 'Api Nanny'});
for(const m of this.subManagers) {
m.delayBy = 1.5;
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
}
}
this.nannyMode = 'soft';
continue;
}
if(this.nannyMode !== undefined) {
this.logger.info('Turning off due to better conditions...', {leaf: 'Api Nanny'});
for(const m of this.subManagers) {
if(m.delayBy !== undefined) {
m.delayBy = undefined;
m.notificationManager.handle('runStateChanged', 'Normal Processing Resumed', 'Slow Mode has been turned off due to better API conditions', 'system');
}
if(m.queueState.state === PAUSED && m.queueState.causedBy === SYSTEM) {
m.startQueue('system', {reason: 'API Nanny has been turned off due to better API conditions'});
}
if(m.eventsState.state === PAUSED && m.eventsState.causedBy === SYSTEM) {
await m.startEvents('system', {reason: 'API Nanny has been turned off due to better API conditions'});
}
}
this.nannyMode = undefined;
}
}
}

View File

@@ -8,6 +8,10 @@ export class CommentCheck extends Check {
super(options);
const {itemIs = []} = options;
this.itemIs = itemIs;
this.logSummary('comment');
this.logSummary();
}
logSummary() {
super.logSummary('comment');
}
}

View File

@@ -9,6 +9,10 @@ export class SubmissionCheck extends Check {
super(options);
const {itemIs = []} = options;
this.itemIs = itemIs;
this.logSummary('submission');
this.logSummary();
}
logSummary() {
super.logSummary('submission');
}
}

View File

@@ -45,6 +45,7 @@ export class Check implements ICheck {
exclude: AuthorCriteria[]
};
dryRun?: boolean;
notifyOnTrigger: boolean;
resources: SubredditResources;
constructor(options: CheckOptions) {
@@ -54,6 +55,7 @@ export class Check implements ICheck {
condition = 'AND',
rules = [],
actions = [],
notifyOnTrigger = false,
subredditName,
itemIs = [],
authorIs: {
@@ -71,6 +73,7 @@ export class Check implements ICheck {
this.name = name;
this.description = description;
this.notifyOnTrigger = notifyOnTrigger;
this.condition = condition;
this.itemIs = itemIs;
this.authorIs = {
@@ -139,7 +142,7 @@ export class Check implements ICheck {
}
runStats.push(`${this.actions.length} Actions`);
// not sure if this should be info or verbose
this.logger.info(`${type.toUpperCase()} (${this.condition}) => ${runStats.join(' | ')}${this.description !== undefined ? ` => ${this.description}` : ''}`);
this.logger.info(`${type.toUpperCase()} (${this.condition})${this.notifyOnTrigger ? ' ||Notify on Trigger|| ' : ''} => ${runStats.join(' | ')}${this.description !== undefined ? ` => ${this.description}` : ''}`);
if (this.rules.length === 0 && this.itemIs.length === 0 && this.authorIs.exclude.length === 0 && this.authorIs.include.length === 0) {
this.logger.warn('No rules, item tests, or author test found -- this check will ALWAYS PASS!');
}
@@ -242,18 +245,19 @@ export class Check implements ICheck {
}
}
async runActions(item: Submission | Comment, ruleResults: RuleResult[]): Promise<Action[]> {
this.logger.debug(`${this.dryRun ? 'DRYRUN - ' : ''}Running Actions`);
async runActions(item: Submission | Comment, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<Action[]> {
const dr = runtimeDryrun || this.dryRun;
this.logger.debug(`${dr ? 'DRYRUN - ' : ''}Running Actions`);
const runActions: Action[] = [];
for (const a of this.actions) {
try {
await a.handle(item, ruleResults);
await a.handle(item, ruleResults, runtimeDryrun);
runActions.push(a);
} catch (err) {
this.logger.error(`Action ${a.getActionUniqueName()} encountered an error while running`, err);
}
}
this.logger.info(`${this.dryRun ? 'DRYRUN - ' : ''}Ran Actions: ${runActions.map(x => x.getActionUniqueName()).join(' | ')}`);
this.logger.info(`${dr ? 'DRYRUN - ' : ''}Ran Actions: ${runActions.map(x => x.getActionUniqueName()).join(' | ')}`);
return runActions;
}
}
@@ -299,6 +303,7 @@ export interface CheckOptions extends ICheck {
actions: ActionConfig[]
logger: Logger
subredditName: string
notifyOnTrigger?: boolean
}
export interface CheckJson extends ICheck {
@@ -326,6 +331,13 @@ export interface CheckJson extends ICheck {
* @examples [[{"kind": "comment", "content": "this is the content of the comment", "distinguish": true}, {"kind": "lock"}]]
* */
actions: Array<ActionTypeJson>
/**
* If notifications are configured and this is `true` then an `eventActioned` event will be sent when this check is triggered.
*
* @default false
* */
notifyOnTrigger?: boolean,
}
export interface SubmissionCheckJson extends CheckJson {

2
src/Common/defaults.ts Normal file
View File

@@ -0,0 +1,2 @@
// Default options passed to the cache provider itself.
// ttl/checkPeriod appear to be in seconds (matches authorTTL's documented default of 60 seconds)
// -- TODO confirm against the cache-manager store in use
export const cacheOptDefaults = {ttl: 60, max: 500, checkPeriod: 600};
// Default TTLs, in seconds, for cached subreddit resources:
// author activities, Toolbox usernotes, and wiki pages (mirrors TTLConfig defaults)
export const cacheTTLDefaults = {authorTTL: 60, userNotesTTL: 300, wikiTTL: 300};

View File

@@ -1,4 +1,6 @@
import {Duration} from "dayjs/plugin/duration";
import Poll from "snoostorm/out/util/Poll";
import Snoowrap from "snoowrap";
/**
* An ISO 8601 Duration
@@ -72,7 +74,7 @@ export interface ActivityWindowCriteria {
*
* Acceptable values:
*
* **A `string` consisting of a value and a [Day.js](https://day.js.org/docs/en/durations/creating) time unit**
* **A `string` consisting of a value and a [Day.js](https://day.js.org/docs/en/durations/creating) time unit** ([test your value](https://regexr.com/61em3))
*
* * EX `9 days` => Range is `NOW <---> 9 days ago`
*
@@ -80,7 +82,7 @@ export interface ActivityWindowCriteria {
*
* * EX `{"days": 90, "minutes": 15}` => Range is `NOW <---> 90 days and 15 minutes ago`
*
* **An [ISO 8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) `string`**
* **An [ISO 8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) `string`** ([test your value](https://regexr.com/61em9))
*
* * EX `PT15M` => 15 minutes => Range is `NOW <----> 15 minutes ago`
*
@@ -302,12 +304,39 @@ export interface PollingOptionsStrong extends PollingOptions {
interval: number,
}
export interface PollingDefaults {
/**
* The maximum number of Activities to get on every request
* @default 50
* @examples [50]
* */
limit?: number
/**
* Amount of time, in seconds, to wait between requests
*
* @default 30
* @examples [30]
* */
interval?: number,
/**
* Delay processing Activity until it is `N` seconds old
*
* Useful if there are other bots that may process an Activity and you want this bot to run first/last/etc.
*
* If the Activity is already `N` seconds old when it is initially retrieved no refresh of the Activity occurs (no API request is made) and it is immediately processed.
*
* */
delayUntil?: number,
}
/**
* A configuration for where, how, and when to poll Reddit for Activities to process
*
* @examples [{"pollOn": "unmoderated","limit": 25, "interval": 20000}]
* */
export interface PollingOptions {
export interface PollingOptions extends PollingDefaults {
/**
* What source to get Activities from. The source you choose will modify how the bots behaves so choose carefully.
@@ -335,7 +364,7 @@ export interface PollingOptions {
* * they are not initially filtered by Automoderator or
* * after they have been manually approved from modqueue
*
* ## newComm
* ### newComm
*
* Get only new `Comments`
*
@@ -346,44 +375,34 @@ export interface PollingOptions {
*
* */
pollOn: 'unmoderated' | 'modqueue' | 'newSub' | 'newComm'
/**
* The maximum number of Activities to get on every request
* @default 25
* @examples [25]
* */
limit?: number
/**
* Amount of time, in milliseconds, to wait between requests
*
* @default 20000
* @examples [20000]
* */
interval?: number,
}
export interface SubredditCacheConfig {
export interface TTLConfig {
/**
* Amount of time, in milliseconds, author activities (Comments/Submission) should be cached
* @examples [10000]
* @default 10000
* Amount of time, in seconds, author activities (Comments/Submission) should be cached
* @examples [60]
* @default 60
* */
authorTTL?: number;
/**
* Amount of time, in milliseconds, wiki content pages should be cached
* @examples [300000]
* @default 300000
* Amount of time, in seconds, wiki content pages should be cached
* @examples [300]
* @default 300
* */
wikiTTL?: number;
/**
* Amount of time, in milliseconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached
* @examples [60000]
* @default 60000
* @examples [300]
* @default 300
* */
userNotesTTL?: number;
}
export interface SubredditCacheConfig extends TTLConfig {
provider?: CacheProvider | CacheOptions
}
export interface Footer {
/**
* Customize the footer for Actions that send replies (Comment/Ban)
@@ -431,20 +450,20 @@ export interface ManagerOptions {
*
* **A `PollingOptions` object**
*
* If you want to specify non-default preoperties
* If you want to specify non-default properties
*
* ****
* If not specified the default is `["unmoderated"]`
*
* @default [["unmoderated"]]
* @example [["unmoderated","newComm"]]
* */
polling?: (string|PollingOptions)[]
* */
polling?: (string | PollingOptions)[]
/**
* Per-subreddit config for caching TTL values. If set to `false` caching is disabled.
* */
caching?: false | SubredditCacheConfig
caching?: SubredditCacheConfig
/**
* Use this option to override the `dryRun` setting for all `Checks`
@@ -488,6 +507,8 @@ export interface ManagerOptions {
* @example ["shortName"]
* */
nickname?: string
notifications?: NotificationConfig
}
/**
@@ -582,3 +603,236 @@ export interface DomainInfo {
provider?: string,
mediaType?: string
}
// Default polling values: interval in seconds, limit is max Activities fetched per request
// (see PollingDefaults documentation above)
export const DEFAULT_POLLING_INTERVAL = 30;
export const DEFAULT_POLLING_LIMIT = 50;
// Who caused a state change -- the application itself ('system') or a human ('user')
export type Invokee = 'system' | 'user';
export const SYSTEM = 'system';
export const USER = 'user';
// Lifecycle states for event polling / queue processing
export type RunState = 'running' | 'paused' | 'stopped';
export const STOPPED = 'stopped';
export const RUNNING = 'running';
export const PAUSED = 'paused';
export interface NamedGroup {
[name: string]: string
}
export interface GlobalRegExResult {
match: string,
groups: string[],
named: NamedGroup | undefined
}
export interface RegExResult {
matched: boolean,
matches: string[],
global: GlobalRegExResult[]
}
type LogLevel = "error" | "warn" | "info" | "verbose" | "debug";
export type CacheProvider = 'memory' | 'redis' | 'none';
// export type StrongCache = SubredditCacheConfig & {
// provider: CacheOptions
// }
export type StrongCache = {
authorTTL: number,
userNotesTTL: number,
wikiTTL: number
provider: CacheOptions
}
export interface CacheOptions {
store: CacheProvider,
host?: string | undefined,
port?: number | undefined,
auth_pass?: string | undefined,
db?: number | undefined,
ttl?: number,
max?: number
}
// The only notification backend currently implemented
export type NotificationProvider = 'discord';
// Application events that may trigger a notification
export type NotificationEventType = 'runStateChanged' | 'pollingError' | 'eventActioned' | 'configUpdated'
/**
 * Base shape for a configured notification provider.
 * `name` is a user-chosen identifier referenced by NotificationEventConfig.providers.
 */
export interface NotificationProviderConfig {
    name: string
    type: NotificationProvider
}
// Discord provider -- `url` is the webhook URL to post to
export interface DiscordProviderConfig extends NotificationProviderConfig {
    url: string
}
export type NotificationProviders = DiscordProviderConfig;
/**
 * Routes a set of event types to a subset of providers, matched by provider name.
 */
export interface NotificationEventConfig {
    types: NotificationEventType[]
    providers: string[]
}
// Payload handed to an individual notifier implementation
export interface NotificationContent {
    logLevel?: string
    title: string
    body?: string
    footer?: string
}
// Each entry is either a bare list of event types (dispatched to ALL providers)
// or a NotificationEventConfig (dispatched only to the named providers)
export type NotificationEvents = (NotificationEventType[] | NotificationEventConfig)[];
export interface NotificationConfig {
    providers: NotificationProviders[],
    events: NotificationEvents
}
// Runtime interface implemented by concrete notifiers (e.g. DiscordNotifier)
export interface Notifier {
    name: string
    type: string;
    handle: Function
}
export interface ManagerStateChangeOption {
    reason?: string
    // presumably suppresses the notification for this state change -- confirm against manager usage
    suppressNotification?: boolean
}
export interface OperatorJsonConfig {
operator?: {
name?: string,
display?: string,
},
credentials?: {
clientId?: string,
clientSecret?: string,
redirectUri?: string,
accessToken?: string,
refreshToken?: string
},
notifications?: NotificationConfig
logging?: {
level?: LogLevel,
path?: string,
},
snoowrap?: {
proxy?: string,
debug?: boolean,
}
subreddits?: {
names?: string[],
dryRun?: boolean,
wikiConfig?: string,
heartbeatInterval?: number,
},
polling?: PollingDefaults & {
sharedMod?: boolean,
limit?: number,
interval?: number,
},
web?: {
enabled?: boolean,
port?: number,
session?: {
provider?: 'memory' | 'redis' | CacheOptions,
secret?: string,
}
logLevel?: LogLevel,
maxLogs?: number,
}
// caching?: (SubredditCacheConfig & {
// provider?: CacheProvider | CacheOptions | undefined
// }) | CacheProvider | undefined
caching?: {
/**
* Amount of time, in milliseconds, author activities (Comments/Submission) should be cached
* @examples [10000]
* @default 10000
* */
authorTTL?: number;
/**
* Amount of time, in milliseconds, wiki content pages should be cached
* @examples [300000]
* @default 300000
* */
wikiTTL?: number;
/**
* Amount of time, in milliseconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached
* @examples [60000]
* @default 60000
* */
userNotesTTL?: number;
provider?: CacheProvider | CacheOptions
} | CacheProvider
api?: {
softLimit?: number,
hardLimit?: number,
}
}
export interface OperatorConfig extends OperatorJsonConfig {
operator: {
name?: string
display?: string,
},
credentials: {
clientId: string,
clientSecret: string,
redirectUri?: string,
accessToken?: string,
refreshToken?: string
},
notifications?: NotificationConfig
logging: {
level: LogLevel,
path?: string,
},
snoowrap: {
proxy?: string,
debug?: boolean,
}
subreddits: {
names?: string[],
dryRun?: boolean,
wikiConfig: string,
heartbeatInterval: number,
},
polling: {
sharedMod: boolean,
limit: number,
interval: number,
},
web: {
enabled: boolean,
port: number,
session: {
provider: CacheOptions,
secret: string,
}
logLevel?: LogLevel,
maxLogs: number,
}
caching: {
authorTTL: number,
userNotesTTL: number,
wikiTTL: number
provider: CacheOptions
},
api: {
softLimit: number,
hardLimit: number,
}
}
//export type OperatorConfig = Required<OperatorJsonConfig>;
interface CacheTypeStat {
requests: number,
miss: number,
}
export interface ResourceStats {
[key: string]: CacheTypeStat
}

View File

@@ -11,8 +11,9 @@ import {HistoryJSONConfig} from "../Rule/HistoryRule";
import {UserNoteActionJson} from "../Action/UserNoteAction";
import {ApproveActionJson} from "../Action/ApproveAction";
import {BanActionJson} from "../Action/BanAction";
import {RegexRuleJSONConfig} from "../Rule/RegexRule";
export type RuleJson = RecentActivityRuleJSONConfig | RepeatActivityJSONConfig | AuthorRuleJSONConfig | AttributionJSONConfig | HistoryJSONConfig | string;
export type RuleJson = RecentActivityRuleJSONConfig | RepeatActivityJSONConfig | AuthorRuleJSONConfig | AttributionJSONConfig | HistoryJSONConfig | RegexRuleJSONConfig | string;
export type RuleObjectJson = Exclude<RuleJson, string>
export type ActionJson = CommentActionJson | FlairActionJson | ReportActionJson | LockActionJson | RemoveActionJson | ApproveActionJson | BanActionJson | UserNoteActionJson | string;

View File

@@ -1,23 +1,98 @@
import {Logger} from "winston";
import {createAjvFactory, mergeArr, normalizeName} from "./util";
import {
buildCacheOptionsFromProvider,
createAjvFactory,
mergeArr,
normalizeName,
overwriteMerge,
parseBool, randomId,
readJson,
removeUndefinedKeys
} from "./util";
import {CommentCheck} from "./Check/CommentCheck";
import {SubmissionCheck} from "./Check/SubmissionCheck";
import Ajv from 'ajv';
import * as schema from './Schema/App.json';
import Ajv, {Schema} from 'ajv';
import * as appSchema from './Schema/App.json';
import * as operatorSchema from './Schema/OperatorConfig.json';
import {JSONConfig} from "./JsonConfig";
import LoggedError from "./Utils/LoggedError";
import {CheckStructuredJson} from "./Check";
import {PollingOptions, PollingOptionsStrong, PollOn} from "./Common/interfaces";
import {
DEFAULT_POLLING_INTERVAL,
DEFAULT_POLLING_LIMIT,
OperatorJsonConfig,
OperatorConfig,
PollingOptions,
PollingOptionsStrong,
PollOn, StrongCache, CacheProvider, CacheOptions
} from "./Common/interfaces";
import {isRuleSetJSON, RuleSetJson, RuleSetObjectJson} from "./Rule/RuleSet";
import deepEqual from "fast-deep-equal";
import {ActionJson, ActionObjectJson, RuleJson, RuleObjectJson} from "./Common/types";
import {isActionJson} from "./Action";
import {getLogger} from "./Utils/loggerFactory";
import {GetEnvVars} from 'env-cmd';
import {operatorConfig} from "./Utils/CommandConfig";
import merge from 'deepmerge';
import * as process from "process";
import {cacheOptDefaults, cacheTTLDefaults} from "./Common/defaults";
export interface ConfigBuilderOptions {
logger: Logger,
}
/**
 * Validate a parsed JSON config object against an AJV schema.
 *
 * On success the (unmodified) config is returned. On failure every schema error
 * is logged with as much context as can be extracted -- the offending data, any
 * allowed values, and any "[See] ..." reference embedded in the schema
 * description -- and a LoggedError is thrown.
 */
export const validateJson = (config: object, schema: Schema, logger: Logger): any => {
    const ajv = createAjvFactory(logger);
    if (ajv.validate(schema, config)) {
        return config;
    }
    logger.error('Json config was not valid. Please use schema to check validity.', {leaf: 'Config'});
    const schemaErrors = Array.isArray(ajv.errors) ? ajv.errors : [];
    for (const err of schemaErrors) {
        const lines = [`At: ${err.dataPath}`];

        // Try to describe the offending data -- a raw string value or a named object
        let dataDesc: string | undefined;
        if (typeof err.data === 'string') {
            dataDesc = err.data;
        } else if (err.data !== null && typeof err.data === 'object' && (err.data as any).name !== undefined) {
            dataDesc = `Object named '${(err.data as any).name}'`;
        }
        if (dataDesc !== undefined) {
            lines.push(`Data: ${dataDesc}`);
        }

        // enum-style errors carry the list of allowed values
        let allowed = '';
        // @ts-ignore
        if (err.params.allowedValues !== undefined) {
            // @ts-ignore
            allowed = ` [${err.params.allowedValues.join(', ')}]`;
        }
        lines.push(`${err.keyword}: ${err.schemaPath} => ${err.message}${allowed}`);

        // if we have a reference in the description parse it out so we can log it here for context
        const desc = err.parentSchema !== undefined ? (err.parentSchema.description as string | undefined) : undefined;
        if (desc !== undefined) {
            const seeIndex = desc.indexOf('[See]');
            if (seeIndex !== -1) {
                const eol = desc.indexOf('\n', seeIndex);
                const seeFragment = desc.slice(seeIndex + 5, eol === -1 ? undefined : eol);
                lines.push(`See:${seeFragment}`);
            }
        }

        logger.error(`Schema Error:\r\n${lines.join('\r\n')}`, {leaf: 'Config'});
    }
    throw new LoggedError('Config schema validity failure');
}
export class ConfigBuilder {
configLogger: Logger;
logger: Logger;
@@ -29,54 +104,8 @@ export class ConfigBuilder {
}
validateJson(config: object): JSONConfig {
const ajv = createAjvFactory(this.logger);
const valid = ajv.validate(schema, config);
if (valid) {
return config as JSONConfig;
} else {
this.configLogger.error('Json config was not valid. Please use schema to check validity.');
if (Array.isArray(ajv.errors)) {
for (const err of ajv.errors) {
let parts = [
`At: ${err.dataPath}`,
];
let data;
if (typeof err.data === 'string') {
data = err.data;
} else if (err.data !== null && typeof err.data === 'object' && (err.data as any).name !== undefined) {
data = `Object named '${(err.data as any).name}'`;
}
if (data !== undefined) {
parts.push(`Data: ${data}`);
}
let suffix = '';
// @ts-ignore
if (err.params.allowedValues !== undefined) {
// @ts-ignore
suffix = err.params.allowedValues.join(', ');
suffix = ` [${suffix}]`;
}
parts.push(`${err.keyword}: ${err.schemaPath} => ${err.message}${suffix}`);
// if we have a reference in the description parse it out so we can log it here for context
if(err.parentSchema !== undefined && err.parentSchema.description !== undefined) {
const desc = err.parentSchema.description as string;
const seeIndex = desc.indexOf('[See]');
if(seeIndex !== -1) {
let newLineIndex: number | undefined = desc.indexOf('\n', seeIndex);
if(newLineIndex === -1) {
newLineIndex = undefined;
}
const seeFragment = desc.slice(seeIndex + 5, newLineIndex);
parts.push(`See:${seeFragment}`);
}
}
this.configLogger.error(`Schema Error:\r\n${parts.join('\r\n')}`);
}
}
throw new LoggedError('Config schema validity failure');
}
const validConfig = validateJson(config, appSchema, this.logger);
return validConfig as JSONConfig;
}
parseToStructured(config: JSONConfig): CheckStructuredJson[] {
@@ -106,14 +135,15 @@ export const buildPollingOptions = (values: (string | PollingOptions)[]): Pollin
let opts: PollingOptionsStrong[] = [];
for (const v of values) {
if (typeof v === 'string') {
opts.push({pollOn: v as PollOn, interval: 10000, limit: 25});
opts.push({pollOn: v as PollOn, interval: DEFAULT_POLLING_INTERVAL, limit: DEFAULT_POLLING_LIMIT});
} else {
const {
pollOn: p,
interval = 20000,
limit = 25
interval = DEFAULT_POLLING_INTERVAL,
limit = DEFAULT_POLLING_LIMIT,
delayUntil,
} = v;
opts.push({pollOn: p as PollOn, interval, limit});
opts.push({pollOn: p as PollOn, interval, limit, delayUntil});
}
}
return opts;
@@ -215,3 +245,354 @@ export const insertNamedActions = (actions: Array<ActionJson>, namedActions: Map
return strongActions;
}
/**
 * Map CLI arguments into the (sparse) operator JSON config shape.
 *
 * Undefined values are stripped before returning so that merging with other
 * config sources (env, file) does not clobber values provided elsewhere.
 */
export const parseOpConfigFromArgs = (args: any): OperatorJsonConfig => {
    const {
        subreddits, clientId, clientSecret, accessToken, refreshToken, redirectUri,
        wikiConfig, dryRun, heartbeat, softLimit, hardLimit, authorTTL,
        operator, operatorDisplay, snooProxy, snooDebug, sharedMod,
        logLevel, logDir, port, sessionSecret, caching, web
    } = args || {};

    const raw = {
        operator: {name: operator, display: operatorDisplay},
        credentials: {clientId, clientSecret, accessToken, refreshToken, redirectUri},
        subreddits: {names: subreddits, wikiConfig, heartbeatInterval: heartbeat, dryRun},
        logging: {
            level: logLevel,
            // a bare --logDir flag (true) means "use the default logs folder"
            path: logDir === true ? `${process.cwd()}/logs` : undefined,
        },
        snoowrap: {proxy: snooProxy, debug: snooDebug},
        web: {enabled: web, port, session: {secret: sessionSecret}},
        polling: {sharedMod},
        caching: {provider: caching, authorTTL},
        api: {softLimit, hardLimit},
    };
    return removeUndefinedKeys(raw) as OperatorJsonConfig;
}
/**
 * Map environment variables into the (sparse) operator JSON config shape.
 *
 * Undefined values are stripped before returning so env config merges cleanly
 * with file/arg config without clobbering values provided elsewhere.
 */
export const parseOpConfigFromEnv = (): OperatorJsonConfig => {
    // parse an env var as an integer, leaving it undefined when not set
    const envInt = (val: string | undefined): number | undefined => val !== undefined ? parseInt(val) : undefined;

    // SUBREDDITS may be a comma-separated OR space-separated list
    let subs: string[] | undefined;
    const subsVal = process.env.SUBREDDITS;
    if (subsVal !== undefined) {
        const trimmed = subsVal.trim();
        if (trimmed.includes(',')) {
            // try to parse using comma
            subs = trimmed.split(',').map(x => x.trim()).filter(x => x !== '');
        } else {
            // otherwise try spaces, dropping any extraneous empties
            subs = trimmed.split(' ').filter(x => x !== ' ' && x !== '');
        }
        if (subs.length === 0) {
            subs = undefined;
        }
    }

    const raw = {
        operator: {
            name: process.env.OPERATOR,
            display: process.env.OPERATOR_DISPLAY
        },
        credentials: {
            clientId: process.env.CLIENT_ID,
            clientSecret: process.env.CLIENT_SECRET,
            accessToken: process.env.ACCESS_TOKEN,
            refreshToken: process.env.REFRESH_TOKEN,
            redirectUri: process.env.REDIRECT_URI,
        },
        subreddits: {
            names: subs,
            wikiConfig: process.env.WIKI_CONFIG,
            heartbeatInterval: envInt(process.env.HEARTBEAT),
            dryRun: parseBool(process.env.DRYRUN, undefined),
        },
        logging: {
            // @ts-ignore
            level: process.env.LOG_LEVEL,
            path: process.env.LOG_DIR === 'true' ? `${process.cwd()}/logs` : undefined,
        },
        snoowrap: {
            proxy: process.env.PROXY,
            debug: parseBool(process.env.SNOO_DEBUG, undefined),
        },
        web: {
            enabled: process.env.WEB !== undefined ? parseBool(process.env.WEB) : undefined,
            port: envInt(process.env.PORT),
            session: {
                provider: process.env.SESSION_PROVIDER,
                secret: process.env.SESSION_SECRET
            }
        },
        polling: {
            sharedMod: parseBool(process.env.SHARE_MOD),
        },
        caching: {
            provider: {
                // @ts-ignore
                store: process.env.CACHING
            },
            authorTTL: envInt(process.env.AUTHOR_TTL)
        },
        api: {
            softLimit: envInt(process.env.SOFT_LIMIT),
            hardLimit: envInt(process.env.HARD_LIMIT)
        }
    };
    return removeUndefinedKeys(raw) as OperatorJsonConfig;
}
// Hierarchy (lower level overwrites above)
//
// .env file
// Actual ENVs (from environment)
// json config
// args from cli
/**
 * Assemble a sparse operator config by merging every supported source.
 *
 * Order matters: the .env file is loaded into process.env FIRST (without
 * overriding variables already set in the environment) so that
 * parseOpConfigFromEnv, called later, picks those values up. File config then
 * overrides env values, and CLI args override both (see hierarchy above).
 *
 * @param args parsed CLI arguments (may also carry operatorConfig path)
 * @returns merged sparse config with undefined keys stripped
 * @throws LoggedError / parse errors when the operator config file is unreadable or invalid
 */
export const parseOperatorConfigFromSources = async (args: any): Promise<OperatorJsonConfig> => {
    const {logLevel = process.env.LOG_LEVEL, logDir = process.env.LOG_DIR || false} = args || {};
    const envPath = process.env.OPERATOR_ENV;
    // create a pre config logger to help with debugging
    const initLogger = getLogger({logLevel, logDir: logDir === true ? `${process.cwd()}/logs` : logDir}, 'init');
    try {
        const vars = await GetEnvVars({
            envFile: {
                filePath: envPath,
                fallback: true
            }
        });
        // if we found variables in the file of at a fallback path then add them in before we do main arg parsing
        for (const [k, v] of Object.entries(vars)) {
            // don't override existing
            if (process.env[k] === undefined) {
                process.env[k] = v;
            }
        }
    } catch (err) {
        let msg = 'No .env file found at default location (./env)';
        if (envPath !== undefined) {
            msg = `${msg} or OPERATOR_ENV path (${envPath})`;
        }
        initLogger.warn(`${msg} -- this may be normal if neither was provided.`);
        // mimicking --silent from env-cmd
        //swallow silently for now 😬
    }
    const {operatorConfig = process.env.OPERATOR_CONFIG} = args;
    let configFromFile: OperatorJsonConfig = {};
    if (operatorConfig !== undefined) {
        let rawConfig;
        try {
            rawConfig = await readJson(operatorConfig, {log: initLogger});
        } catch (err) {
            initLogger.error('Cannot continue app startup because operator config file was not parseable.');
            err.logged = true;
            throw err;
        }
        try {
            // validated against the operator schema, not the subreddit/app schema
            configFromFile = validateJson(rawConfig, operatorSchema, initLogger) as OperatorJsonConfig;
        } catch (err) {
            initLogger.error('Cannot continue app startup because operator config file was not valid.');
            throw err;
        }
    }
    const configFromArgs = parseOpConfigFromArgs(args);
    const configFromEnv = parseOpConfigFromEnv();
    // deepmerge: later array entries win -- env < file < args
    const mergedConfig = merge.all([configFromEnv, configFromFile, configFromArgs], {
        arrayMerge: overwriteMerge,
    });
    return removeUndefinedKeys(mergedConfig) as OperatorJsonConfig;
}
/**
 * Hydrate a sparse, merged OperatorJsonConfig into a fully-defaulted OperatorConfig.
 *
 * Every section falls back to its documented default. Credentials are cast to
 * string without validation here -- presence is presumably checked elsewhere
 * (TODO confirm).
 *
 * Fixes:
 * - `notifications` is declared on OperatorConfig but was silently dropped
 *   during hydration; it is now carried through from the source data.
 * - user-supplied `caching.provider.ttl`/`max` were destructured out and then
 *   discarded (always replaced by defaults); they are now honored.
 */
export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): OperatorConfig => {
    const {
        operator: {
            name,
            display = 'Anonymous'
        } = {},
        credentials: {
            clientId: ci,
            clientSecret: cs,
            ...restCred
        } = {},
        subreddits: {
            names = [],
            wikiConfig = 'botconfig/contextbot',
            heartbeatInterval = 300,
            dryRun
        } = {},
        logging: {
            level = 'verbose',
            path,
        } = {},
        snoowrap = {},
        web: {
            enabled = true,
            port = 8085,
            maxLogs = 200,
            session: {
                secret = randomId(),
                provider: sessionProvider = {store: 'memory'},
            } = {}
        } = {},
        polling: {
            sharedMod = false,
            limit = 100,
            interval = 30,
        } = {},
        caching = 'memory',
        notifications,
        api: {
            softLimit = 250,
            hardLimit = 50
        } = {},
    } = data;

    // Normalize the three accepted caching shapes (bare provider name, config
    // with provider name, config with full provider options) into a strong
    // cache config with all TTLs/options populated
    let cache = {
        ...cacheTTLDefaults,
        provider: {
            store: 'memory',
            ...cacheOptDefaults
        }
    };
    if (typeof caching === 'string') {
        // just a provider name -- use all default TTLs/options
        cache = {
            provider: {
                store: caching as CacheProvider,
                ...cacheOptDefaults
            },
            ...cacheTTLDefaults
        };
    } else if (typeof caching === 'object') {
        const {provider, ...restConfig} = caching;
        if (typeof provider === 'string') {
            cache = {
                ...cacheTTLDefaults,
                ...restConfig,
                provider: {
                    store: provider as CacheProvider,
                    ...cacheOptDefaults
                }
            }
        } else {
            const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
            cache = {
                ...cacheTTLDefaults,
                ...restConfig,
                provider: {
                    store,
                    ...cacheOptDefaults,
                    // previously ttl/max were destructured out and then discarded,
                    // silently ignoring user-provided values -- carry them through
                    ttl,
                    max,
                    ...rest,
                },
            }
        }
    }

    const config: OperatorConfig = {
        operator: {
            name,
            display
        },
        credentials: {
            clientId: (ci as string),
            clientSecret: (cs as string),
            ...restCred,
        },
        notifications,
        logging: {
            level,
            path
        },
        snoowrap,
        subreddits: {
            names,
            wikiConfig,
            heartbeatInterval,
            dryRun,
        },
        web: {
            enabled,
            port,
            session: {
                secret,
                provider: typeof sessionProvider === 'string' ? {
                    ...buildCacheOptionsFromProvider({
                        // 86400000 = one day; presumably milliseconds -- confirm against session store units
                        ttl: 86400000,
                        store: sessionProvider,
                    })
                } : {
                    ...buildCacheOptionsFromProvider(sessionProvider),
                    ttl: 86400000,
                },
            },
            maxLogs,
        },
        // @ts-ignore
        caching: cache,
        polling: {
            sharedMod,
            limit,
            interval,
        },
        api: {
            softLimit,
            hardLimit
        }
    };
    return config;
}

View File

@@ -0,0 +1,46 @@
import webhook from 'webhook-discord';
import {NotificationContent} from "../Common/interfaces";
/**
 * Sends notifications to a Discord channel via a webhook URL.
 * `name` is the user-chosen identifier used to route events to this notifier.
 */
class DiscordNotifier {
    name: string
    type: string = 'Discord';
    url: string;

    constructor(name: string, url: string) {
        this.name = name;
        this.url = url;
    }

    /**
     * Post a notification payload to the configured Discord webhook.
     * Embed color is chosen from the log level: red = error, yellow = warn, cyan otherwise.
     */
    handle(val: NotificationContent) {
        const h = new webhook.Webhook(this.url);
        const hook = new webhook.MessageBuilder();
        const {logLevel, title, footer, body = ''} = val;
        hook.setName('RCB')
            .setTitle(title)
            .setDescription(body)
        if (footer !== undefined) {
            // @ts-ignore
            hook.setFooter(footer, false);
        }
        switch (logLevel) {
            case 'error':
                // fixed: was "##ff0000" -- the doubled '#' is not a valid hex color
                hook.setColor("#ff0000");
                break;
            case 'warn':
                hook.setColor("#ffe900");
                break;
            default:
                hook.setColor("#00fffa");
                break;
        }
        // NOTE(review): send() returns a promise that is not awaited -- delivery
        // failures are silently dropped. Fire-and-forget may be intentional; confirm.
        h.send(hook);
    }
}

export default DiscordNotifier;

View File

@@ -0,0 +1,122 @@
import {
NotificationConfig,
NotificationEventConfig,
NotificationEvents,
NotificationEventType,
Notifier
} from "../Common/interfaces";
import DiscordNotifier from "./DiscordNotifier";
import {Logger} from "winston";
import {mergeArr} from "../util";
import Subreddit from "snoowrap/dist/objects/Subreddit";
/**
 * Routes application events (run state changes, polling errors, actioned
 * events, config updates) to the notification providers configured for a
 * subreddit. Event hooks are either bare lists of event types (dispatched to
 * ALL providers) or NotificationEventConfig objects (dispatched only to the
 * providers named in them).
 */
class NotificationManager {
    notifiers: Notifier[] = [];
    events: NotificationEvents = [];
    logger: Logger;
    subreddit: Subreddit;
    name: string;

    constructor(logger: Logger, subreddit: Subreddit, displayName: string, config?: NotificationConfig) {
        this.logger = logger.child({leaf: 'Notifications'}, mergeArr);
        this.subreddit = subreddit;
        this.name = displayName;
        if (config !== undefined) {
            const {events = [], providers = []} = config;
            this.events = events;
            for (const p of providers) {
                switch (p.type) {
                    case 'discord':
                        this.notifiers.push(new DiscordNotifier(p.name, p.url));
                        break;
                    default:
                        this.logger.warn(`Notification provider type of ${p.type} not recognized.`);
                        break;
                }
            }
            if (this.events.length > 0 && this.notifiers.length === 0) {
                // fixed grammar: was "but not notification providers were setup!"
                this.logger.warn(`Config specified ${this.events.length} event hooks but no notification providers were set up!`);
            }
        }
    }

    /**
     * Summarize configured providers and the distinct set of subscribed event types.
     */
    getStats() {
        let notifiers: string[] = [];
        if (this.notifiers.length > 0) {
            notifiers = this.notifiers.map(x => `${x.name} (${x.type})`);
        }
        let events: string[] = [];
        if (this.events.length > 0) {
            // flatten both hook shapes (bare array / config object) and de-duplicate
            events = this.events.reduce((acc: string[], curr) => {
                const e = Array.isArray(curr) ? curr : curr.types;
                for (const ev of e) {
                    if (!acc.includes(ev)) {
                        acc.push(ev);
                    }
                }
                return acc;
            }, []);
        }
        return {
            notifiers,
            events,
        }
    }

    /**
     * Dispatch an event to every provider subscribed to it.
     *
     * @param name the event type that occurred
     * @param title notification title (subreddit display name is appended)
     * @param body optional notification body
     * @param causedBy optional invoker, noted in the footer
     * @param logLevel optional level used by providers for styling (e.g. embed color)
     */
    handle(name: NotificationEventType, title: string, body?: string, causedBy?: string, logLevel?: string) {
        if (this.notifiers.length === 0 || this.events.length === 0) {
            return;
        }
        let notifiers: Notifier[] = [];
        for (const e of this.events) {
            // array of event NotificationEventType
            if (Array.isArray(e)) {
                const ev = e as NotificationEventType[];
                for (const v of ev) {
                    if (v === name) {
                        // if we find the event here then we want to send the event to all configured notifiers
                        notifiers = notifiers.concat(this.notifiers);
                    }
                }
            } else {
                // e is a NotificationEventConfig
                const ev = e as NotificationEventConfig;
                const hasEvent = ev.types.some(x => x === name);
                if (hasEvent) {
                    const p = ev.providers.map(y => y.toLowerCase());
                    const validNotifiers = this.notifiers.filter(x => p.includes(x.name.toLowerCase()));
                    notifiers = notifiers.concat(validNotifiers);
                }
            }
        }
        // remove dups
        notifiers = notifiers.reduce((acc: Notifier[], curr: Notifier) => {
            if (!acc.some(x => x.name === curr.name)) {
                return acc.concat(curr);
            }
            return acc;
        }, []);
        if (notifiers.length === 0) {
            // fixed: no provider is subscribed to this event -- previously this fell
            // through and logged a misleading "Sending notification" line with an
            // empty provider list
            return;
        }
        let footer = [];
        if (causedBy !== undefined) {
            footer.push(`* Performed by "${causedBy}"`);
        }
        footer.push(`* Notification triggered by "${name}"`);
        this.logger.info(`Sending notification for ${name} to providers: ${notifiers.map(x => `${x.name} (${x.type})`).join(', ')}`);
        for (const n of notifiers) {
            n.handle({
                title: `${title} (${this.name})`,
                body: body || '',
                footer: footer.length > 0 ? footer.join('\n') : undefined,
                logLevel
            });
        }
    }
}

export default NotificationManager;

View File

@@ -102,7 +102,6 @@ export class HistoryRule extends Rule {
}
protected async process(item: Submission): Promise<[boolean, RuleResult]> {
// TODO reuse activities between ActivityCriteria to reduce api calls
let criteriaResults = [];

392
src/Rule/RegexRule.ts Normal file
View File

@@ -0,0 +1,392 @@
import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
import {Comment} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {
comparisonTextOp, FAIL, isExternalUrlSubmission, parseGenericValueComparison,
parseGenericValueOrPercentComparison, parseRegex,
PASS
} from "../util";
import {
ActivityWindowType, JoinOperands,
} from "../Common/interfaces";
import dayjs from 'dayjs';
export interface RegexCriteria {
    /**
     * A descriptive name that will be used in logging and be available for templating
     *
     * @examples ["swear words"]
     * */
    name?: string
    /**
     * A valid Regular Expression to test content against
     *
     * Do not wrap the expression in forward slashes
     *
     * EX For the expression `/reddit|FoxxMD/` the value used should be `reddit|FoxxMD`
     *
     * @examples ["reddit|FoxxMD"]
     * */
    regex: string,
    /**
     * Regex flags to use (EX `i` for case-insensitive matching)
     * */
    regexFlags?: string,
    /**
     * Which content from an Activity to test the regex against
     *
     * Only used if the Activity being tested is a Submission -- Comments are only tested against their content (duh)
     *
     * @default ["title", "body"]
     * */
    testOn?: ('title' | 'body' | 'url')[]
    /**
     * **When used with `window`** determines what type of Activities to retrieve
     *
     * @default "all"
     * */
    lookAt?: 'submissions' | 'comments' | 'all',
    /**
     * A string containing a comparison operator and a value to determine when an Activity is determined "matched"
     *
     * The syntax is `(< OR > OR <= OR >=) <number>`
     *
     * * EX `> 7` => greater than 7 matches found in the Activity, Activity is matched
     * * EX `<= 3` => less than 3 matches found in the Activity, Activity is matched
     *
     * @pattern ^\s*(>|>=|<|<=)\s*(\d+)(\s+.*)*$
     * @default "> 0"
     * @examples ["> 0"]
     * */
    matchThreshold?: string,
    /**
     * A string containing a comparison operator and a value to determine how many Activities need to be "matched" (based on `matchThreshold` condition) to trigger the rule
     *
     * **Only useful when used in conjunction with `window`**. If no `window` is specified only the Activity being checked is tested (so the default should/will be used).
     *
     * To disable (you are only using `totalMatchThreshold`) set to `null`
     *
     * The syntax is `(< OR > OR <= OR >=) <number>[percent sign]`
     *
     * * EX `> 3` => greater than 3 Activities met the `matchThreshold` condition, Rule is triggered
     * * EX `<= 10%` => less than 10% of all Activities retrieved from `window` met the `matchThreshold` condition, Rule is triggered
     *
     * @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
     * @default "> 0"
     * @examples ["> 0"]
     * */
    activityMatchThreshold?: string,
    /**
     * A string containing a comparison operator and a value to determine how many total matches satisfies the criteria.
     *
     * If both this and `activityMatchThreshold` are present then whichever is satisfied first will be used.
     *
     * If not using `window` then this should not be used as running `matchThreshold` on one Activity is effectively the same behavior ( but I'm not gonna stop ya ¯\\\_(ツ)\_/¯ )
     *
     * The syntax is `(< OR > OR <= OR >=) <number>`
     *
     * * EX `> 7` => greater than 7 matches found in Activity + Author history `window`
     * * EX `<= 3` => less than 3 matches found in the Activity + Author history `window`
     *
     * @pattern ^\s*(>|>=|<|<=)\s*(\d+)(\s+.*)*$
     * @default "null"
     * @examples ["> 0"]
     * */
    totalMatchThreshold?: string,
    // When present, the range of the Author's history to additionally test against
    window?: ActivityWindowType
}
/**
 * Tests one or more Regular Expressions against the content of an Activity and, optionally,
 * against a `window` of the Author's history. Individual criteria results are combined with
 * the configured AND/OR `condition` to decide whether the rule is triggered.
 */
export class RegexRule extends Rule {
    criteria: RegexCriteria[];
    condition: JoinOperands;

    /**
     * @param options rule options; must contain at least one RegexCriteria
     * @throws Error when `criteria` is empty
     */
    constructor(options: RegexRuleOptions) {
        super(options);
        const {
            criteria = [],
            condition = 'OR'
        } = options || {};
        if (criteria.length < 1) {
            throw new Error('Must provide at least one RegexCriteria');
        }
        this.criteria = criteria;
        this.condition = condition;
    }

    getKind(): string {
        return 'Regex';
    }

    getSpecificPremise(): object {
        return {
            criteria: this.criteria,
            condition: this.condition,
        }
    }

    protected async process(item: Submission | Comment): Promise<[boolean, RuleResult]> {
        let criteriaResults = [];
        for (const criteria of this.criteria) {
            const {
                name,
                regex,
                regexFlags,
                testOn: testOnVals = ['title', 'body'],
                lookAt = 'all',
                matchThreshold = '> 0',
                activityMatchThreshold = '> 0',
                totalMatchThreshold = null,
                window,
            } = criteria;
            // normalize their values and also ensure we don't have duplicates
            const testOn = testOnVals.map(y => y.toLowerCase()).reduce((acc: string[], curr) => {
                if (acc.includes(curr)) {
                    return acc;
                }
                return acc.concat(curr);
            }, []);

            // check regex -- constructing it will throw if the user-supplied expression is invalid
            const reg = new RegExp(regex);
            // ok cool its a valid regex

            const matchComparison = parseGenericValueComparison(matchThreshold);
            const activityMatchComparison = activityMatchThreshold === null ? undefined : parseGenericValueOrPercentComparison(activityMatchThreshold);
            const totalMatchComparison = totalMatchThreshold === null ? undefined : parseGenericValueComparison(totalMatchThreshold);

            // since we are dealing with user input (regex) it's likely they mess up their expression and end up matching *a lot* of stuff
            // so to keep memory under control only keep the first 100 matches
            // and just count the rest
            let matches: string[] = [];
            let matchCount = 0;
            let activitiesMatchedCount = 0;
            let activitiesTested = 0;
            let activityThresholdMet;
            let totalThresholdMet;

            // first lets see if the activity we are checking satisfies thresholds
            // since we may be able to avoid api calls to get history
            let actMatches = this.getMatchesFromActivity(item, testOn, reg, regexFlags);
            matches = matches.concat(actMatches).slice(0, 100);
            matchCount += actMatches.length;
            activitiesTested++;
            const singleMatched = comparisonTextOp(actMatches.length, matchComparison.operator, matchComparison.value);
            if (singleMatched) {
                activitiesMatchedCount++;
            }
            if (activityMatchComparison !== undefined) {
                // a percent comparison cannot be evaluated until history size is known, so it stays false for now
                activityThresholdMet = !activityMatchComparison.isPercent && comparisonTextOp(activitiesMatchedCount, activityMatchComparison.operator, activityMatchComparison.value);
            }
            if (totalMatchComparison !== undefined) {
                totalThresholdMet = comparisonTextOp(matchCount, totalMatchComparison.operator, totalMatchComparison.value);
            }

            let history: (Submission | Comment)[] = [];
            if ((activityThresholdMet === false || totalThresholdMet === false) && window !== undefined) {
                // our checking activity didn't meet threshold requirements and criteria does define window
                // leh go
                switch (lookAt) {
                    case 'all':
                        history = await this.resources.getAuthorActivities(item.author, {window: window});
                        break;
                    case 'submissions':
                        history = await this.resources.getAuthorSubmissions(item.author, {window: window});
                        break;
                    case 'comments':
                        history = await this.resources.getAuthorComments(item.author, {window: window});
                }
                // remove current activity if it exists in history so we don't count it twice
                history = history.filter(x => x.id !== item.id);
                const historyLength = history.length;

                // build the "enough activities matched?" predicate once, outside the loop
                let activityCountFunc: ((actsMatched: number) => boolean) | undefined;
                if (activityMatchComparison !== undefined) {
                    if (activityMatchComparison.isPercent) {
                        activityCountFunc = (actsMatched: number) => {
                            return comparisonTextOp(actsMatched / historyLength, activityMatchComparison.operator, activityMatchComparison.value / 100);
                        }
                    } else {
                        activityCountFunc = (actsMatched: number) => {
                            return comparisonTextOp(actsMatched, activityMatchComparison.operator, activityMatchComparison.value);
                        }
                    }
                }

                for (const h of history) {
                    activitiesTested++;
                    const aMatches = this.getMatchesFromActivity(h, testOn, reg, regexFlags);
                    matches = matches.concat(aMatches).slice(0, 100);
                    matchCount += aMatches.length;
                    const matched = comparisonTextOp(aMatches.length, matchComparison.operator, matchComparison.value);
                    if (matched) {
                        activitiesMatchedCount++;
                    }
                    // once a threshold is met it stays met -- these are sticky flags
                    if (activityCountFunc !== undefined && activityThresholdMet !== true && activityCountFunc(activitiesMatchedCount)) {
                        activityThresholdMet = true;
                    }
                    if (totalMatchComparison !== undefined && totalThresholdMet !== true) {
                        totalThresholdMet = comparisonTextOp(matchCount, totalMatchComparison.operator, totalMatchComparison.value)
                    }
                }
            }

            // build a human-friendly description of the window actually examined, for logging/results
            let humanWindow = '';
            if (history.length > 0) {
                if (typeof window === 'number') {
                    humanWindow = `${history.length} Items`;
                } else {
                    const firstActivity = history[0];
                    const lastActivity = history[history.length - 1];
                    humanWindow = dayjs.duration(dayjs(firstActivity.created_utc * 1000).diff(dayjs(lastActivity.created_utc * 1000))).humanize();
                }
            } else {
                humanWindow = '1 Item';
            }

            const critResults = {
                criteria: {
                    name,
                    regex,
                    testOn,
                    matchThreshold,
                    activityMatchThreshold,
                    totalMatchThreshold,
                    window: humanWindow
                },
                matches,
                matchCount,
                activitiesMatchedCount,
                activityThresholdMet,
                totalThresholdMet,
                triggered: false,
            };
            if (activityThresholdMet === undefined && totalThresholdMet === undefined) {
                // user should not have disabled both but in this scenario we'll pretend activityThresholdMet = singleMatch
                critResults.activityThresholdMet = singleMatched;
                critResults.triggered = singleMatched;
            } else {
                critResults.triggered = activityThresholdMet === true || totalThresholdMet === true;
            }
            criteriaResults.push(critResults);

            // short-circuit: OR stops on first success, AND stops on first failure
            if (this.condition === 'OR') {
                if (critResults.triggered) {
                    break;
                }
            } else if (!critResults.triggered) {
                // since its AND and didn't match the whole rule will fail
                break;
            }
        }

        const criteriaMet = this.condition === 'OR' ? criteriaResults.some(x => x.triggered) : criteriaResults.every(x => x.triggered);

        // assemble a one-line summary per criteria for logging
        const logSummary: string[] = [];
        let index = 0;
        for (const c of criteriaResults) {
            index++;
            let msg = `Crit ${c.criteria.name || index} ${c.triggered ? PASS : FAIL}`;
            if (c.activityThresholdMet !== undefined) {
                msg = `${msg} -- Activity Match=> ${c.activityThresholdMet ? PASS : FAIL} ${c.activitiesMatchedCount} ${c.criteria.activityMatchThreshold} (Threshold ${c.criteria.matchThreshold})`;
            }
            if (c.totalThresholdMet !== undefined) {
                msg = `${msg} -- Total Matches=> ${c.totalThresholdMet ? PASS : FAIL} ${c.matchCount} ${c.criteria.totalMatchThreshold}`;
            } else {
                msg = `${msg} and ${c.matchCount} Total Matches`;
            }
            msg = `${msg} (Window: ${c.criteria.window})`;
            logSummary.push(msg);
        }
        const result = `${criteriaMet ? PASS : FAIL} ${logSummary.join(' || ')}`;
        this.logger.verbose(result);
        return Promise.resolve([criteriaMet, this.getResult(criteriaMet, {result, data: criteriaResults})]);
    }

    /**
     * Extract all regex matches from the relevant content of a single Activity.
     *
     * @param a the Activity to test
     * @param testOn which submission fields to test ('title' | 'body' | 'url'); ignored for Comments
     * @param reg the compiled expression to test with
     * @param flags optional regex flags forwarded to parseRegex
     * @returns every match found across all tested content
     */
    protected getMatchesFromActivity(a: (Submission | Comment), testOn: string[], reg: RegExp, flags?: string): string[] {
        let m: string[] = [];
        // determine what content we are testing
        let contents: string[] = [];
        if (a instanceof Submission) {
            for (const l of testOn) {
                switch (l) {
                    case 'title':
                        contents.push(a.title);
                        break;
                    case 'body':
                        // only self-posts have body text
                        if (a.is_self) {
                            contents.push(a.selftext);
                        }
                        break;
                    case 'url':
                        // only test the url when the submission actually links somewhere external
                        if (isExternalUrlSubmission(a)) {
                            contents.push(a.url);
                        }
                        break;
                }
            }
        } else {
            contents.push(a.body)
        }
        for (const c of contents) {
            const results = parseRegex(reg, c, flags);
            if (results.matched) {
                m = m.concat(results.matches);
            }
        }
        return m;
    }
}
interface RegexConfig {
    /**
     * A list of Regular Expressions and conditions under which tested Activity(ies) are matched
     * @minItems 1
     * @examples [{"regex": "/reddit/", "matchThreshold": "> 3"}]
     * */
    criteria: RegexCriteria[]
    /**
     * * If `OR` then any set of Criteria that pass will trigger the Rule
     * * If `AND` then all Criteria sets must pass to trigger the Rule
     *
     * @default "OR"
     * */
    condition?: 'AND' | 'OR'
}

// Runtime options for constructing a RegexRule (criteria config plus common Rule options)
export interface RegexRuleOptions extends RegexConfig, RuleOptions {
}

/**
 * Test a (list of) Regular Expression against the contents or title of an Activity
 *
 * Optionally, specify a `window` of the User's history to additionally test against
 *
 * Available data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):
 *
 * */
export interface RegexRuleJSONConfig extends RegexConfig, RuleJSONConfig {
    /**
     * @examples ["regex"]
     * */
    kind: 'regex'
}

export default RegexRule;

View File

@@ -5,6 +5,7 @@ import AuthorRule, {AuthorRuleJSONConfig} from "./AuthorRule";
import {AttributionJSONConfig, AttributionRule} from "./AttributionRule";
import {Logger} from "winston";
import HistoryRule, {HistoryJSONConfig} from "./HistoryRule";
import RegexRule, {RegexRuleJSONConfig} from "./RegexRule";
export function ruleFactory
(config: RuleJSONConfig, logger: Logger, subredditName: string): Rule {
@@ -25,6 +26,9 @@ export function ruleFactory
case 'history':
cfg = config as HistoryJSONConfig;
return new HistoryRule({...cfg, logger, subredditName});
case 'regex':
cfg = config as RegexRuleJSONConfig;
return new RegexRule({...cfg, logger, subredditName});
default:
throw new Error('rule "kind" was not recognized.');
}

View File

@@ -3,13 +3,14 @@ import {RuleOptions, RuleResult} from "../index";
import {Comment} from "snoowrap";
import {
activityWindowText,
comparisonTextOp, FAIL,
comparisonTextOp, FAIL, isExternalUrlSubmission, isRedditMedia,
parseGenericValueComparison, parseSubredditName,
parseUsableLinkIdentifier as linkParser, PASS
} from "../../util";
import {ActivityWindow, ActivityWindowType, ReferenceSubmission} from "../../Common/interfaces";
import Submission from "snoowrap/dist/objects/Submission";
import dayjs from "dayjs";
import Fuse from 'fuse.js'
const parseUsableLinkIdentifier = linkParser();
@@ -27,7 +28,9 @@ const getActivityIdentifier = (activity: (Submission | Comment), length = 200) =
let identifier: string;
if (activity instanceof Submission) {
if (activity.is_self) {
identifier = activity.selftext.slice(0, length);
identifier = `${activity.title}${activity.selftext.slice(0, length)}`;
} else if(isRedditMedia(activity)) {
identifier = activity.title;
} else {
identifier = parseUsableLinkIdentifier(activity.url) as string;
}
@@ -37,6 +40,11 @@ const getActivityIdentifier = (activity: (Submission | Comment), length = 200) =
return identifier;
}
const fuzzyOptions = {
includeScore: true,
distance: 15
};
export class RepeatActivityRule extends SubmissionRule {
threshold: string;
window: ActivityWindowType;
@@ -46,6 +54,7 @@ export class RepeatActivityRule extends SubmissionRule {
include: string[];
exclude: string[];
keepRemoved: boolean;
minWordCount: number;
constructor(options: RepeatActivityOptions) {
super(options);
@@ -54,11 +63,13 @@ export class RepeatActivityRule extends SubmissionRule {
window = 100,
gapAllowance,
useSubmissionAsReference = true,
minWordCount = 1,
lookAt = 'all',
include = [],
exclude = [],
keepRemoved = false,
} = options;
this.minWordCount = minWordCount;
this.keepRemoved = keepRemoved;
this.threshold = threshold;
this.window = window;
@@ -112,7 +123,10 @@ export class RepeatActivityRule extends SubmissionRule {
const {openSets = [], allSets = []} = acc;
let identifier = getActivityIdentifier(activity);
const isUrl = isExternalUrlSubmission(activity);
let fu = new Fuse([identifier], !isUrl ? fuzzyOptions : {...fuzzyOptions, distance: 5});
const validSub = filterFunc(activity);
let minMet = identifier.length >= this.minWordCount;
let updatedAllSets = [...allSets];
let updatedOpenSets: RepeatActivityData[] = [];
@@ -120,18 +134,44 @@ export class RepeatActivityRule extends SubmissionRule {
let currIdentifierInOpen = false;
const bufferedActivities = this.gapAllowance === undefined || this.gapAllowance === 0 ? [] : activities.slice(Math.max(0, index - this.gapAllowance), Math.max(0, index));
for (const o of openSets) {
if (o.identifier === identifier && validSub) {
const res = fu.search(o.identifier);
const match = res.length > 0;
if (match && validSub && minMet) {
updatedOpenSets.push({...o, sets: [...o.sets, activity]});
currIdentifierInOpen = true;
} else if (bufferedActivities.some(x => getActivityIdentifier(x) === identifier) && validSub) {
} else if (bufferedActivities.some(x => fu.search(getActivityIdentifier(x)).length > 0) && validSub && minMet) {
updatedOpenSets.push(o);
} else {
} else if(!currIdentifierInOpen && !isUrl) {
updatedAllSets.push(o);
}
}
if (!currIdentifierInOpen) {
updatedOpenSets.push({identifier, sets: [activity]})
if(isUrl) {
// could be that a spammer is using different URLs for each submission but similar submission titles so search by title as well
const sub = activity as Submission;
identifier = sub.title;
fu = new Fuse([identifier], !isUrl ? fuzzyOptions : {...fuzzyOptions, distance: 5});
minMet = identifier.length >= this.minWordCount;
for (const o of openSets) {
const res = fu.search(o.identifier);
const match = res.length > 0;
if (match && validSub && minMet) {
updatedOpenSets.push({...o, sets: [...o.sets, activity]});
currIdentifierInOpen = true;
} else if (bufferedActivities.some(x => fu.search(getActivityIdentifier(x)).length > 0) && validSub && minMet && !updatedOpenSets.includes(o)) {
updatedOpenSets.push(o);
} else if(!updatedAllSets.includes(o)) {
updatedAllSets.push(o);
}
}
if (!currIdentifierInOpen) {
updatedOpenSets.push({identifier, sets: [activity]})
}
}
}
return {openSets: updatedOpenSets, allSets: updatedAllSets};
@@ -152,8 +192,19 @@ export class RepeatActivityRule extends SubmissionRule {
let applicableGroupedActivities = identifierGroupedActivities;
if (this.useSubmissionAsReference) {
applicableGroupedActivities = new Map();
const referenceSubmissions = identifierGroupedActivities.get(getActivityIdentifier(item));
applicableGroupedActivities.set(getActivityIdentifier(item), referenceSubmissions || [])
let identifier = getActivityIdentifier(item);
let referenceSubmissions = identifierGroupedActivities.get(identifier);
if(referenceSubmissions === undefined && isExternalUrlSubmission(item)) {
// if external url sub then try by title
identifier = item.title;
referenceSubmissions = identifierGroupedActivities.get(identifier);
if(referenceSubmissions === undefined) {
// didn't get by title so go back to url since that's the default
identifier = getActivityIdentifier(item);
}
}
applicableGroupedActivities.set(identifier, referenceSubmissions || [])
}
const {operator, value: thresholdValue} = parseGenericValueComparison(this.threshold);
@@ -280,6 +331,20 @@ interface RepeatActivityConfig extends ActivityWindow, ReferenceSubmission {
* @default false
* */
keepRemoved?: boolean
/**
* For activities that are text-based this is the minimum number of words required for the activity to be considered for a repeat
*
* EX if `minWordCount=5` and a comment is `what about you` then it is ignored because `3 is less than 5`
*
* **For self-text submissions** -- title + body text
*
* **For comments** -- body text
*
* @default 1
* @example [1]
* */
minWordCount?: number,
}
export interface RepeatActivityOptions extends RepeatActivityConfig, RuleOptions {

View File

@@ -77,35 +77,45 @@ export abstract class Rule implements IRule, Triggerable {
}
async run(item: Comment | Submission, existingResults: RuleResult[] = []): Promise<[(boolean | null), RuleResult]> {
const existingResult = findResultByPremise(this.getPremise(), existingResults);
if (existingResult) {
this.logger.debug(`Returning existing result of ${existingResult.triggered ? '✔️' : '❌'}`);
return Promise.resolve([existingResult.triggered, {...existingResult, name: this.name}]);
}
const [itemPass, crit] = isItem(item, this.itemIs, this.logger);
if(!itemPass) {
this.logger.verbose(`(Skipped) Item did not pass 'itemIs' test`);
return Promise.resolve([null, this.getResult(null, {result: `Item did not pass 'itemIs' test`})]);
}
if (this.authorIs.include !== undefined && this.authorIs.include.length > 0) {
for (const auth of this.authorIs.include) {
if (await this.resources.testAuthorCriteria(item, auth)) {
return this.process(item);
}
try {
const existingResult = findResultByPremise(this.getPremise(), existingResults);
if (existingResult) {
this.logger.debug(`Returning existing result of ${existingResult.triggered ? '✔️' : '❌'}`);
return Promise.resolve([existingResult.triggered, {...existingResult, name: this.name}]);
}
this.logger.verbose('(Skipped) Inclusive author criteria not matched');
return Promise.resolve([null, this.getResult(null, {result: 'Inclusive author criteria not matched'})]);
}
if (this.authorIs.exclude !== undefined && this.authorIs.exclude.length > 0) {
for (const auth of this.authorIs.exclude) {
if (await this.resources.testAuthorCriteria(item, auth, false)) {
return this.process(item);
}
const [itemPass, crit] = isItem(item, this.itemIs, this.logger);
if (!itemPass) {
this.logger.verbose(`(Skipped) Item did not pass 'itemIs' test`);
return Promise.resolve([null, this.getResult(null, {result: `Item did not pass 'itemIs' test`})]);
}
this.logger.verbose('(Skipped) Exclusive author criteria not matched');
return Promise.resolve([null, this.getResult(null, {result: 'Exclusive author criteria not matched'})]);
if (this.authorIs.include !== undefined && this.authorIs.include.length > 0) {
for (const auth of this.authorIs.include) {
if (await this.resources.testAuthorCriteria(item, auth)) {
return this.process(item);
}
}
this.logger.verbose('(Skipped) Inclusive author criteria not matched');
return Promise.resolve([null, this.getResult(null, {result: 'Inclusive author criteria not matched'})]);
}
if (this.authorIs.exclude !== undefined && this.authorIs.exclude.length > 0) {
for (const auth of this.authorIs.exclude) {
if (await this.resources.testAuthorCriteria(item, auth, false)) {
return this.process(item);
}
}
this.logger.verbose('(Skipped) Exclusive author criteria not matched');
return Promise.resolve([null, this.getResult(null, {result: 'Exclusive author criteria not matched'})]);
}
} catch (err) {
this.logger.error('Error occurred during Rule pre-process checks');
throw err;
}
try {
return this.process(item);
} catch (err) {
this.logger.error('Error occurred while processing rule');
throw err;
}
return this.process(item);
}
protected abstract process(item: Comment | Submission): Promise<[boolean, RuleResult]>;
@@ -234,6 +244,6 @@ export interface RuleJSONConfig extends IRule {
* The kind of rule to run
* @examples ["recentActivity", "repeatActivity", "author", "attribution", "history"]
*/
kind: 'recentActivity' | 'repeatActivity' | 'author' | 'attribution' | 'history'
kind: 'recentActivity' | 'repeatActivity' | 'author' | 'attribution' | 'history' | 'regex'
}

View File

@@ -30,7 +30,7 @@
"type": "string"
}
],
"description": "A value that specifies the amount of time to subtract from `NOW` to create a time range IE `NOW <---> [duration] ago`\n\nAcceptable values:\n\n**A `string` consisting of a value and a [Day.js](https://day.js.org/docs/en/durations/creating) time unit**\n\n* EX `9 days` => Range is `NOW <---> 9 days ago`\n\n**A [Day.js](https://day.js.org/docs/en/durations/creating) `object`**\n\n* EX `{\"days\": 90, \"minutes\": 15}` => Range is `NOW <---> 90 days and 15 minutes ago`\n\n**An [ISO 8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) `string`**\n\n* EX `PT15M` => 15 minutes => Range is `NOW <----> 15 minutes ago`",
"description": "A value that specifies the amount of time to subtract from `NOW` to create a time range IE `NOW <---> [duration] ago`\n\nAcceptable values:\n\n**A `string` consisting of a value and a [Day.js](https://day.js.org/docs/en/durations/creating) time unit** ([test your value](https://regexr.com/61em3))\n\n* EX `9 days` => Range is `NOW <---> 9 days ago`\n\n**A [Day.js](https://day.js.org/docs/en/durations/creating) `object`**\n\n* EX `{\"days\": 90, \"minutes\": 15}` => Range is `NOW <---> 90 days and 15 minutes ago`\n\n**An [ISO 8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) `string`** ([test your value](https://regexr.com/61em9))\n\n* EX `PT15M` => 15 minutes => Range is `NOW <----> 15 minutes ago`",
"examples": [
"90 days",
"PT15M",
@@ -670,6 +670,43 @@
],
"type": "object"
},
"CacheOptions": {
"properties": {
"auth_pass": {
"type": "string"
},
"db": {
"type": "number"
},
"host": {
"type": "string"
},
"max": {
"type": "number"
},
"port": {
"type": "number"
},
"store": {
"$ref": "#/definitions/CacheProvider"
},
"ttl": {
"type": "number"
}
},
"required": [
"store"
],
"type": "object"
},
"CacheProvider": {
"enum": [
"memory",
"none",
"redis"
],
"type": "string"
},
"CommentActionJson": {
"description": "Reply to the Activity. For a submission the reply will be a top-level comment.",
"properties": {
@@ -887,6 +924,11 @@
"pattern": "^[a-zA-Z]([\\w -]*[\\w])?$",
"type": "string"
},
"notifyOnTrigger": {
"default": false,
"description": "If notifications are configured and this is `true` then an `eventActioned` event will be sent when this check is triggered.",
"type": "boolean"
},
"rules": {
"description": "A list of Rules to run.\n\nIf `Rule` objects are triggered based on `condition` then `actions` will be performed.\n\nCan be `Rule`, `RuleSet`, or the `name` of any **named** `Rule` in your subreddit's configuration.\n\n**If `rules` is an empty array or not present then `actions` are performed immediately.**",
"items": {
@@ -906,6 +948,9 @@
{
"$ref": "#/definitions/HistoryJSONConfig"
},
{
"$ref": "#/definitions/RegexRuleJSONConfig"
},
{
"$ref": "#/definitions/RuleSetJson"
},
@@ -963,6 +1008,28 @@
},
"type": "object"
},
"DiscordProviderConfig": {
"properties": {
"name": {
"type": "string"
},
"type": {
"enum": [
"discord"
],
"type": "string"
},
"url": {
"type": "string"
}
},
"required": [
"name",
"type",
"url"
],
"type": "object"
},
"DurationObject": {
"additionalProperties": false,
"description": "A [Day.js duration object](https://day.js.org/docs/en/durations/creating)",
@@ -1313,6 +1380,70 @@
],
"type": "object"
},
"NotificationConfig": {
"properties": {
"events": {
"items": {
"anyOf": [
{
"$ref": "#/definitions/NotificationEventConfig"
},
{
"items": {
"enum": [
"configUpdated",
"eventActioned",
"pollingError",
"runStateChanged"
],
"type": "string"
},
"type": "array"
}
]
},
"type": "array"
},
"providers": {
"items": {
"$ref": "#/definitions/DiscordProviderConfig"
},
"type": "array"
}
},
"required": [
"events",
"providers"
],
"type": "object"
},
"NotificationEventConfig": {
"properties": {
"providers": {
"items": {
"type": "string"
},
"type": "array"
},
"types": {
"items": {
"enum": [
"configUpdated",
"eventActioned",
"pollingError",
"runStateChanged"
],
"type": "string"
},
"type": "array"
}
},
"required": [
"providers",
"types"
],
"type": "object"
},
"PollingOptions": {
"description": "A configuration for where, how, and when to poll Reddit for Activities to process",
"examples": [
@@ -1323,24 +1454,28 @@
}
],
"properties": {
"delayUntil": {
"description": "Delay processing Activity until it is `N` seconds old\n\nUseful if there are other bots that may process an Activity and you want this bot to run first/last/etc.\n\nIf the Activity is already `N` seconds old when it is initially retrieved no refresh of the Activity occurs (no API request is made) and it is immediately processed.",
"type": "number"
},
"interval": {
"default": 20000,
"description": "Amount of time, in milliseconds, to wait between requests",
"default": 30,
"description": "Amount of time, in seconds, to wait between requests",
"examples": [
20000
30
],
"type": "number"
},
"limit": {
"default": 25,
"default": 50,
"description": "The maximum number of Activities to get on every request",
"examples": [
25
50
],
"type": "number"
},
"pollOn": {
"description": "What source to get Activities from. The source you choose will modify how the bots behaves so choose carefully.\n\n### unmoderated (default)\n\nActivities that have yet to be approved/removed by a mod. This includes all modqueue (reports/spam) **and new submissions**.\n\nUse this if you want the bot to act like a regular moderator and act on anything that can be seen from mod tools.\n\n**Note:** Does NOT include new comments, only comments that are reported/filtered by Automoderator. If you want to process all unmoderated AND all new comments then use some version of `polling: [\"unmoderated\",\"newComm\"]`\n\n### modqueue\n\nActivities requiring moderator review, such as reported things and items caught by the spam filter.\n\nUse this if you only want the Bot to process reported/filtered Activities.\n\n### newSub\n\nGet only `Submissions` that show up in `/r/mySubreddit/new`\n\nUse this if you want the bot to process Submissions only when:\n\n* they are not initially filtered by Automoderator or\n* after they have been manually approved from modqueue\n\n## newComm\n\nGet only new `Comments`\n\nUse this if you want the bot to process Comments only when:\n\n* they are not initially filtered by Automoderator or\n* after they have been manually approved from modqueue",
"description": "What source to get Activities from. The source you choose will modify how the bots behaves so choose carefully.\n\n### unmoderated (default)\n\nActivities that have yet to be approved/removed by a mod. This includes all modqueue (reports/spam) **and new submissions**.\n\nUse this if you want the bot to act like a regular moderator and act on anything that can be seen from mod tools.\n\n**Note:** Does NOT include new comments, only comments that are reported/filtered by Automoderator. If you want to process all unmoderated AND all new comments then use some version of `polling: [\"unmoderated\",\"newComm\"]`\n\n### modqueue\n\nActivities requiring moderator review, such as reported things and items caught by the spam filter.\n\nUse this if you only want the Bot to process reported/filtered Activities.\n\n### newSub\n\nGet only `Submissions` that show up in `/r/mySubreddit/new`\n\nUse this if you want the bot to process Submissions only when:\n\n* they are not initially filtered by Automoderator or\n* after they have been manually approved from modqueue\n\n### newComm\n\nGet only new `Comments`\n\nUse this if you want the bot to process Comments only when:\n\n* they are not initially filtered by Automoderator or\n* after they have been manually approved from modqueue",
"enum": [
"modqueue",
"newComm",
@@ -1464,6 +1599,192 @@
],
"type": "object"
},
"RegexCriteria": {
"properties": {
"activityMatchThreshold": {
"default": "> 0",
"description": "An string containing a comparison operator and a value to determine how many Activities need to be \"matched\" (based on `matchThreshold` condition) to trigger the rule\n\n**Only useful when used in conjunction with `window`**. If no `window` is specified only the Activity being checked is tested (so the default should/will be used).\n\nTo disable (you are only using `totalMatchThreshold`) set to `null`\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 3` => greater than 3 Activities met the `matchThreshold` condition, Rule is triggered\n* EX `<= 10%` => less than 10% of all Activities retrieved from `window` met the `matchThreshold` condition, Rule is triggered",
"examples": [
"> 0"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"lookAt": {
"default": "all",
"description": "**When used with `window`** determines what type of Activities to retrieve",
"enum": [
"all",
"comments",
"submissions"
],
"type": "string"
},
"matchThreshold": {
"default": "> 0",
"description": "A string containing a comparison operator and a value to determine when an Activity is determined \"matched\"\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 7 => greater than 7 matches found in the Activity, Activity is matched\n* EX `<= 3` => less than 3 matches found in the Activity, Activity is matched",
"examples": [
"> 0"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s+.*)*$",
"type": "string"
},
"name": {
"description": "A descriptive name that will be used in logging and be available for templating",
"examples": [
"swear words"
],
"type": "string"
},
"regex": {
"description": "A valid Regular Expression to test content against\n\nDo not wrap expression in forward slashes\n\nEX For the expression `/reddit|FoxxMD/` use the value should be `reddit|FoxxMD`",
"examples": [
"reddit|FoxxMD"
],
"type": "string"
},
"regexFlags": {
"description": "Regex flags to use",
"type": "string"
},
"testOn": {
"default": [
"title",
"body"
],
"description": "Which content from an Activity to test the regex against\n\nOnly used if the Activity being tested is a Submission -- Comments are only tested against their content (duh)",
"items": {
"enum": [
"body",
"title",
"url"
],
"type": "string"
},
"type": "array"
},
"totalMatchThreshold": {
"default": "null",
"description": "A string containing a comparison operator and a value to determine how many total matches satisfies the criteria.\n\nIf both this and `activityMatchThreshold` are present then whichever is satisfied first will be used.\n\nIf not using `window` then this should not be used as running `matchThreshold` on one Activity is effectively the same behavior ( but I'm not gonna stop ya ¯\\\\\\_(ツ)\\_/¯ )\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 7` => greater than 7 matches found in Activity + Author history `window`\n* EX `<= 3` => less than 3 matches found in the Activity + Author history `window`",
"examples": [
"> 0"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s+.*)*$",
"type": "string"
},
"window": {
"anyOf": [
{
"$ref": "#/definitions/ActivityWindowCriteria"
},
{
"$ref": "#/definitions/DurationObject"
},
{
"type": [
"string",
"number"
]
}
],
"description": "A value to define the range of Activities to retrieve.\n\nAcceptable values:\n\n**`ActivityWindowCriteria` object**\n\nAllows specify multiple range properties and more specific behavior\n\n**A `number` of Activities to retrieve**\n\n* EX `100` => 100 Activities\n\n*****\n\nAny of the below values that specify the amount of time to subtract from `NOW` to create a time range IE `NOW <---> [duration] ago`\n\nAcceptable values:\n\n**A `string` consisting of a value and a [Day.js](https://day.js.org/docs/en/durations/creating#list-of-all-available-units) time UNIT**\n\n* EX `9 days` => Range is `NOW <---> 9 days ago`\n\n**A [Day.js](https://day.js.org/docs/en/durations/creating) `object`**\n\n* EX `{\"days\": 90, \"minutes\": 15}` => Range is `NOW <---> 90 days and 15 minutes ago`\n\n**An [ISO 8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) `string`**\n\n* EX `PT15M` => 15 minutes => Range is `NOW <----> 15 minutes ago`",
"examples": [
"90 days"
]
}
},
"required": [
"regex"
],
"type": "object"
},
"RegexRuleJSONConfig": {
"description": "Test a (list of) Regular Expression against the contents or title of an Activity\n\nOptionally, specify a `window` of the User's history to additionally test against\n\nAvailable data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):",
"properties": {
"authorIs": {
"$ref": "#/definitions/AuthorOptions",
"description": "If present then these Author criteria are checked before running the rule. If criteria fails then the rule is skipped.",
"examples": [
{
"include": [
{
"flairText": [
"Contributor",
"Veteran"
]
},
{
"isMod": true
}
]
}
]
},
"condition": {
"default": "OR",
"description": "* If `OR` then any set of Criteria that pass will trigger the Rule\n* If `AND` then all Criteria sets must pass to trigger the Rule",
"enum": [
"AND",
"OR"
],
"type": "string"
},
"criteria": {
"description": "A list of Regular Expressions and conditions under which tested Activity(ies) are matched",
"examples": [
{
"matchThreshold": "> 3",
"regex": "/reddit/"
}
],
"items": {
"$ref": "#/definitions/RegexCriteria"
},
"minItems": 1,
"type": "array"
},
"itemIs": {
"anyOf": [
{
"items": {
"$ref": "#/definitions/SubmissionState"
},
"type": "array"
},
{
"items": {
"$ref": "#/definitions/CommentState"
},
"type": "array"
}
],
"description": "A list of criteria to test the state of the `Activity` against before running the Rule.\n\nIf any set of criteria passes the Rule will be run. If the criteria fails then the Rule is skipped."
},
"kind": {
"description": "The kind of rule to run",
"enum": [
"regex"
],
"examples": [
"regex"
],
"type": "string"
},
"name": {
"description": "An optional, but highly recommended, friendly name for this rule. If not present will default to `kind`.\n\nCan only contain letters, numbers, underscore, spaces, and dashes\n\nname is used to reference Rule result data during Action content templating. See CommentAction or ReportAction for more details.",
"examples": [
"myNewRule"
],
"pattern": "^[a-zA-Z]([\\w -]*[\\w])?$",
"type": "string"
}
},
"required": [
"criteria",
"kind"
],
"type": "object"
},
"RemoveActionJson": {
"description": "Remove the Activity",
"properties": {
@@ -1621,6 +1942,11 @@
],
"type": "string"
},
"minWordCount": {
"default": 1,
"description": "For activities that are text-based this is the minimum number of words required for the activity to be considered for a repeat\n\nEX if `minimumWordCount=5` and a comment is `what about you` then it is ignored because `3 is less than 5`\n\n**For self-text submissions** -- title + body text\n\n**For comments* -- body text",
"type": "number"
},
"name": {
"description": "An optional, but highly recommended, friendly name for this rule. If not present will default to `kind`.\n\nCan only contain letters, numbers, underscore, spaces, and dashes\n\nname is used to reference Rule result data during Action content templating. See CommentAction or ReportAction for more details.",
"examples": [
@@ -1777,6 +2103,9 @@
{
"$ref": "#/definitions/HistoryJSONConfig"
},
{
"$ref": "#/definitions/RegexRuleJSONConfig"
},
{
"type": "string"
}
@@ -1942,6 +2271,11 @@
"pattern": "^[a-zA-Z]([\\w -]*[\\w])?$",
"type": "string"
},
"notifyOnTrigger": {
"default": false,
"description": "If notifications are configured and this is `true` then an `eventActioned` event will be sent when this check is triggered.",
"type": "boolean"
},
"rules": {
"description": "A list of Rules to run.\n\nIf `Rule` objects are triggered based on `condition` then `actions` will be performed.\n\nCan be `Rule`, `RuleSet`, or the `name` of any **named** `Rule` in your subreddit's configuration.\n\n**If `rules` is an empty array or not present then `actions` are performed immediately.**",
"items": {
@@ -1961,6 +2295,9 @@
{
"$ref": "#/definitions/HistoryJSONConfig"
},
{
"$ref": "#/definitions/RegexRuleJSONConfig"
},
{
"$ref": "#/definitions/RuleSetJson"
},
@@ -2031,26 +2368,41 @@
"SubredditCacheConfig": {
"properties": {
"authorTTL": {
"default": 10000,
"description": "Amount of time, in milliseconds, author activities (Comments/Submission) should be cached",
"default": 60,
"description": "Amount of time, in seconds, author activities (Comments/Submission) should be cached",
"examples": [
10000
60
],
"type": "number"
},
"provider": {
"anyOf": [
{
"$ref": "#/definitions/CacheOptions"
},
{
"enum": [
"memory",
"none",
"redis"
],
"type": "string"
}
]
},
"userNotesTTL": {
"default": 60000,
"default": 300,
"description": "Amount of time, in milliseconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached",
"examples": [
60000
300
],
"type": "number"
},
"wikiTTL": {
"default": 300000,
"description": "Amount of time, in milliseconds, wiki content pages should be cached",
"default": 300,
"description": "Amount of time, in seconds, wiki content pages should be cached",
"examples": [
300000
300
],
"type": "number"
}
@@ -2191,17 +2543,7 @@
},
"properties": {
"caching": {
"anyOf": [
{
"$ref": "#/definitions/SubredditCacheConfig"
},
{
"enum": [
false
],
"type": "boolean"
}
],
"$ref": "#/definitions/SubredditCacheConfig",
"description": "Per-subreddit config for caching TTL values. If set to `false` caching is disabled."
},
"checks": {
@@ -2246,13 +2588,16 @@
"nickname": {
"type": "string"
},
"notifications": {
"$ref": "#/definitions/NotificationConfig"
},
"polling": {
"default": [
[
"unmoderated"
]
],
"description": "An array of sources to process Activities from\n\nValues in the array may be either:\n\n**A `string` representing the `pollOn` value to use**\n\nOne of:\n\n* `unmoderated`\n* `modqueue`\n* `newSub`\n* `newComm`\n\nwith the rest of the `PollingOptions` properties as defaults\n\n**A `PollingOptions` object**\n\nIf you want to specify non-default preoperties\n\n****\nIf not specified the default is `[\"unmoderated\"]`",
"description": "An array of sources to process Activities from\n\nValues in the array may be either:\n\n**A `string` representing the `pollOn` value to use**\n\nOne of:\n\n* `unmoderated`\n* `modqueue`\n* `newSub`\n* `newComm`\n\nwith the rest of the `PollingOptions` properties as defaults\n\n**A `PollingOptions` object**\n\nIf you want to specify non-default properties\n\n****\nIf not specified the default is `[\"unmoderated\"]`",
"items": {
"anyOf": [
{

View File

@@ -0,0 +1,374 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"CacheOptions": {
"properties": {
"auth_pass": {
"type": "string"
},
"db": {
"type": "number"
},
"host": {
"type": "string"
},
"max": {
"type": "number"
},
"port": {
"type": "number"
},
"store": {
"$ref": "#/definitions/CacheProvider"
},
"ttl": {
"type": "number"
}
},
"required": [
"store"
],
"type": "object"
},
"CacheProvider": {
"enum": [
"memory",
"none",
"redis"
],
"type": "string"
},
"DiscordProviderConfig": {
"properties": {
"name": {
"type": "string"
},
"type": {
"enum": [
"discord"
],
"type": "string"
},
"url": {
"type": "string"
}
},
"required": [
"name",
"type",
"url"
],
"type": "object"
},
"NotificationConfig": {
"properties": {
"events": {
"items": {
"anyOf": [
{
"$ref": "#/definitions/NotificationEventConfig"
},
{
"items": {
"enum": [
"configUpdated",
"eventActioned",
"pollingError",
"runStateChanged"
],
"type": "string"
},
"type": "array"
}
]
},
"type": "array"
},
"providers": {
"items": {
"$ref": "#/definitions/DiscordProviderConfig"
},
"type": "array"
}
},
"required": [
"events",
"providers"
],
"type": "object"
},
"NotificationEventConfig": {
"properties": {
"providers": {
"items": {
"type": "string"
},
"type": "array"
},
"types": {
"items": {
"enum": [
"configUpdated",
"eventActioned",
"pollingError",
"runStateChanged"
],
"type": "string"
},
"type": "array"
}
},
"required": [
"providers",
"types"
],
"type": "object"
},
"PollingDefaults": {
"properties": {
"delayUntil": {
"description": "Delay processing Activity until it is `N` seconds old\n\nUseful if there are other bots that may process an Activity and you want this bot to run first/last/etc.\n\nIf the Activity is already `N` seconds old when it is initially retrieved no refresh of the Activity occurs (no API request is made) and it is immediately processed.",
"type": "number"
},
"interval": {
"default": 30,
"description": "Amount of time, in seconds, to wait between requests",
"examples": [
30
],
"type": "number"
},
"limit": {
"default": 50,
"description": "The maximum number of Activities to get on every request",
"examples": [
50
],
"type": "number"
}
},
"type": "object"
}
},
"properties": {
"api": {
"properties": {
"hardLimit": {
"type": "number"
},
"softLimit": {
"type": "number"
}
},
"type": "object"
},
"caching": {
"anyOf": [
{
"properties": {
"authorTTL": {
"default": 10000,
"description": "Amount of time, in milliseconds, author activities (Comments/Submission) should be cached",
"examples": [
10000
],
"type": "number"
},
"provider": {
"anyOf": [
{
"$ref": "#/definitions/CacheOptions"
},
{
"enum": [
"memory",
"none",
"redis"
],
"type": "string"
}
]
},
"userNotesTTL": {
"default": 60000,
"description": "Amount of time, in milliseconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached",
"examples": [
60000
],
"type": "number"
},
"wikiTTL": {
"default": 300000,
"description": "Amount of time, in milliseconds, wiki content pages should be cached",
"examples": [
300000
],
"type": "number"
}
},
"type": "object"
},
{
"enum": [
"memory",
"none",
"redis"
],
"type": "string"
}
]
},
"credentials": {
"properties": {
"accessToken": {
"type": "string"
},
"clientId": {
"type": "string"
},
"clientSecret": {
"type": "string"
},
"redirectUri": {
"type": "string"
},
"refreshToken": {
"type": "string"
}
},
"type": "object"
},
"logging": {
"properties": {
"level": {
"enum": [
"debug",
"error",
"info",
"verbose",
"warn"
],
"type": "string"
},
"path": {
"type": "string"
}
},
"type": "object"
},
"notifications": {
"$ref": "#/definitions/NotificationConfig"
},
"operator": {
"properties": {
"display": {
"type": "string"
},
"name": {
"type": "string"
}
},
"type": "object"
},
"polling": {
"allOf": [
{
"$ref": "#/definitions/PollingDefaults"
},
{
"properties": {
"interval": {
"type": "number"
},
"limit": {
"type": "number"
},
"sharedMod": {
"type": "boolean"
}
},
"type": "object"
}
]
},
"snoowrap": {
"properties": {
"debug": {
"type": "boolean"
},
"proxy": {
"type": "string"
}
},
"type": "object"
},
"subreddits": {
"properties": {
"dryRun": {
"type": "boolean"
},
"heartbeatInterval": {
"type": "number"
},
"names": {
"items": {
"type": "string"
},
"type": "array"
},
"wikiConfig": {
"type": "string"
}
},
"type": "object"
},
"web": {
"properties": {
"enabled": {
"type": "boolean"
},
"logLevel": {
"enum": [
"debug",
"error",
"info",
"verbose",
"warn"
],
"type": "string"
},
"maxLogs": {
"type": "number"
},
"port": {
"type": "number"
},
"session": {
"properties": {
"provider": {
"anyOf": [
{
"$ref": "#/definitions/CacheOptions"
},
{
"enum": [
"memory",
"redis"
],
"type": "string"
}
]
},
"secret": {
"type": "string"
}
},
"type": "object"
}
},
"type": "object"
}
},
"type": "object"
}

View File

@@ -16,6 +16,9 @@
{
"$ref": "#/definitions/HistoryJSONConfig"
},
{
"$ref": "#/definitions/RegexRuleJSONConfig"
},
{
"type": "string"
}
@@ -50,7 +53,7 @@
"type": "string"
}
],
"description": "A value that specifies the amount of time to subtract from `NOW` to create a time range IE `NOW <---> [duration] ago`\n\nAcceptable values:\n\n**A `string` consisting of a value and a [Day.js](https://day.js.org/docs/en/durations/creating) time unit**\n\n* EX `9 days` => Range is `NOW <---> 9 days ago`\n\n**A [Day.js](https://day.js.org/docs/en/durations/creating) `object`**\n\n* EX `{\"days\": 90, \"minutes\": 15}` => Range is `NOW <---> 90 days and 15 minutes ago`\n\n**An [ISO 8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) `string`**\n\n* EX `PT15M` => 15 minutes => Range is `NOW <----> 15 minutes ago`",
"description": "A value that specifies the amount of time to subtract from `NOW` to create a time range IE `NOW <---> [duration] ago`\n\nAcceptable values:\n\n**A `string` consisting of a value and a [Day.js](https://day.js.org/docs/en/durations/creating) time unit** ([test your value](https://regexr.com/61em3))\n\n* EX `9 days` => Range is `NOW <---> 9 days ago`\n\n**A [Day.js](https://day.js.org/docs/en/durations/creating) `object`**\n\n* EX `{\"days\": 90, \"minutes\": 15}` => Range is `NOW <---> 90 days and 15 minutes ago`\n\n**An [ISO 8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) `string`** ([test your value](https://regexr.com/61em9))\n\n* EX `PT15M` => 15 minutes => Range is `NOW <----> 15 minutes ago`",
"examples": [
"90 days",
"PT15M",
@@ -856,6 +859,192 @@
],
"type": "object"
},
"RegexCriteria": {
"properties": {
"activityMatchThreshold": {
"default": "> 0",
"description": "An string containing a comparison operator and a value to determine how many Activities need to be \"matched\" (based on `matchThreshold` condition) to trigger the rule\n\n**Only useful when used in conjunction with `window`**. If no `window` is specified only the Activity being checked is tested (so the default should/will be used).\n\nTo disable (you are only using `totalMatchThreshold`) set to `null`\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 3` => greater than 3 Activities met the `matchThreshold` condition, Rule is triggered\n* EX `<= 10%` => less than 10% of all Activities retrieved from `window` met the `matchThreshold` condition, Rule is triggered",
"examples": [
"> 0"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"lookAt": {
"default": "all",
"description": "**When used with `window`** determines what type of Activities to retrieve",
"enum": [
"all",
"comments",
"submissions"
],
"type": "string"
},
"matchThreshold": {
"default": "> 0",
"description": "A string containing a comparison operator and a value to determine when an Activity is determined \"matched\"\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 7 => greater than 7 matches found in the Activity, Activity is matched\n* EX `<= 3` => less than 3 matches found in the Activity, Activity is matched",
"examples": [
"> 0"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s+.*)*$",
"type": "string"
},
"name": {
"description": "A descriptive name that will be used in logging and be available for templating",
"examples": [
"swear words"
],
"type": "string"
},
"regex": {
"description": "A valid Regular Expression to test content against\n\nDo not wrap expression in forward slashes\n\nEX For the expression `/reddit|FoxxMD/` use the value should be `reddit|FoxxMD`",
"examples": [
"reddit|FoxxMD"
],
"type": "string"
},
"regexFlags": {
"description": "Regex flags to use",
"type": "string"
},
"testOn": {
"default": [
"title",
"body"
],
"description": "Which content from an Activity to test the regex against\n\nOnly used if the Activity being tested is a Submission -- Comments are only tested against their content (duh)",
"items": {
"enum": [
"body",
"title",
"url"
],
"type": "string"
},
"type": "array"
},
"totalMatchThreshold": {
"default": "null",
"description": "A string containing a comparison operator and a value to determine how many total matches satisfies the criteria.\n\nIf both this and `activityMatchThreshold` are present then whichever is satisfied first will be used.\n\nIf not using `window` then this should not be used as running `matchThreshold` on one Activity is effectively the same behavior ( but I'm not gonna stop ya ¯\\\\\\_(ツ)\\_/¯ )\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 7` => greater than 7 matches found in Activity + Author history `window`\n* EX `<= 3` => less than 3 matches found in the Activity + Author history `window`",
"examples": [
"> 0"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s+.*)*$",
"type": "string"
},
"window": {
"anyOf": [
{
"$ref": "#/definitions/ActivityWindowCriteria"
},
{
"$ref": "#/definitions/DurationObject"
},
{
"type": [
"string",
"number"
]
}
],
"description": "A value to define the range of Activities to retrieve.\n\nAcceptable values:\n\n**`ActivityWindowCriteria` object**\n\nAllows specify multiple range properties and more specific behavior\n\n**A `number` of Activities to retrieve**\n\n* EX `100` => 100 Activities\n\n*****\n\nAny of the below values that specify the amount of time to subtract from `NOW` to create a time range IE `NOW <---> [duration] ago`\n\nAcceptable values:\n\n**A `string` consisting of a value and a [Day.js](https://day.js.org/docs/en/durations/creating#list-of-all-available-units) time UNIT**\n\n* EX `9 days` => Range is `NOW <---> 9 days ago`\n\n**A [Day.js](https://day.js.org/docs/en/durations/creating) `object`**\n\n* EX `{\"days\": 90, \"minutes\": 15}` => Range is `NOW <---> 90 days and 15 minutes ago`\n\n**An [ISO 8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) `string`**\n\n* EX `PT15M` => 15 minutes => Range is `NOW <----> 15 minutes ago`",
"examples": [
"90 days"
]
}
},
"required": [
"regex"
],
"type": "object"
},
"RegexRuleJSONConfig": {
"description": "Test a (list of) Regular Expression against the contents or title of an Activity\n\nOptionally, specify a `window` of the User's history to additionally test against\n\nAvailable data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):",
"properties": {
"authorIs": {
"$ref": "#/definitions/AuthorOptions",
"description": "If present then these Author criteria are checked before running the rule. If criteria fails then the rule is skipped.",
"examples": [
{
"include": [
{
"flairText": [
"Contributor",
"Veteran"
]
},
{
"isMod": true
}
]
}
]
},
"condition": {
"default": "OR",
"description": "* If `OR` then any set of Criteria that pass will trigger the Rule\n* If `AND` then all Criteria sets must pass to trigger the Rule",
"enum": [
"AND",
"OR"
],
"type": "string"
},
"criteria": {
"description": "A list of Regular Expressions and conditions under which tested Activity(ies) are matched",
"examples": [
{
"matchThreshold": "> 3",
"regex": "/reddit/"
}
],
"items": {
"$ref": "#/definitions/RegexCriteria"
},
"minItems": 1,
"type": "array"
},
"itemIs": {
"anyOf": [
{
"items": {
"$ref": "#/definitions/SubmissionState"
},
"type": "array"
},
{
"items": {
"$ref": "#/definitions/CommentState"
},
"type": "array"
}
],
"description": "A list of criteria to test the state of the `Activity` against before running the Rule.\n\nIf any set of criteria passes the Rule will be run. If the criteria fails then the Rule is skipped."
},
"kind": {
"description": "The kind of rule to run",
"enum": [
"regex"
],
"examples": [
"regex"
],
"type": "string"
},
"name": {
"description": "An optional, but highly recommended, friendly name for this rule. If not present will default to `kind`.\n\nCan only contain letters, numbers, underscore, spaces, and dashes\n\nname is used to reference Rule result data during Action content templating. See CommentAction or ReportAction for more details.",
"examples": [
"myNewRule"
],
"pattern": "^[a-zA-Z]([\\w -]*[\\w])?$",
"type": "string"
}
},
"required": [
"criteria",
"kind"
],
"type": "object"
},
"RepeatActivityJSONConfig": {
"description": "Checks a user's history for Submissions with identical content\n\nAvailable data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):\n\n```\ncount => Total number of repeat Submissions\nthreshold => The threshold you configured for this Rule to trigger\nurl => Url of the submission that triggered the rule\n```",
"properties": {
@@ -944,6 +1133,11 @@
],
"type": "string"
},
"minWordCount": {
"default": 1,
"description": "For activities that are text-based this is the minimum number of words required for the activity to be considered for a repeat\n\nEX if `minimumWordCount=5` and a comment is `what about you` then it is ignored because `3 is less than 5`\n\n**For self-text submissions** -- title + body text\n\n**For comments* -- body text",
"type": "number"
},
"name": {
"description": "An optional, but highly recommended, friendly name for this rule. If not present will default to `kind`.\n\nCan only contain letters, numbers, underscore, spaces, and dashes\n\nname is used to reference Rule result data during Action content templating. See CommentAction or ReportAction for more details.",
"examples": [

View File

@@ -30,7 +30,7 @@
"type": "string"
}
],
"description": "A value that specifies the amount of time to subtract from `NOW` to create a time range IE `NOW <---> [duration] ago`\n\nAcceptable values:\n\n**A `string` consisting of a value and a [Day.js](https://day.js.org/docs/en/durations/creating) time unit**\n\n* EX `9 days` => Range is `NOW <---> 9 days ago`\n\n**A [Day.js](https://day.js.org/docs/en/durations/creating) `object`**\n\n* EX `{\"days\": 90, \"minutes\": 15}` => Range is `NOW <---> 90 days and 15 minutes ago`\n\n**An [ISO 8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) `string`**\n\n* EX `PT15M` => 15 minutes => Range is `NOW <----> 15 minutes ago`",
"description": "A value that specifies the amount of time to subtract from `NOW` to create a time range IE `NOW <---> [duration] ago`\n\nAcceptable values:\n\n**A `string` consisting of a value and a [Day.js](https://day.js.org/docs/en/durations/creating) time unit** ([test your value](https://regexr.com/61em3))\n\n* EX `9 days` => Range is `NOW <---> 9 days ago`\n\n**A [Day.js](https://day.js.org/docs/en/durations/creating) `object`**\n\n* EX `{\"days\": 90, \"minutes\": 15}` => Range is `NOW <---> 90 days and 15 minutes ago`\n\n**An [ISO 8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) `string`** ([test your value](https://regexr.com/61em9))\n\n* EX `PT15M` => 15 minutes => Range is `NOW <----> 15 minutes ago`",
"examples": [
"90 days",
"PT15M",
@@ -836,6 +836,192 @@
],
"type": "object"
},
"RegexCriteria": {
"properties": {
"activityMatchThreshold": {
"default": "> 0",
"description": "An string containing a comparison operator and a value to determine how many Activities need to be \"matched\" (based on `matchThreshold` condition) to trigger the rule\n\n**Only useful when used in conjunction with `window`**. If no `window` is specified only the Activity being checked is tested (so the default should/will be used).\n\nTo disable (you are only using `totalMatchThreshold`) set to `null`\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 3` => greater than 3 Activities met the `matchThreshold` condition, Rule is triggered\n* EX `<= 10%` => less than 10% of all Activities retrieved from `window` met the `matchThreshold` condition, Rule is triggered",
"examples": [
"> 0"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"lookAt": {
"default": "all",
"description": "**When used with `window`** determines what type of Activities to retrieve",
"enum": [
"all",
"comments",
"submissions"
],
"type": "string"
},
"matchThreshold": {
"default": "> 0",
"description": "A string containing a comparison operator and a value to determine when an Activity is determined \"matched\"\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 7 => greater than 7 matches found in the Activity, Activity is matched\n* EX `<= 3` => less than 3 matches found in the Activity, Activity is matched",
"examples": [
"> 0"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s+.*)*$",
"type": "string"
},
"name": {
"description": "A descriptive name that will be used in logging and be available for templating",
"examples": [
"swear words"
],
"type": "string"
},
"regex": {
"description": "A valid Regular Expression to test content against\n\nDo not wrap expression in forward slashes\n\nEX For the expression `/reddit|FoxxMD/` use the value should be `reddit|FoxxMD`",
"examples": [
"reddit|FoxxMD"
],
"type": "string"
},
"regexFlags": {
"description": "Regex flags to use",
"type": "string"
},
"testOn": {
"default": [
"title",
"body"
],
"description": "Which content from an Activity to test the regex against\n\nOnly used if the Activity being tested is a Submission -- Comments are only tested against their content (duh)",
"items": {
"enum": [
"body",
"title",
"url"
],
"type": "string"
},
"type": "array"
},
"totalMatchThreshold": {
"default": "null",
"description": "A string containing a comparison operator and a value to determine how many total matches satisfies the criteria.\n\nIf both this and `activityMatchThreshold` are present then whichever is satisfied first will be used.\n\nIf not using `window` then this should not be used as running `matchThreshold` on one Activity is effectively the same behavior ( but I'm not gonna stop ya ¯\\\\\\_(ツ)\\_/¯ )\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 7` => greater than 7 matches found in Activity + Author history `window`\n* EX `<= 3` => less than 3 matches found in the Activity + Author history `window`",
"examples": [
"> 0"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s+.*)*$",
"type": "string"
},
"window": {
"anyOf": [
{
"$ref": "#/definitions/ActivityWindowCriteria"
},
{
"$ref": "#/definitions/DurationObject"
},
{
"type": [
"string",
"number"
]
}
],
"description": "A value to define the range of Activities to retrieve.\n\nAcceptable values:\n\n**`ActivityWindowCriteria` object**\n\nAllows specify multiple range properties and more specific behavior\n\n**A `number` of Activities to retrieve**\n\n* EX `100` => 100 Activities\n\n*****\n\nAny of the below values that specify the amount of time to subtract from `NOW` to create a time range IE `NOW <---> [duration] ago`\n\nAcceptable values:\n\n**A `string` consisting of a value and a [Day.js](https://day.js.org/docs/en/durations/creating#list-of-all-available-units) time UNIT**\n\n* EX `9 days` => Range is `NOW <---> 9 days ago`\n\n**A [Day.js](https://day.js.org/docs/en/durations/creating) `object`**\n\n* EX `{\"days\": 90, \"minutes\": 15}` => Range is `NOW <---> 90 days and 15 minutes ago`\n\n**An [ISO 8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) `string`**\n\n* EX `PT15M` => 15 minutes => Range is `NOW <----> 15 minutes ago`",
"examples": [
"90 days"
]
}
},
"required": [
"regex"
],
"type": "object"
},
"RegexRuleJSONConfig": {
"description": "Test a (list of) Regular Expression against the contents or title of an Activity\n\nOptionally, specify a `window` of the User's history to additionally test against\n\nAvailable data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):",
"properties": {
"authorIs": {
"$ref": "#/definitions/AuthorOptions",
"description": "If present then these Author criteria are checked before running the rule. If criteria fails then the rule is skipped.",
"examples": [
{
"include": [
{
"flairText": [
"Contributor",
"Veteran"
]
},
{
"isMod": true
}
]
}
]
},
"condition": {
"default": "OR",
"description": "* If `OR` then any set of Criteria that pass will trigger the Rule\n* If `AND` then all Criteria sets must pass to trigger the Rule",
"enum": [
"AND",
"OR"
],
"type": "string"
},
"criteria": {
"description": "A list of Regular Expressions and conditions under which tested Activity(ies) are matched",
"examples": [
{
"matchThreshold": "> 3",
"regex": "/reddit/"
}
],
"items": {
"$ref": "#/definitions/RegexCriteria"
},
"minItems": 1,
"type": "array"
},
"itemIs": {
"anyOf": [
{
"items": {
"$ref": "#/definitions/SubmissionState"
},
"type": "array"
},
{
"items": {
"$ref": "#/definitions/CommentState"
},
"type": "array"
}
],
"description": "A list of criteria to test the state of the `Activity` against before running the Rule.\n\nIf any set of criteria passes the Rule will be run. If the criteria fails then the Rule is skipped."
},
"kind": {
"description": "The kind of rule to run",
"enum": [
"regex"
],
"examples": [
"regex"
],
"type": "string"
},
"name": {
"description": "An optional, but highly recommended, friendly name for this rule. If not present will default to `kind`.\n\nCan only contain letters, numbers, underscore, spaces, and dashes\n\nname is used to reference Rule result data during Action content templating. See CommentAction or ReportAction for more details.",
"examples": [
"myNewRule"
],
"pattern": "^[a-zA-Z]([\\w -]*[\\w])?$",
"type": "string"
}
},
"required": [
"criteria",
"kind"
],
"type": "object"
},
"RepeatActivityJSONConfig": {
"description": "Checks a user's history for Submissions with identical content\n\nAvailable data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):\n\n```\ncount => Total number of repeat Submissions\nthreshold => The threshold you configured for this Rule to trigger\nurl => Url of the submission that triggered the rule\n```",
"properties": {
@@ -924,6 +1110,11 @@
],
"type": "string"
},
"minWordCount": {
"default": 1,
"description": "For activities that are text-based this is the minimum number of words required for the activity to be considered for a repeat\n\nEX if `minimumWordCount=5` and a comment is `what about you` then it is ignored because `3 is less than 5`\n\n**For self-text submissions** -- title + body text\n\n**For comments* -- body text",
"type": "number"
},
"name": {
"description": "An optional, but highly recommended, friendly name for this rule. If not present will default to `kind`.\n\nCan only contain letters, numbers, underscore, spaces, and dashes\n\nname is used to reference Rule result data during Action content templating. See CommentAction or ReportAction for more details.",
"examples": [
@@ -1122,6 +1313,9 @@
{
"$ref": "#/definitions/HistoryJSONConfig"
},
{
"$ref": "#/definitions/RegexRuleJSONConfig"
},
{
"type": "string"
}

85
src/Server/helper.ts Normal file
View File

@@ -0,0 +1,85 @@
import {addAsync, Router} from '@awaitjs/express';
import express from 'express';
import Snoowrap from "snoowrap";
import {permissions} from "../util";
import {getLogger} from "../Utils/loggerFactory";
import {OperatorConfig} from "../Common/interfaces";
const app = addAsync(express());
const router = Router();
app.set('views', `${__dirname}/views`);
app.set('view engine', 'ejs');
app.use(router);
const helperServer = async function (options: OperatorConfig) {
let rUri: string;
const {
credentials: {
clientId,
clientSecret,
redirectUri
},
web: {
port
}
} = options;
const server = await app.listen(port);
const logger = getLogger(options);
logger.info(`Helper UI started: http://localhost:${port}`);
app.getAsync('/', async (req, res) => {
res.render('helper', {
redirectUri
});
});
app.getAsync('/auth', async (req, res) => {
rUri = req.query.redirect as string;
let permissionsList = permissions;
const includeWikiEdit = (req.query.wikiEdit as any).toString() === "1";
if (!includeWikiEdit) {
permissionsList = permissionsList.filter(x => x !== 'wikiedit');
}
const authUrl = Snoowrap.getAuthUrl({
clientId,
scope: permissionsList,
redirectUri: rUri as string,
permanent: true,
});
return res.redirect(authUrl);
});
app.getAsync(/.*callback$/, async (req, res) => {
const {error, code} = req.query as any;
if (error !== undefined) {
let errContent: string;
switch (error) {
case 'access_denied':
errContent = 'You must <b>Allow</b> this application to connect in order to proceed.';
break;
default:
errContent = error;
}
return res.render('error', {error: errContent, });
}
const client = await Snoowrap.fromAuthCode({
userAgent: `web:contextBot:web`,
clientId,
clientSecret,
redirectUri: rUri,
code: code as string,
});
// @ts-ignore
const user = await client.getMe();
res.render('callback', {
accessToken: client.accessToken,
refreshToken: client.refreshToken,
});
});
}
export default helperServer;

View File

@@ -0,0 +1,85 @@
/*https://codepen.io/bheberer/pen/BaNZKmq*/
.toggle-checkbox {
position: absolute;
opacity: 0;
cursor: pointer;
height: 0;
width: 0;
}
.toggle-slot {
position: relative;
height: 10em;
width: 20em;
border: 2px solid #e4e7ec;
padding: 2.5px;
border-radius: 10em;
background-color: white;
/*box-shadow: 0px 10px 25px #e4e7ec;*/
transition: background-color 250ms;
}
.toggle-checkbox:checked ~ .toggle-slot {
background-color: transparent;
}
.toggle-button {
transform: translate(9.75em, 1em);
position: absolute;
height: 4.5em;
width: 4.5em;
border-radius: 50%;
background-color: #ffeccf;
box-shadow: inset 0px 0px 0px 0.75em #ffbb52;
transition: background-color 250ms, border-color 250ms, transform 500ms cubic-bezier(.26,2,.46,.71);
}
.toggle-checkbox:checked ~ .toggle-slot .toggle-button {
background-color: #485367;
box-shadow: inset 0px 0px 0px 0.75em white;
transform: translate(1.75em, 1em);
}
.sun-icon {
position: absolute;
height: 6em;
width: 6em;
color: #ffbb52;
}
.sun-icon-wrapper {
position: absolute;
height: 6em;
width: 6em;
opacity: 1;
transform: translate(1.1em, 0.1em) rotate(15deg);
transform-origin: 50% 50%;
transition: opacity 150ms, transform 500ms cubic-bezier(.26,2,.46,.71);
}
.toggle-checkbox:checked ~ .toggle-slot .sun-icon-wrapper {
opacity: 0;
transform: translate(3em, 2em) rotate(0deg);
}
.moon-icon {
position: absolute;
height: 6em;
width: 6em;
color: white;
}
.moon-icon-wrapper {
position: absolute;
height: 6em;
width: 6em;
opacity: 0;
transform: translate(11em, 1em) rotate(0deg);
transform-origin: 50% 50%;
transition: opacity 150ms, transform 500ms cubic-bezier(.26,2.5,.46,.71);
}
.toggle-checkbox:checked ~ .toggle-slot .moon-icon-wrapper {
opacity: 1;
transform: translate(9em, 0em) rotate(-15deg);
}

706
src/Server/server.ts Normal file
View File

@@ -0,0 +1,706 @@
import {addAsync, Router} from '@awaitjs/express';
import express from 'express';
import bodyParser from 'body-parser';
import session from 'express-session';
import {Cache} from 'cache-manager';
// @ts-ignore
import CacheManagerStore from 'express-session-cache-manager'
import Snoowrap from "snoowrap";
import {App} from "../App";
import dayjs from 'dayjs';
import {Writable} from "stream";
import winston from 'winston';
import {Server as SocketServer} from 'socket.io';
import sharedSession from 'express-socket.io-session';
import Submission from "snoowrap/dist/objects/Submission";
import EventEmitter from "events";
import tcpUsed from 'tcp-port-used';
import {
boolToString, cacheStats,
COMMENT_URL_ID, createCacheManager,
filterLogBySubreddit,
formatLogLineToHtml, formatNumber,
isLogLineMinLevel,
LogEntry,
parseLinkIdentifier,
parseSubredditLogName, parseSubredditName,
pollingInfo, SUBMISSION_URL_ID
} from "../util";
import {Manager} from "../Subreddit/Manager";
import {getLogger} from "../Utils/loggerFactory";
import LoggedError from "../Utils/LoggedError";
import {OperatorConfig, ResourceStats, RUNNING, STOPPED, SYSTEM, USER} from "../Common/interfaces";
import http from "http";
import SimpleError from "../Utils/SimpleError";
const app = addAsync(express());
const router = Router();
app.use(router);
app.use(bodyParser.json());
app.set('views', `${__dirname}/views`);
app.set('view engine', 'ejs');
interface ConnectedUserInfo {
subreddits: string[],
level?: string,
user: string
}
const commentReg = parseLinkIdentifier([COMMENT_URL_ID]);
const submissionReg = parseLinkIdentifier([SUBMISSION_URL_ID]);
const connectedUsers: Map<string, ConnectedUserInfo> = new Map();
const availableLevels = ['error', 'warn', 'info', 'verbose', 'debug'];
let operatorSessionId: (string | undefined);
declare module 'express-session' {
interface SessionData {
user: string,
subreddits: string[],
lastCheck?: number,
limit?: number,
sort?: string,
level?: string,
}
}
const subLogMap: Map<string, LogEntry[]> = new Map();
const emitter = new EventEmitter();
const stream = new Writable()
const rcbServer = async function (options: OperatorConfig) {
const {
credentials: {
clientId,
clientSecret,
redirectUri
},
operator: {
name,
display,
},
web: {
port,
session: {
provider,
secret,
},
maxLogs,
},
} = options;
let botSubreddits: string[] = [];
stream._write = (chunk, encoding, next) => {
let logLine = chunk.toString();
const now = Date.now();
const logEntry: LogEntry = [now, logLine];
const subName = parseSubredditLogName(logLine);
if (subName !== undefined && (botSubreddits.length === 0 || botSubreddits.includes(subName))) {
const subLogs = subLogMap.get(subName) || [];
subLogs.unshift(logEntry);
subLogMap.set(subName, subLogs.slice(0, maxLogs + 1));
} else {
const appLogs = subLogMap.get('app') || [];
appLogs.unshift(logEntry);
subLogMap.set('app', appLogs.slice(0, maxLogs + 1));
}
emitter.emit('log', logLine);
next();
}
const streamTransport = new winston.transports.Stream({
stream,
})
const logger = getLogger({...options.logging, additionalTransports: [streamTransport]})
if (await tcpUsed.check(port)) {
throw new SimpleError(`Specified port for web interface (${port}) is in use or not available. Cannot start web server.`);
}
let server: http.Server,
io: SocketServer;
try {
server = await app.listen(port);
io = new SocketServer(server);
} catch (err) {
logger.error('Error occurred while initializing web or socket.io server', err);
err.logged = true;
throw err;
}
logger.info(`Web UI started: http://localhost:${port}`);
const bot = new App(options);
await bot.testClient();
app.use('/public', express.static(`${__dirname}/public`));
await bot.buildManagers();
botSubreddits = bot.subManagers.map(x => x.displayLabel);
// TODO potentially prune subLogMap of user keys? shouldn't have happened this early though
if(provider.store === 'none') {
logger.warn(`Cannot use 'none' for session store or else no one can use the interface...falling back to 'memory'`);
provider.store = 'memory';
}
const sessionObj = session({
cookie: {
maxAge: provider.ttl,
},
store: new CacheManagerStore(createCacheManager(provider) as Cache),
resave: false,
saveUninitialized: false,
secret,
});
app.use(sessionObj);
io.use(sharedSession(sessionObj));
io.on("connection", function (socket) {
// @ts-ignore
if (socket.handshake.session.user !== undefined) {
// @ts-ignore
socket.join(socket.handshake.session.id);
// @ts-ignore
connectedUsers.set(socket.handshake.session.id, {
// @ts-ignore
subreddits: socket.handshake.session.subreddits,
// @ts-ignore
level: socket.handshake.session.level,
// @ts-ignore
user: socket.handshake.session.user
});
// @ts-ignore
if (name !== undefined && socket.handshake.session.user.toLowerCase() === name.toLowerCase()) {
// @ts-ignore
operatorSessionId = socket.handshake.session.id;
}
}
});
io.on('disconnect', (socket) => {
// @ts-ignore
connectedUsers.delete(socket.handshake.session.id);
if (operatorSessionId === socket.handshake.session.id) {
operatorSessionId = undefined;
}
});
const redditUserMiddleware = async (req: express.Request, res: express.Response, next: Function) => {
if (req.session.user === undefined) {
return res.redirect('/login');
}
next();
}
const booleanMiddle = (boolParams: string[] = []) => async (req: express.Request, res: express.Response, next: Function) => {
if (req.query !== undefined) {
for (const b of boolParams) {
const bVal = req.query[b] as any;
if (bVal !== undefined) {
let truthyVal: boolean;
if (bVal === 'true' || bVal === true || bVal === 1 || bVal === '1') {
truthyVal = true;
} else if (bVal === 'false' || bVal === false || bVal === 0 || bVal === '0') {
truthyVal = false;
} else {
res.status(400);
res.send(`Expected query parameter ${b} to be a truthy value. Got "${bVal}" but must be one of these: true/false, 1/0`);
return;
}
// @ts-ignore
req.query[b] = truthyVal;
}
}
}
next();
}
app.getAsync('/logout', async (req, res) => {
// @ts-ignore
req.session.destroy();
res.send('Bye!');
})
app.getAsync('/login', async (req, res) => {
if(redirectUri === undefined) {
return res.render('error', {error: `No <b>redirectUri</b> was specified through environmental variables or program argument. This must be provided in order to use the web interface.`});
}
const authUrl = Snoowrap.getAuthUrl({
clientId,
scope: ['identity', 'mysubreddits'],
redirectUri: redirectUri as string,
permanent: false,
});
return res.redirect(authUrl);
});
app.getAsync(/.*callback$/, async (req, res) => {
const {error, code} = req.query as any;
if (error !== undefined) {
let errContent: string;
switch (error) {
case 'access_denied':
errContent = 'You must <b>Allow</b> this application to connect in order to proceed.';
break;
default:
errContent = error;
}
return res.render('error', {error: errContent, operatorDisplay: display});
}
const client = await Snoowrap.fromAuthCode({
userAgent: `web:contextBot:web`,
clientId,
clientSecret,
redirectUri: redirectUri as string,
code: code as string,
});
// @ts-ignore
const user = await client.getMe().name as string;
const subs = await client.getModeratedSubreddits();
req.session['user'] = user;
// @ts-ignore
req.session['subreddits'] = name !== undefined && name.toLowerCase() === user.toLowerCase() ? bot.subManagers.map(x => x.displayLabel) : subs.reduce((acc: string[], x) => {
const sm = bot.subManagers.find(y => y.subreddit.display_name === x.display_name);
if (sm !== undefined) {
return acc.concat(sm.displayLabel);
}
return acc;
}, []);
req.session['lastCheck'] = dayjs().unix();
res.redirect('/');
});
app.use('/', redditUserMiddleware);
app.getAsync('/', async (req, res) => {
const {
subreddits = [],
user: userVal,
limit = 200,
level = 'verbose',
sort = 'descending',
lastCheck
} = req.session;
const user = userVal as string;
const isOperator = name !== undefined && name.toLowerCase() === user.toLowerCase()
if ((req.session.subreddits as string[]).length === 0 && !isOperator) {
return res.render('noSubs', {operatorDisplay: display});
}
const logs = filterLogBySubreddit(subLogMap, req.session.subreddits, {
level,
operator: isOperator,
user,
// @ts-ignore
sort,
limit
});
const subManagerData = [];
for (const s of subreddits) {
const m = bot.subManagers.find(x => x.displayLabel === s) as Manager;
const sd = {
name: s,
//linkName: s.replace(/\W/g, ''),
logs: logs.get(s) || [], // provide a default empty value in case we truly have not logged anything for this subreddit yet
botState: m.botState,
eventsState: m.eventsState,
queueState: m.queueState,
indicator: 'gray',
queuedActivities: m.queue.length(),
runningActivities: m.queue.running(),
maxWorkers: m.queue.concurrency,
validConfig: boolToString(m.validConfigLoaded),
dryRun: boolToString(m.dryRun === true),
pollingInfo: m.pollOptions.length === 0 ? ['nothing :('] : m.pollOptions.map(pollingInfo),
checks: {
submissions: m.submissionChecks === undefined ? 0 : m.submissionChecks.length,
comments: m.commentChecks === undefined ? 0 : m.commentChecks.length,
},
wikiLocation: m.wikiLocation,
wikiHref: `https://reddit.com/r/${m.subreddit.display_name}/wiki/${m.wikiLocation}`,
wikiRevisionHuman: m.lastWikiRevision === undefined ? 'N/A' : `${dayjs.duration(dayjs().diff(m.lastWikiRevision)).humanize()} ago`,
wikiRevision: m.lastWikiRevision === undefined ? 'N/A' : m.lastWikiRevision.local().format('MMMM D, YYYY h:mm A Z'),
wikiLastCheckHuman: `${dayjs.duration(dayjs().diff(m.lastWikiCheck)).humanize()} ago`,
wikiLastCheck: m.lastWikiCheck.local().format('MMMM D, YYYY h:mm A Z'),
stats: await m.getStats(),
startedAt: 'Not Started',
startedAtHuman: 'Not Started',
delayBy: m.delayBy === undefined ? 'No' : `Delayed by ${m.delayBy} sec`,
};
// TODO replace indicator data with js on client page
let indicator;
if (m.botState.state === RUNNING && m.queueState.state === RUNNING && m.eventsState.state === RUNNING) {
indicator = 'green';
} else if (m.botState.state === STOPPED && m.queueState.state === STOPPED && m.eventsState.state === STOPPED) {
indicator = 'red';
} else {
indicator = 'yellow';
}
sd.indicator = indicator;
if (m.startedAt !== undefined) {
const dur = dayjs.duration(dayjs().diff(m.startedAt));
sd.startedAtHuman = `${dur.humanize()} ago`;
sd.startedAt = m.startedAt.local().format('MMMM D, YYYY h:mm A Z');
if(sd.stats.cache.totalRequests > 0) {
const minutes = dur.asMinutes();
if(minutes < 10) {
sd.stats.cache.requestRate = formatNumber((10/minutes) * sd.stats.cache.totalRequests, {toFixed: 0, round: {enable: true, indicate: true}});
} else {
sd.stats.cache.requestRate = formatNumber((minutes/10) * sd.stats.cache.totalRequests, {toFixed: 0, round: {enable: true, indicate: true}});
}
} else {
sd.stats.cache.requestRate = 0;
}
}
subManagerData.push(sd);
}
const totalStats = subManagerData.reduce((acc, curr) => {
return {
checks: {
submissions: acc.checks.submissions + curr.checks.submissions,
comments: acc.checks.comments + curr.checks.comments,
},
eventsCheckedTotal: acc.eventsCheckedTotal + curr.stats.eventsCheckedTotal,
checksRunTotal: acc.checksRunTotal + curr.stats.checksRunTotal,
checksTriggeredTotal: acc.checksTriggeredTotal + curr.stats.checksTriggeredTotal,
rulesRunTotal: acc.rulesRunTotal + curr.stats.rulesRunTotal,
rulesCachedTotal: acc.rulesCachedTotal + curr.stats.rulesCachedTotal,
rulesTriggeredTotal: acc.rulesTriggeredTotal + curr.stats.rulesTriggeredTotal,
actionsRunTotal: acc.actionsRunTotal + curr.stats.actionsRunTotal,
};
}, {
checks: {
submissions: 0,
comments: 0,
},
eventsCheckedTotal: 0,
checksRunTotal: 0,
checksTriggeredTotal: 0,
rulesRunTotal: 0,
rulesCachedTotal: 0,
rulesTriggeredTotal: 0,
actionsRunTotal: 0,
});
const {checks, ...rest} = totalStats;
let cumRaw = subManagerData.reduce((acc, curr) => {
Object.keys(curr.stats.cache.types as ResourceStats).forEach((k) => {
acc[k].requests += curr.stats.cache.types[k].requests;
acc[k].miss += curr.stats.cache.types[k].miss;
});
return acc;
}, cacheStats());
cumRaw = Object.keys(cumRaw).reduce((acc, curr) => {
const per = acc[curr].miss === 0 ? 0 : formatNumber(acc[curr].miss / acc[curr].requests) * 100;
// @ts-ignore
acc[curr].missPercent = `${formatNumber(per, {toFixed: 0})}%`;
return acc;
}, cumRaw);
let allManagerData: any = {
name: 'All',
linkName: 'All',
indicator: 'green',
botState: {
state: RUNNING,
causedBy: SYSTEM
},
dryRun: boolToString(bot.dryRun === true),
logs: logs.get('all'),
checks: checks,
softLimit: bot.softLimit,
hardLimit: bot.hardLimit,
stats: {
...rest,
cache: {
currentKeyCount: await bot.subManagers[0].resources.getCacheKeyCount(),
isShared: false,
totalRequests: subManagerData.reduce((acc, curr) => acc + curr.stats.cache.totalRequests, 0),
types: {
...cumRaw,
}
}
},
};
if (allManagerData.logs === undefined) {
// this should happen but saw an edge case where potentially did
logger.warn(`Logs for 'all' were undefined found but should always have a default empty value`);
}
// if(isOperator) {
allManagerData.startedAt = bot.startedAt.local().format('MMMM D, YYYY h:mm A Z');
allManagerData.heartbeatHuman = dayjs.duration({seconds: bot.heartbeatInterval}).humanize();
allManagerData.heartbeat = bot.heartbeatInterval;
allManagerData = {...allManagerData, ...opStats(bot)};
//}
const botDur = dayjs.duration(dayjs().diff(bot.startedAt))
if(allManagerData.stats.cache.totalRequests > 0) {
const minutes = botDur.asMinutes();
if(minutes < 10) {
allManagerData.stats.cache.requestRate = formatNumber((10/minutes) * allManagerData.stats.cache.totalRequests, {toFixed: 0, round: {enable: true, indicate: true}});
} else {
allManagerData.stats.cache.requestRate = formatNumber(allManagerData.stats.cache.totalRequests / (minutes/10), {toFixed: 0, round: {enable: true, indicate: true}});
}
} else {
allManagerData.stats.cache.requestRate = 0;
}
const data = {
userName: user,
system: {
startedAt: bot.startedAt.local().format('MMMM D, YYYY h:mm A Z'),
...opStats(bot),
},
subreddits: [allManagerData, ...subManagerData],
show: 'All',
botName: bot.botName,
operatorDisplay: display,
isOperator,
logSettings: {
//limit: [10, 20, 50, 100, 200].map(x => `<a class="capitalize ${limit === x ? 'font-bold no-underline pointer-events-none' : ''}" data-limit="${x}" href="logs/settings/update?limit=${x}">${x}</a>`).join(' | '),
limitSelect: [10, 20, 50, 100, 200].map(x => `<option ${limit === x ? 'selected' : ''} class="capitalize ${limit === x ? 'font-bold' : ''}" data-value="${x}">${x}</option>`).join(' | '),
//sort: ['ascending', 'descending'].map(x => `<a class="capitalize ${sort === x ? 'font-bold no-underline pointer-events-none' : ''}" data-sort="${x}" href="logs/settings/update?sort=${x}">${x}</a>`).join(' | '),
sortSelect: ['ascending', 'descending'].map(x => `<option ${sort === x ? 'selected' : ''} class="capitalize ${sort === x ? 'font-bold' : ''}" data-value="${x}">${x}</option>`).join(' '),
//level: availableLevels.map(x => `<a class="capitalize log-${x} ${level === x ? `font-bold no-underline pointer-events-none` : ''}" data-log="${x}" href="logs/settings/update?level=${x}">${x}</a>`).join(' | '),
levelSelect: availableLevels.map(x => `<option ${level === x ? 'selected' : ''} class="capitalize log-${x} ${level === x ? `font-bold` : ''}" data-value="${x}">${x}</option>`).join(' '),
},
};
if(req.query.sub !== undefined) {
const encoded = encodeURI(req.query.sub as string).toLowerCase();
const shouldShow = data.subreddits.find(x => x.name.toLowerCase() === encoded);
if(shouldShow !== undefined) {
data.show = shouldShow.name;
}
}
res.render('status', data);
});
app.getAsync('/logs/settings/update', async function (req, res) {
const e = req.query;
for (const [setting, val] of Object.entries(req.query)) {
switch (setting) {
case 'limit':
req.session.limit = Number.parseInt(val as string);
break;
case 'sort':
req.session.sort = val as string;
break;
case 'level':
req.session.level = val as string;
break;
}
}
const {limit = 200, level = 'verbose', sort = 'descending', user} = req.session;
res.send('OK');
const subMap = filterLogBySubreddit(subLogMap, req.session.subreddits, {
level,
operator: name !== undefined && name.toLowerCase() === (user as string).toLowerCase(),
user,
limit,
sort: (sort as 'descending' | 'ascending'),
});
const subArr: any = [];
subMap.forEach((v: string[], k: string) => {
subArr.push({name: k, logs: v.join('')});
});
io.emit('logClear', subArr);
});
app.use('/action', booleanMiddle(['force']));
app.getAsync('/action', async (req, res) => {
const {type, action, subreddit, force = false} = req.query as any;
let subreddits: string[] = [];
if (subreddit === 'All') {
subreddits = req.session.subreddits as string[];
} else if ((req.session.subreddits as string[]).includes(subreddit)) {
subreddits = [subreddit];
}
for (const s of subreddits) {
const manager = bot.subManagers.find(x => x.displayLabel === s);
if (manager === undefined) {
logger.warn(`Manager for ${s} does not exist`, {subreddit: `/u/${req.session.user}`});
continue;
}
const mLogger = manager.logger;
mLogger.info(`/u/${req.session.user} invoked '${action}' action for ${type} on ${manager.displayLabel}`);
try {
switch (action) {
case 'start':
if (type === 'bot') {
await manager.start('user');
} else if (type === 'queue') {
manager.startQueue('user');
} else {
await manager.startEvents('user');
}
break;
case 'stop':
if (type === 'bot') {
await manager.stop('user');
} else if (type === 'queue') {
await manager.stopQueue('user');
} else {
manager.stopEvents('user');
}
break;
case 'pause':
if (type === 'queue') {
await manager.pauseQueue('user');
} else {
manager.pauseEvents('user');
}
break;
case 'reload':
const prevQueueState = manager.queueState.state;
const newConfig = await manager.parseConfiguration('user', force);
if (newConfig === false) {
mLogger.info('Config was up-to-date');
}
if (newConfig && prevQueueState === RUNNING) {
await manager.startQueue(USER);
}
break;
case 'check':
if (type === 'unmoderated') {
const activities = await manager.subreddit.getUnmoderated({limit: 100});
for (const a of activities.reverse()) {
manager.queue.push({
checkType: a instanceof Submission ? 'Submission' : 'Comment',
activity: a,
});
}
} else {
const activities = await manager.subreddit.getModqueue({limit: 100});
for (const a of activities.reverse()) {
manager.queue.push({
checkType: a instanceof Submission ? 'Submission' : 'Comment',
activity: a,
});
}
}
break;
}
} catch (err) {
if (!(err instanceof LoggedError)) {
mLogger.error(err, {subreddit: manager.displayLabel});
}
}
}
res.send('OK');
});
app.use('/check', booleanMiddle(['dryRun']));
app.getAsync('/check', async (req, res) => {
const {url, dryRun, subreddit} = req.query as any;
let a;
const commentId = commentReg(url);
if (commentId !== undefined) {
// @ts-ignore
a = await bot.client.getComment(commentId);
}
if (a === undefined) {
const submissionId = submissionReg(url);
if (submissionId !== undefined) {
// @ts-ignore
a = await bot.client.getSubmission(submissionId);
}
}
if (a === undefined) {
logger.error('Could not parse Comment or Submission ID from given URL', {subreddit: `/u/${req.session.user}`});
return res.send('OK');
} else {
// @ts-ignore
const activity = await a.fetch();
const sub = await activity.subreddit.display_name;
let manager = subreddit === 'All' ? bot.subManagers.find(x => x.subreddit.display_name === sub) : bot.subManagers.find(x => x.displayLabel === subreddit);
if (manager === undefined || !(req.session.subreddits as string[]).includes(manager.displayLabel)) {
let msg = 'Activity does not belong to a subreddit you moderate or the bot runs on.';
if (subreddit === 'All') {
msg = `${msg} If you want to test an Activity against a Subreddit\'s config it does not belong to then switch to that Subreddit's tab first.`
}
logger.error(msg, {subreddit: `/u/${req.session.user}`});
return res.send('OK');
}
// will run dryrun if specified or if running activity on subreddit it does not belong to
const dr: boolean | undefined = (dryRun || manager.subreddit.display_name !== sub) ? true : undefined;
manager.logger.info(`/u/${req.session.user} running${dr === true ? ' DRY RUN ' : ' '}check on${manager.subreddit.display_name !== sub ? ' FOREIGN ACTIVITY ' : ' '}${url}`);
await manager.runChecks(activity instanceof Submission ? 'Submission' : 'Comment', activity, {dryRun: dr})
}
res.send('OK');
})
setInterval(() => {
// refresh op stats every 30 seconds
io.emit('opStats', opStats(bot));
// if (operatorSessionId !== undefined) {
// io.to(operatorSessionId).emit('opStats', opStats(bot));
// }
}, 30000);
emitter.on('log', (log) => {
const emittedSessions = [];
const subName = parseSubredditLogName(log);
if (subName !== undefined) {
for (const [id, info] of connectedUsers) {
const {subreddits, level = 'verbose', user} = info;
if (isLogLineMinLevel(log, level) && (subreddits.includes(subName) || subName.includes(user))) {
emittedSessions.push(id);
io.to(id).emit('log', formatLogLineToHtml(log));
}
}
}
if (operatorSessionId !== undefined) {
io.to(operatorSessionId).emit('opStats', opStats(bot));
if (subName === undefined || !emittedSessions.includes(operatorSessionId)) {
const {level = 'verbose'} = connectedUsers.get(operatorSessionId) || {};
if (isLogLineMinLevel(log, level)) {
io.to(operatorSessionId).emit('log', formatLogLineToHtml(log));
}
}
}
});
await bot.runManagers();
};
const opStats = (bot: App) => {
const limitReset = dayjs(bot.client.ratelimitExpiration);
const nextHeartbeat = bot.nextHeartbeat !== undefined ? bot.nextHeartbeat.local().format('MMMM D, YYYY h:mm A Z') : 'N/A';
const nextHeartbeatHuman = bot.nextHeartbeat !== undefined ? `in ${dayjs.duration(bot.nextHeartbeat.diff(dayjs())).humanize()}` : 'N/A'
return {
startedAtHuman: `${dayjs.duration(dayjs().diff(bot.startedAt)).humanize()}`,
nextHeartbeat,
nextHeartbeatHuman,
apiLimit: bot.client.ratelimitRemaining,
apiAvg: formatNumber(bot.apiRollingAvg),
nannyMode: bot.nannyMode || 'Off',
apiDepletion: bot.apiEstDepletion === undefined ? 'Not Calculated' : bot.apiEstDepletion.humanize(),
limitReset,
limitResetHuman: `in ${dayjs.duration(limitReset.diff(dayjs())).humanize()}`,
}
}
export default rcbServer;

View File

@@ -0,0 +1,39 @@
<html>
<%- include('partials/head', {title: 'RCB OAuth Helper'}) %>
<body class="">
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
<%- include('partials/title', {title: ' OAuth Helper'}) %>
<div class="container mx-auto">
<div class="grid">
<div class="bg-white dark:bg-gray-500 dark:text-white">
<div class="p-6 md:px-10 md:py-6">
<div class="text-xl mb-4">Congrats! You did the thing.</div>
<div class="space-y-3">
<ul class="list-inside list-disc">
<li>Access Token: <b><%= accessToken %></b></li>
<li>Refresh Token: <b><%= refreshToken %></b></li>
</ul>
<div>Copy these somewhere and then restart the application providing these as either arguments
or environmental variables as described in the <a
href="https://github.com/FoxxMD/reddit-context-bot#usage">usage section.</a>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<script>
if (document.querySelector('#redirectUri').value === '') {
document.querySelector('#redirectUri').value = `${document.location.href}callback`;
}
document.querySelector('#doAuth').addEventListener('click', e => {
e.preventDefault()
const url = `${document.location.href}auth?redirect=${document.querySelector('#redirectUri').value}`
window.location.href = url;
})
</script>
</body>
</html>

View File

@@ -0,0 +1,25 @@
<html>
<%- include('partials/head', {title: 'RCB'}) %>
<body class="">
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
<%- include('partials/title', {title: ''}) %>
<div class="container mx-auto">
<div class="grid">
<div class="bg-white dark:bg-gray-500 dark:text-white">
<div class="p-6 md:px-10 md:py-6">
<div class="text-xl mb-4">Oops 😬</div>
<div class="space-y-3">
<div>Something went wrong while processing that last request:</div>
<div><%- error %></div>
<% if(locals.operatorDisplay !== undefined && locals.operatorDisplay !== 'Anonymous') { %>
<div>Operated By: <%= operatorDisplay %></div>
<% } %>
</div>
</div>
</div>
</div>
</div>
</div>
</body>
</html>

View File

@@ -0,0 +1,60 @@
<html>
<%- include('partials/head', {title: 'RCB OAuth Helper'}) %>
<body class="">
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
<%- include('partials/title', {title: ' OAuth Helper'}) %>
<div class="container mx-auto">
<div class="grid">
<div class="bg-white dark:bg-gray-500 dark:text-white">
<div class="p-6 md:px-10 md:py-6">
<div class="text-xl mb-4">Hi! Looks like you're setting up your bot. To get running:</div>
<div class="text-lg text-semibold my-3">1. Set your redirect URL</div>
<input id="redirectUri" style="min-width:500px;"
class="text-black placeholder-gray-500 rounded mt-2 mb-3 p-2" value="<%= redirectUri %>">
<div class="my-2">
<input type="checkbox" id="wikiedit" name="wikiedit"
checked>
<label for="wikiedit">Include <span class="font-mono">wikiedit</span> permission for Toolbox
User Notes</label>
</div>
<div class="space-y-3">
<div>This is the URL Reddit will redirect you to once you have authorized an account to be used
with your application.
</div>
<div>The input field has been pre-filled with either:
<ul class="list-inside list-disc">
<li>What you provided to the program as an argument/environmental variable or</li>
<li>The current URL in your browser that would be used -- if you are using a reverse
proxy this may be different so double check
</li>
</ul>
</div>
<div>Make sure it matches what is found in the <b>redirect uri</b> for your <a target="_blank"
href="https://www.reddit.com/prefs/apps">application
on Reddit</a> and <b>it must end with "callback"</b></div>
</div>
<div class="text-lg text-semibold my-3">2. Login to Reddit with the account that will be the bot
</div>
Protip: Login to Reddit in an Incognito session, then open this URL in a new tab.
<div class="text-lg text-semibold my-3">3. <a id="doAuth" href="">Authorize your bot account</a>
</div>
</div>
</div>
</div>
</div>
</div>
<script>
if (document.querySelector('#redirectUri').value === '') {
document.querySelector('#redirectUri').value = `${document.location.href}callback`;
}
document.querySelector('#doAuth').addEventListener('click', e => {
e.preventDefault()
const wikiEdit = document.querySelector('#wikiedit').checked ? 1 : 0;
const url = `${document.location.href}auth?redirect=${document.querySelector('#redirectUri').value}&wikiEdit=${wikiEdit}`;
window.location.href = url;
})
</script>
</body>
</html>

View File

@@ -0,0 +1,29 @@
<html>
<%- include('partials/head', {title: 'RCB'}) %>
<body class="">
<script>document.body.classList.toggle('dark', localStorage.getItem('ms-dark') !== 'no')</script>
<div class="min-w-screen min-h-screen bg-gray-100 dark:bg-gray-800 font-sans">
<%- include('partials/title', {title: ''}) %>
<div class="container mx-auto">
<div class="grid">
<div class="bg-white dark:bg-gray-500 dark:text-white">
<div class="p-6 md:px-10 md:py-6">
<div class="text-xl mb-4">Sorry!</div>
<div class="space-y-3">
<div>Your account was successfully logged in but you do not have access to this RCB instance because either:</div>
<ul class="list-inside list-disc">
<li>The Bot account used by this instance is not a Moderator of any Subreddits you are also a Moderator of or</li>
<li>the Bot is a Moderator of one of your Subreddits but the Operator of this instance is not currently running the instance on your Subreddits.</li>
</ul>
<div>Note: You must <a href="logout">Logout</a> in order for the instance to detect changes in your subreddits/moderator status</div>
<% if(operatorDisplay !== 'Anonymous') { %>
<div>Operated By: <%= operatorDisplay %></div>
<% } %>
</div>
</div>
</div>
</div>
</div>
</div>
</body>
</html>

View File

@@ -0,0 +1,33 @@
<div class="space-x-4 p-6 md:px-10 md:py-6 leading-6 font-semibold bg-gray-800 text-white">
<div class="container mx-auto">
<div class="flex items-center justify-between">
<div class="flex items-center flex-grow pr-4">
<div class="px-4 width-full relative">
<span>
<a href="https://github.com/FoxxMD/reddit-context-bot">RCB</a> for <a href="https://reddit.com/user/<%= botName %>">/u/<%= botName %></a>
</span>
<span class="inline-block -mb-3 ml-2">
<label style="font-size:2.5px;">
<input class='toggle-checkbox' type='checkbox' id="themeToggle" checked>
<div class='toggle-slot'>
<div class='sun-icon-wrapper'>
<div class="iconify sun-icon" data-icon="feather-sun" data-inline="false"></div>
</div>
<div class='toggle-button'></div>
<div class='moon-icon-wrapper'>
<div class="iconify moon-icon" data-icon="feather-moon" data-inline="false"></div>
</div>
</div>
</label>
</span>
<div class="text-small absolute pl-4">
Operated by <%= operatorDisplay %>
</div>
</div>
</div>
<div class="flex items-center flex-end text-sm">
<a href="logout">Logout</a>
</div>
</div>
</div>
</div>

View File

@@ -0,0 +1,86 @@
<head>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/tailwindcss/2.0.3/tailwind.min.css"
integrity="sha512-wl80ucxCRpLkfaCnbM88y4AxnutbGk327762eM9E/rRTvY/ZGAHWMZrYUq66VQBYMIYDFpDdJAOGSLyIPHZ2IQ=="
crossorigin="anonymous"/>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/tailwindcss/2.0.3/tailwind-dark.min.css"
integrity="sha512-WvyKyiVHgInX5UQt67447ExtRRZG/8GUijaq1MpqTNYp8wY4/EJOG5bI80sRp/5crDy4Z6bBUydZI2OFV3Vbtg=="
crossorigin="anonymous"/>
<script src="https://code.iconify.design/1/1.0.4/iconify.min.js"></script>
<link rel="stylesheet" href="public/themeToggle.css">
<style>
a {
text-decoration: underline;
}
.loading {
height: 35px;
fill: black;
display: none;
}
.connected .loading {
display: inline;
}
.dark .loading {
fill: white;
}
.sub {
display: none;
}
.sub.active {
display: inherit;
}
/*https://stackoverflow.com/a/48386400/1469797*/
.stats {
display: grid;
grid-template-columns: max-content auto;
grid-gap: 5px;
}
.stats label {
text-align: right;
}
.stats label:after {
content: ":";
}
.has-tooltip {
/*position: relative;*/
}
.tooltip {
transition-delay: 0.5s;
transition-property: visibility;
visibility: hidden;
position: absolute;
/*right: 0;*/
margin-top:-35px;
}
.has-tooltip:hover .tooltip {
visibility: visible;
transition-delay: 0.2s;
transition-property: visibility;
z-index: 100;
}
.pointer {
cursor: pointer;
}
.botStats.hidden {
display: none;
}
</style>
<title><%= title !== undefined ? title : `RCB for /u/${botName}`%></title>
<!--<title><%# `RCB for /u/${botName}`%></title>-->
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width,initial-scale=1.0">
<!--icons from https://heroicons.com -->
</head>

View File

@@ -0,0 +1,13 @@
<div class="space-x-4 p-6 md:px-10 md:py-6 leading-6 font-semibold bg-gray-800 text-white">
<div class="container mx-auto">
<div class="flex items-center justify-between">
<div class="flex items-center flex-grow pr-4">
<div class="px-4 width-full relative">
<div><a href="https://github.com/FoxxMD/reddit-context-bot">RCB</a> <%= title %></div>
</div>
</div>
<div class="flex items-center flex-end text-sm">
</div>
</div>
</div>
</div>

756
src/Server/views/status.ejs Normal file
View File

@@ -0,0 +1,756 @@
<html>
<%- include('partials/head', {title: undefined}) %>
<body class="">
<script>document.body.classList.toggle('dark', localStorage.getItem('ms-dark') !== 'no')</script>
<div class="min-w-screen min-h-screen bg-gray-100 dark:bg-gray-800 font-sans">
<%- include('partials/authTitle') %>
<div class="space-x-4 py-1 md:px-10 leading-6 font-semibold bg-gray-500 dark:bg-gray-700 text-white">
<div class="container mx-auto">
<ul id="tabs" class="inline-flex flex-wrap">
<% subreddits.forEach(function (data){ %>
<li class="my-3 px-3 dark:text-white">
<span data-subreddit="<%= data.name %>" class="rounded-md py-2 px-3 tabSelectWrapper">
<a class="tabSelect font-normal pointer hover:font-bold"
data-subreddit="<%= data.name %>">
<%= data.name %>
</a>
<% if ((data.name === 'All' && isOperator) || data.name !== 'All') { %>
<span class="inline-block mb-0.5 ml-0.5 w-2 h-2 bg-<%= data.indicator %>-400 rounded-full"></span>
<% } %>
</span>
</li>
<% }) %>
</ul>
</div>
</div>
<div class="container mx-auto">
<div class="grid">
<div class="bg-white dark:bg-gray-500 dark:text-white">
<div class="pb-6 pt-3 md:px-7">
<!-- <div class="flex items-center justify-around">-->
<!-- -->
<!-- </div>-->
<% subreddits.forEach(function (data){ %>
<div class="sub" data-subreddit="<%= data.name %>">
<div class="grid grid-cols-1 lg:grid-cols-2 xl:grid-cols-3 2xl:grid-cols-3 gap-5">
<div class="bg-white shadow-md rounded my-3 dark:bg-gray-500 dark:text-white">
<div class="space-x-4 px-4 p-2 leading-2 font-semibold bg-gray-300 dark:bg-gray-700 dark:text-white">
<div class="flex items-center justify-between">
<h4>Overview</h4>
<% if (data.name === 'All') { %>
<div class="flex items-center flex-end">
<div>
<a class="action" data-action="start" data-type="bot"
data-subreddit="<%= data.name %>"
href="#">Start All</a> |
<a class="action" data-action="stop" data-type="bot"
data-subreddit="<%= data.name %>"
href="#">Stop All</a> |
<a class="action" data-action="reload"
data-subreddit="<%= data.name %>"
href="#">Update All</a>
</div>
</div>
<% } else { %>
<div class="flex items-center flex-end">
<div>
<span class="mr-2">Check:</span>
<a class="action" data-action="check" data-type="unmoderated"
data-subreddit="<%= data.name %>"
href="#">Unmoderated</a> |
<a class="action" data-action="check" data-type="modqueue"
data-subreddit="<%= data.name %>"
href="#">Modqueue</a>
</div>
</div>
<% } %>
</div>
</div>
<div class="p-4">
<div class="stats">
<% if (data.name !== 'All') { %>
<label>
<span class="has-tooltip">
<span class="tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2 space-y-3 p-2 text-left">
<div>The "main" process monitoring Activity for the subreddit.</div>
<div><b>Starting</b> or <b>Stopping</b> this will also Start/Stop <b>Queue/Events.</b></div>
<div>Additionally, the <b>Stopped</b> state will prevent configuration from being checked on heartbeat. This is useful if the subreddit's config is really broken and you want to essentially ignore this subreddit until manually interacted with.</div>
</span>
<span>
Bot
<svg xmlns="http://www.w3.org/2000/svg"
class="h-4 w-4 inline-block cursor-help"
fill="none"
viewBox="0 0 24 24" stroke="currentColor">
<path stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
</svg>
</span>
</span>
</label>
<div class="flex items-center justify-between">
<span class="font-semibold"><%= `${data.botState.state}${data.botState.causedBy === 'system' ? '' : ' (user)'}` %></span>
<div class="flex items-center flex-end">
<div>
<a class="action" data-action="start" data-type="bot"
data-subreddit="<%= data.name %>" href="#">Start</a> |
<a class="action" data-action="stop" data-type="bot"
data-subreddit="<%= data.name %>" href="#">Stop</a>
</div>
</div>
</div>
<label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2 space-y-3 p-2 text-left'>
<div>The <b>Queue</b> controls processing of <b>Activities</b> ingested from <b>Events.</b></div>
<ul class="list-inside list-disc">
<li><b>Starting</b> the Queue will begin Processing (running checks on) queued Activities based on the max number of workers available</li>
<li><b>Stopping</b> the Queue will prevent queued Activities from being Processed, after any current Activities are finished Processing.</li>
</ul>
<div>If all available workers are processing Activities then new Activities returned from <b>Events</b> will be marked as <b>Queued</b></div>
</span>
<span>
Queue
<svg xmlns="http://www.w3.org/2000/svg"
class="h-4 w-4 inline-block cursor-help"
fill="none"
viewBox="0 0 24 24" stroke="currentColor">
<path stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
</svg>
</span>
</span>
</label>
<div class="flex items-center justify-between">
<span class="font-semibold"><%= `${data.queueState.state}${data.queueState.causedBy === 'system' ? '' : ' (user)'}` %></span>
<div class="flex items-center flex-end">
<div>
<a class="action" data-action="start" data-type="queue"
data-subreddit="<%= data.name %>" href="#">Start</a> |
<a class="action" data-action="pause" data-type="queue"
data-subreddit="<%= data.name %>" href="#">Pause</a> |
<a class="action" data-action="stop" data-type="queue"
data-subreddit="<%= data.name %>" href="#">Stop</a>
</div>
</div>
</div>
<label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2 space-y-3 p-2 text-left'>
<div><b>Events</b> controls polling (monitoring) of <b>Activity Sources</b> (unmoderated, modqueue, comments, etc.)</div>
<ul class="list-inside list-disc">
<li><b>Starting</b> Events will cause polling to begin. Any new Activities discovered after polling begins will be sent to <b>Queue</b></li>
<li><b>Stopping</b> Events will cause polling to stop.</li>
</ul>
</span>
<span>
Events
<svg xmlns="http://www.w3.org/2000/svg"
class="h-4 w-4 inline-block cursor-help"
fill="none"
viewBox="0 0 24 24" stroke="currentColor">
<path stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
</svg>
</span>
</span>
</label>
<div class="flex items-center justify-between">
<span class="font-semibold"><%= `${data.eventsState.state}${data.eventsState.causedBy === 'system' ? '' : ' (user)'}` %></span>
<div class="flex items-center flex-end">
<div>
<a class="action" data-action="start" data-type="event"
data-subreddit="<%= data.name %>" href="#">Start</a> |
<a class="action" data-action="pause" data-type="event"
data-subreddit="<%= data.name %>" href="#">Pause</a> |
<a class="action" data-action="stop" data-type="event"
data-subreddit="<%= data.name %>" href="#">Stop</a>
</div>
</div>
</div>
<label>Activities</label>
<span class="has-tooltip">
<span style="margin-top:-55px"
class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black'>
<div>Max Concurrent Processing</div>
<div>Config: <%= data.maxWorkers %></div>
</span>
<span><%= `${data.runningActivities} Processing / ${data.queuedActivities} Queued` %></span>
</span>
<label>Slow Mode</label>
<span><%= data.delayBy %></span>
<% } %>
<% if (data.name === 'All') { %>
<label>Uptime</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<%= system.startedAt %>
</span>
<span>
<span id="startedAtHuman"><%= system.startedAtHuman %></span>
</span>
</span>
<label>Heartbeat Interval</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<%= data.heartbeat %> seconds
</span>
<%= data.heartbeatHuman %>
</span>
<label>Next Heartbeat</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span id="nextHeartbeat"><%= data.nextHeartbeat %></span>
</span>
<span id="nextHeartbeatHuman"><%= data.nextHeartbeatHuman %></span>
</span>
<% } %>
</div>
<% if (data.name !== 'All') { %>
<ul class="list-disc list-inside mt-4">
<% data.pollingInfo.forEach(function (i){ %>
<li>Polling <%- i %></li>
<% }) %>
</ul>
<% } %>
</div>
</div>
<% if (data.name === 'All') { %>
<div class="bg-white shadow-md rounded my-3 dark:bg-gray-500 dark:text-white">
<div class="space-x-4 px-4 p-2 leading-2 font-semibold bg-gray-300 dark:bg-gray-700 dark:text-white">
<h4>API</h4>
</div>
<div class="p-4">
<div class="stats">
<label>Soft Limit</label>
<span>&lt; <span id="softLimit"><%= data.softLimit %></span></span>
<label>Hard Limit</label>
<span>&lt; <span id="hardLimit"><%= data.hardLimit %></span></span>
<label>Api Nanny</label>
<span><b><span id="nannyMode"><%= data.nannyMode %></span></b></span>
<label>Api Usage</label>
<span><span id="apiLimit"><%= data.apiLimit %></span>/600 (~<span
id="apiAvg"><%= data.apiAvg %></span>req/s)</span>
<label>Depleted</label>
<span>in ~<span id="apiDepletion"><%= data.apiDepletion %></span></span>
<label>Limit Reset</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span id="limitReset"><%= data.limitReset %></span>
</span>
<span id="limitResetHuman"><%= data.limitResetHuman %></span>
</span>
</div>
</div>
</div>
<% } %>
<% if (data.name !== 'All') { %>
<div class="bg-white shadow-md rounded my-3 dark:bg-gray-500 dark:text-white">
<div class="space-x-4 px-4 p-2 leading-2 font-semibold bg-gray-300 dark:bg-gray-700 dark:text-white">
<h4>Config
<span>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<%= data.wikiRevision %>
</span>
(Revised <%= data.wikiRevisionHuman %>)
</span>
</h4>
</div>
<div class="p-4">
<div class="stats">
<label>Valid</label>
<span class="font-semibold"><%= data.validConfig %></span>
<label>Checks</label>
<span><%= data.checks.submissions %> Submission | <%= data.checks.comments %> Comment </span>
<label>Dry Run</label>
<span><%= data.dryRun %></span>
<label>Updated</label>
<span>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<%= data.startedAt %>
</span>
<%= data.startedAtHuman %>
</span>
</span>
<label>Checked</label>
<span>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<%= data.wikiLastCheck %>
</span>
<%= data.wikiLastCheckHuman %>
</span>
<a class="action" data-action="reload"
data-subreddit="<%= data.name %>"
href="#">Update</a> | <a
class="action" data-action="reload" data-force="true"
data-subreddit="<%= data.name %>" href="#">Force</a>
</span>
<label>Location</label>
<span>
<a style="display: inline"
href="<%= data.wikiHref %>"><%= data.wikiLocation %></a>
</span>
</div>
</div>
</div>
<% } %>
<div class="bg-white shadow-md rounded my-3 dark:bg-gray-500 dark:text-white">
<div class="space-x-4 px-4 p-2 leading-2 font-semibold bg-gray-300 dark:bg-gray-700 dark:text-white">
<h4>Usage</h4>
</div>
<div class="p-4">
<div class="flex items-top justify-evenly">
<div>
<div class="text-left font-semibold pb-2">Bot</div>
<% if (data.name !== 'All') { %><span class="text-center pb-2"><a
data-subreddit="<%= data.name %>"
data-stattype="reloadStats" class="statsToggle" href="">Last Reload</a> | <a
data-subreddit="<%= data.name %>" data-stattype="allStats"
class="statsToggle allStatsToggle"
href="">All Time</a> </span>
<% } %>
<% if (data.name !== 'All') { %>
<div data-subreddit="<%= data.name %>"
class="stats botStats reloadStats">
<label>Events</label>
<span><%= data.stats.eventsCheckedSinceStartTotal === undefined ? '-' : data.stats.eventsCheckedSinceStartTotal %></span>
<label>Checks</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.checksTriggeredSinceStartTotal %></span> Triggered / <span><%= data.stats.checksRunSinceStartTotal %></span> Run
</span>
<span><%= data.stats.checksTriggeredSinceStartTotal %></span> T / <span><%= data.stats.checksRunSinceStartTotal %></span> R
</span>
<label>Rules</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.rulesTriggeredSinceStartTotal %></span> Triggered / <span><%= data.stats.rulesCachedSinceStartTotal %></span> Cached / <span><%= data.stats.rulesRunSinceStartTotal %></span> Run
</span>
<span class="cursor-help">
<span><%= data.stats.rulesTriggeredSinceStartTotal %></span> T / <span><%= data.stats.rulesCachedSinceStartTotal %></span> C / <span><%= data.stats.rulesRunSinceStartTotal %></span> R</span>
</span>
<label>Actions</label>
<span class="cursor-help"><%= data.stats.actionsRunSinceStartTotal === undefined ? '-' : data.stats.actionsRunSinceStartTotal %></span>
</div>
<% } %>
<div data-subreddit="<%= data.name %>" class="stats botStats allStats">
<label>Events</label>
<span><%= data.stats.eventsCheckedTotal %></span>
<label>Checks</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.checksTriggeredTotal %></span> Triggered / <span><%= data.stats.checksRunTotal %></span> Run
</span>
<span class="cursor-help"><span><%= data.stats.checksTriggeredTotal %></span> T / <span><%= data.stats.checksRunTotal %></span> R</span>
</span>
<label>Rules</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.rulesTriggeredTotal %></span> Triggered / <span><%= data.stats.rulesCachedTotal %></span> Cached / <span><%= data.stats.rulesRunTotal %></span> Run
</span>
<span class="cursor-help"><span><%= data.stats.rulesTriggeredTotal %></span> T / <span><%= data.stats.rulesCachedTotal %></span> C / <span><%= data.stats.rulesRunTotal %></span> R</span>
</span>
<label>Actions</label>
<span><%= data.stats.actionsRunTotal %> Run</span>
</div>
</div>
<div>
<div class="text-left pb-2">
<% if (data.name === 'All') { %>
<span class="font-semibold">Cache</span>
<% } else { %>
<div>
<span class="font-semibold capitalize">Cache - <%= data.stats.cache.provider %><%= data.stats.cache.isShared ? ' (Shared)' : '' %></span>
<span class="has-tooltip">
<span style="right: 0;"
class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2 space-y-3 p-2'>
<% if (data.stats.cache.isShared) { %>
<div>This subreddit is using the default, <b>application-wide shared cache</b> because its <b
class="font-mono">caching</b> configuration is not specified.</div>
<div>Pros:
<ul class="list-inside list-disc">
<li>All subreddits can utilize any cached authors/etc., reduces overall api usage</li>
<li>Bot Operator can fine tune cache without subreddit interaction</li>
</ul>
</div>
<div>
Cons:
<ul class="list-inside list-disc">
<li>Subreddits must use default TTLs which may not fit use case for rules</li>
<li>Bots operating subreddits with dramatically contrasting caching requirements may suffer in performance/api usage</li>
</ul>
</div>
<% } else { %>
<div>This subreddit is using its <b>own, subreddit-specific cache</b> because its <b
        class="font-mono">caching</b> configuration is specified.</div>
<div>Pros:
    <ul class="list-inside list-disc">
        <li>TTLs can be tuned to fit this subreddit's rules and use cases</li>
        <li>Cache behavior is isolated from other subreddits' usage patterns</li>
    </ul>
</div>
<div>
    Cons:
    <ul class="list-inside list-disc">
        <li>Cached authors/etc. cannot be shared with other subreddits, which may increase overall api usage</li>
        <li>The Bot Operator cannot fine tune this cache without the subreddit changing its configuration</li>
    </ul>
</div>
<% } %>
</span>
<span>
<svg xmlns="http://www.w3.org/2000/svg"
class="h-4 w-4 inline-block cursor-help"
fill="none"
viewBox="0 0 24 24" stroke="currentColor">
<path stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
</svg>
</span>
</span>
</div>
<% } %>
</div>
<div class="stats">
<label>Keys</label>
<span><%= data.stats.cache.currentKeyCount %></span>
<label>Calls</label>
<span><span><%= data.stats.cache.totalRequests %></span> <span>(<%= data.stats.cache.requestRate %>/10min)</span></span>
</div>
<div class="text-left py-2">
<span class="font-semibold">Calls Breakdown</span>
<span class="has-tooltip">
<span style="right: 0;"
class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2 space-y-3 p-2'>
<div>Number of calls to cache made for each type of cached item.</div>
<div>The <b>miss %</b> is the percentage of cache calls that were empty and data had to be fully acquired/processed.</div>
<div>
<ul class="list-inside list-disc">
<li><b>Author</b> - Cached history for an Activity's Author acquired from <span
class="font-mono">window</span> criteria. A missed cached call means at least one API call must be made.</li>
<li><b>Criteria</b> - Cached results of an <span
class="font-mono">authorIs</span> test. A missed cached call may require at least one API call.</li>
<li><b>Content</b> - Cached content for footer/comment/ban content. A missed cached call requires one API call.</li>
<li><b>UserNote</b> - Cached UserNotes. A missed cached call requires one API call.</li>
</ul>
</div>
<div>
Some tips/tricks for cache:
</div>
<ul class="list-inside list-disc">
<li>Only need to pay attention to caching if a subreddit uses the API/cache heavily IE high-volume comment checks or very large check sets for submissions</li>
<li>Increasing TTL will reduce cache misses and api usage at the expensive of a larger cache and stale results</li>
<li>Re-using <span
class="font-mono">window</span> and <span
class="font-mono">authorIs</span> values in configuration will enable the bot to re-use these results and thus reduce cache misses/api usage</li>
</ul>
</span>
<span>
<svg xmlns="http://www.w3.org/2000/svg"
class="h-4 w-4 inline-block cursor-help"
fill="none"
viewBox="0 0 24 24" stroke="currentColor">
<path stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
</svg>
</span>
</span>
</div>
<div class="stats">
<label>Author</label>
<span><%= data.stats.cache.types.author.requests %> (<%= data.stats.cache.types.author.missPercent %> miss)</span>
<label>Criteria</label>
<span><%= data.stats.cache.types.authorCrit.requests %> (<%= data.stats.cache.types.authorCrit.missPercent %> miss)</span>
<label>Content</label>
<span><%= data.stats.cache.types.content.requests %> (<%= data.stats.cache.types.content.missPercent %> miss)</span>
<label>UserNote</label>
<span><%= data.stats.cache.types.userNotes.requests %> (<%= data.stats.cache.types.userNotes.missPercent %> miss)</span>
</div>
</div>
</div>
</div>
</div>
</div>
<br/>
<div class="flex items-center justify-between flex-wrap">
<div class="inline-flex items-center">
<input data-subreddit="<%= data.name %>" style="min-width: 420px;"
class="border-gray-50 placeholder-gray-500 rounded mt-2 mb-3 p-2 text-black checkUrl"
placeholder="<%= data.name === 'All' ? 'Run Bot on a permalink from any moderated Subreddit' : `Run Bot on a permalink using this Subreddit's config` %>"/>
<span class="mx-2">
<input type="checkbox" class="dryrunCheck" data-subreddit="<%= data.name %>"
name="dryrunCheck">
<label for="dryrunCheck">Dry Run?</label>
</span>
<a class="runCheck" data-subreddit="<%= data.name %>" href="">Run</a>
</div>
<div class="flex items-center flex-end space-x-2">
<span>
<label for="level-select">Level: </label>
<select class="logSettingSelect rounded capitalize text-black" data-type="level"
id="levels-select">
<%- logSettings.levelSelect %>
</select>
</span>
<span>
<label for="sort-select">Sort: </label>
<select class="logSettingSelect rounded capitalize text-black" data-type="sort"
id="sort-select">
<%- logSettings.sortSelect %>
</select>
</span>
<span>
<label for="limit-select">Limit: </label>
<select class="logSettingSelect rounded capitalize text-black" data-type="limit"
id="limit-select">
<%- logSettings.limitSelect %>
</select>
</span>
</div>
</div>
<svg class="loading" version="1.1" id="L9" xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 100 100" xml:space="preserve">
<path
d="M73,50c0-12.7-10.3-23-23-23S27,37.3,27,50 M30.9,50c0-10.5,8.5-19.1,19.1-19.1S69.1,39.5,69.1,50">
<animateTransform
attributeName="transform"
attributeType="XML"
type="rotate"
dur="1s"
from="0 50 50"
to="360 50 50"
repeatCount="indefinite"/>
</path>
</svg>
<div data-subreddit="<%= data.name %>" class="logs font-mono text-sm">
<% data.logs.forEach(function (logEntry){ %>
<%- logEntry %>
<% }) %>
</div>
</div>
<% }) %>
</div>
<!--<div class="w-full flex-auto flex min-h-0 overflow-auto">
<div class="w-full relative flex-auto">
</div>
</div>-->
</div>
</div>
</div>
</div>
<script>
/*
 * Client-side controller for the status page. Wires up: theme toggle,
 * log display settings, bot action links, manual "run check" inputs,
 * stats view toggles, and subreddit tab selection.
 * Values inside <%= %> are injected server-side by EJS at render time.
 */
window.sort = 'desc'; // read by the socket "log" handler: 'desc' => newest lines prepended

// Legacy light/dark theme links (.theme) -- not rendered on this page but handled defensively.
document.querySelectorAll('.theme').forEach(el => {
    el.addEventListener('click', e => {
        e.preventDefault();
        if (e.target.id === 'dark') {
            document.body.classList.add('dark');
            localStorage.setItem('ms-dark', 'yes');
        } else {
            document.body.classList.remove('dark');
            localStorage.setItem('ms-dark', 'no');
        }
        document.querySelectorAll('.theme').forEach(other => {
            other.classList.remove('font-bold', 'no-underline', 'pointer-events-none');
        });
        e.target.classList.add('font-bold', 'no-underline', 'pointer-events-none');
    })
})

// Theme slider state is persisted in localStorage under 'ms-dark'.
document.querySelector("#themeToggle").checked = localStorage.getItem('ms-dark') !== 'no';
document.querySelector("#themeToggle").onchange = (e) => {
    if (e.target.checked === true) {
        document.body.classList.add('dark');
        localStorage.setItem('ms-dark', 'yes');
    } else {
        document.body.classList.remove('dark');
        localStorage.setItem('ms-dark', 'no');
    }
}

// Log display settings (level/sort/limit). The 'const' declarations fix the
// accidental implicit globals ('action', 'value') from the previous version.
document.querySelectorAll('.logSettingSelect').forEach(el => {
    el.onchange = (e) => {
        const action = e.target.dataset.type;
        const value = e.target.value;
        fetch(`logs/settings/update?${action}=${encodeURIComponent(value)}`);
        // Bold the currently-selected option for visual feedback.
        document.querySelectorAll(`#${e.target.id}.logSettingSelect option`).forEach(opt => {
            opt.classList.remove('font-bold');
        });
        document.querySelector(`#${e.target.id}.logSettingSelect option[data-value="${e.target.value}"]`).classList.add('font-bold');
    }
});

// Start/stop/pause/reload/check action links -- fire-and-forget GET to the server.
document.querySelectorAll('.action').forEach(el => {
    el.addEventListener('click', e => {
        e.preventDefault();
        const action = e.target.dataset.action;
        const subreddit = e.target.dataset.subreddit;
        const type = e.target.dataset.type;
        const force = e.target.dataset.force === 'true';
        fetch(`action?action=${action}&subreddit=${encodeURIComponent(subreddit)}&force=${force}&type=${type}`);
    });
})

// Run the bot against a single permalink, optionally as a dry run.
document.querySelectorAll(".runCheck").forEach(el => {
    el.addEventListener('click', e => {
        e.preventDefault();
        const subreddit = e.target.dataset.subreddit;
        const urlInput = document.querySelector(`[data-subreddit="${subreddit}"].checkUrl`);
        const dryRunCheck = document.querySelector(`[data-subreddit="${subreddit}"].dryrunCheck`);
        // Encode the permalink -- it is itself a URL and a '?' or '&' inside it
        // would otherwise corrupt this request's query string.
        const url = encodeURIComponent(urlInput.value);
        const dryRun = dryRunCheck.checked ? 1 : 0;
        fetch(`check?url=${url}&dryRun=${dryRun}&subreddit=${encodeURIComponent(subreddit)}`);
        // Reset the form after submitting.
        urlInput.value = '';
        dryRunCheck.checked = false;
    });
});

// Toggle between "Last Reload" and "All Time" bot stats for a subreddit.
document.querySelectorAll('.statsToggle').forEach(el => {
    el.addEventListener('click', e => {
        e.preventDefault();
        const subreddit = e.target.dataset.subreddit;
        const statsClass = e.target.dataset.stattype;
        document.querySelectorAll(`[data-subreddit="${subreddit}"].statsToggle`).forEach(toggle => {
            toggle.classList.remove('font-bold', 'no-underline', 'pointer-events-none');
        });
        e.target.classList.add('font-bold', 'no-underline', 'pointer-events-none');
        document.querySelectorAll(`[data-subreddit="${subreddit}"].botStats`).forEach(stats => {
            stats.classList.add('hidden');
        });
        document.querySelector(`[data-subreddit="${subreddit}"].botStats.${statsClass}`).classList.remove('hidden');
    });
})

// Subreddit tab selection: show the chosen subreddit's panel, highlight its
// tab, and persist the selection in the querystring so it survives reloads.
document.querySelectorAll('[data-subreddit].tabSelect').forEach(el => {
    el.addEventListener('click', e => {
        e.preventDefault();
        document.querySelectorAll('[data-subreddit].tabSelect').forEach(tab => {
            tab.classList.remove('font-bold', 'no-underline', 'pointer-events-none');
        });
        document.querySelectorAll('[data-subreddit].sub').forEach(panel => {
            panel.classList.remove('active');
        });
        e.target.classList.add('font-bold', 'no-underline', 'pointer-events-none');
        document.querySelector(`[data-subreddit="${e.target.dataset.subreddit}"].sub`).classList.add('active');
        document.querySelectorAll('.tabSelectWrapper').forEach(w => {
            w.classList.remove('border-2');
            w.classList.add('border');
        })
        const selectedWrapper = document.querySelector(`[data-subreddit="${e.target.dataset.subreddit}"].tabSelectWrapper`);
        selectedWrapper.classList.remove('border');
        selectedWrapper.classList.add('border-2');
        if ('URLSearchParams' in window) {
            const searchParams = new URLSearchParams(window.location.search);
            searchParams.set("sub", e.target.dataset.subreddit);
            history.pushState(null, '', window.location.pathname + '?' + searchParams.toString());
        }
    });
})

// Initial state: activate the server-selected tab and default to "All Time" stats.
document.querySelector('[data-subreddit="<%= show %>"].tabSelect').classList.add('font-bold', 'no-underline', 'pointer-events-none');
document.querySelectorAll('.tabSelectWrapper').forEach(el => el.classList.add('border'));
document.querySelector('[data-subreddit="<%= show %>"].sub').classList.add('active')
const wrapper = document.querySelector(`[data-subreddit="<%= show %>"].tabSelectWrapper`);
wrapper.classList.remove('border');
wrapper.classList.add('border-2');
document.querySelectorAll('.stats.reloadStats').forEach(el => el.classList.add('hidden'));
document.querySelectorAll('.allStatsToggle').forEach(el => el.classList.add('font-bold', 'no-underline', 'pointer-events-none'));
</script>
<script src="https://cdn.socket.io/3.1.3/socket.io.min.js"
integrity="sha384-cPwlPLvBTa3sKAgddT6krw0cJat7egBga3DJepJyrLl4Q9/5WLra3rrnMcyTyOnh"
crossorigin="anonymous"></script>
<script>
// Matches the "{SubredditName}" token the server embeds in rendered log lines.
const SUBREDDIT_NAME_LOG_REGEX = /{(.+?)}/;
// Returns the subreddit name from a rendered log line, or undefined when the
// line is not subreddit-scoped.
const parseSubredditLogName = (val) => {
    const matches = val.match(SUBREDDIT_NAME_LOG_REGEX);
    if (matches === null) {
        return undefined;
    }
    return matches[1];
}
let socket = io({
    reconnectionAttempts: 5, // bail after 5 attempts
});
// NOTE: event handlers are registered ONCE, at the top level. The previous
// version registered them inside the "connect" callback, which stacked a
// fresh copy of every handler on each reconnect and duplicated log output.
socket.on("connect", () => {
    document.body.classList.add('connected');
});
socket.on("log", data => {
    // Every line goes to the "All" panel; subreddit-scoped lines also go to
    // that subreddit's own panel.
    const selectors = ['[data-subreddit="All"].logs'];
    const sub = parseSubredditLogName(data);
    if (sub !== undefined) {
        selectors.push(`[data-subreddit="${sub}"].logs`);
    }
    selectors.forEach(sel => {
        const el = document.querySelector(sel);
        if (el !== null) {
            // window.sort is maintained by the main page script; 'desc' => newest first.
            const currLogs = el.innerHTML;
            el.innerHTML = window.sort === 'desc' ? data.concat(currLogs) : currLogs.concat(data);
        }
    });
});
socket.on("logClear", data => {
    data.forEach((obj) => {
        const n = obj.name === 'all' ? 'All' : obj.name;
        const el = document.querySelector(`[data-subreddit="${n}"].logs`);
        if (el !== null) { // panel may be absent if the subreddit list changed
            el.innerHTML = obj.logs;
        }
    })
});
socket.on('opStats', (data) => {
    for (const [k, v] of Object.entries(data)) {
        // Guard against stats whose element is missing from the page -- an
        // unmatched id would otherwise throw and abort the remaining updates.
        const el = document.querySelector(`#${k}`);
        if (el !== null) {
            el.innerHTML = v;
        }
    }
});
socket.on('disconnect', () => {
    document.body.classList.remove('connected');
});
</script>
</body>
</html>

File diff suppressed because it is too large Load Diff

View File

@@ -1,16 +1,100 @@
import { Poll, SnooStormOptions } from "snoostorm"
import {Poll, SnooStormOptions} from "snoostorm"
import Snoowrap from "snoowrap";
import {EventEmitter} from "events";
import {PollConfiguration} from "snoostorm/out/util/Poll";
import {DEFAULT_POLLING_INTERVAL} from "../Common/interfaces";
export class UnmoderatedStream extends Poll<
Snoowrap.Submission | Snoowrap.Comment
> {
// A value that may or may not be wrapped in a Promise
type Awaitable<T> = Promise<T> | T;
/**
 * A controllable variant of snoostorm's Poll.
 *
 * The base Poll begins polling as soon as it is constructed; SPoll cancels the
 * interval started by super() and only begins polling after an explicit
 * startInterval() call. It also tracks a `running` flag and, when a fetch
 * fails, emits 'error' and stops instead of leaving the interval running.
 */
export class SPoll<T extends object> extends Poll<T> {
// property used to de-duplicate items across polls (e.g. 'id')
identifier: keyof T;
// fetches the next batch of items to examine
getter: () => Awaitable<T[]>;
// polling interval in milliseconds (type inferred from options.frequency)
frequency;
running: boolean = false;
constructor(options: PollConfiguration<T>) {
super(options);
this.identifier = options.identifier;
this.getter = options.get;
this.frequency = options.frequency;
// super() starts polling immediately; cancel that interval so polling only
// begins when startInterval() is explicitly invoked
clearInterval(this.interval);
}
// Begin polling. Each tick fetches a batch, emits 'item' for every item not
// seen before, then emits 'listing' with all of this tick's new items. Any
// error stops the poll and is re-emitted as an 'error' event.
startInterval = () => {
this.running = true;
this.interval = setInterval(async () => {
try {
const batch = await this.getter();
const newItems: T[] = [];
for (const item of batch) {
const id = item[this.identifier];
if (this.processed.has(id)) continue;
// Emit for new items and add it to the list
newItems.push(item);
this.processed.add(id);
this.emit("item", item);
}
// Emit the new listing of all new items
this.emit("listing", newItems);
} catch (err) {
this.emit('error', err);
this.end();
}
}, this.frequency);
}
// Stop polling and clear the running flag
end = () => {
this.running = false;
super.end();
}
}
export class UnmoderatedStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
constructor(
client: Snoowrap,
options: SnooStormOptions & { subreddit: string }) {
super({
frequency: options.pollTime || 20000,
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
get: async () => client.getSubreddit(options.subreddit).getUnmoderated(options),
identifier: "id",
});
}
}
/**
 * Polls a subreddit's modqueue for new submissions/comments.
 * Poll frequency comes from options.pollTime, falling back to the app default.
 */
export class ModQueueStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
    constructor(client: Snoowrap, opts: SnooStormOptions & { subreddit: string }) {
        const pollFrequency = opts.pollTime || DEFAULT_POLLING_INTERVAL * 1000;
        super({
            frequency: pollFrequency,
            get: async () => client.getSubreddit(opts.subreddit).getModqueue(opts),
            identifier: "id",
        });
    }
}
/**
 * Polls a subreddit's new-submission listing.
 * Poll frequency comes from options.pollTime, falling back to the app default.
 */
export class SubmissionStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
    constructor(client: Snoowrap, opts: SnooStormOptions & { subreddit: string }) {
        const pollFrequency = opts.pollTime || DEFAULT_POLLING_INTERVAL * 1000;
        super({
            frequency: pollFrequency,
            get: async () => client.getNew(opts.subreddit, opts),
            identifier: "id",
        });
    }
}
/**
 * Polls a subreddit's new-comment listing.
 * Poll frequency comes from options.pollTime, falling back to the app default.
 */
export class CommentStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
    constructor(client: Snoowrap, opts: SnooStormOptions & { subreddit: string }) {
        const pollFrequency = opts.pollTime || DEFAULT_POLLING_INTERVAL * 1000;
        super({
            frequency: pollFrequency,
            get: async () => client.getNewComments(opts.subreddit, opts),
            identifier: "id",
        });
    }
}

View File

@@ -1,5 +1,4 @@
import Snoowrap, {RedditUser, Comment, Submission} from "snoowrap";
import cache from 'memory-cache';
import objectHash from 'object-hash';
import {
AuthorActivitiesOptions,
@@ -10,45 +9,82 @@ import {
import Subreddit from 'snoowrap/dist/objects/Subreddit';
import winston, {Logger} from "winston";
import fetch from 'node-fetch';
import {mergeArr, parseExternalUrl, parseWikiContext} from "../util";
import {
buildCacheOptionsFromProvider,
cacheStats, createCacheManager,
formatNumber,
mergeArr,
parseExternalUrl,
parseWikiContext
} from "../util";
import LoggedError from "../Utils/LoggedError";
import {Footer, SubredditCacheConfig} from "../Common/interfaces";
import {
CacheOptions,
Footer, OperatorConfig, ResourceStats,
SubredditCacheConfig, TTLConfig
} from "../Common/interfaces";
import UserNotes from "./UserNotes";
import Mustache from "mustache";
import he from "he";
import {AuthorCriteria} from "../Author/Author";
import {SPoll} from "./Streams";
import {Cache} from 'cache-manager';
export const DEFAULT_FOOTER = '\r\n*****\r\nThis action was performed by [a bot.]({{botLink}}) Mention a moderator or [send a modmail]({{modmailLink}}) if you have any ideas, questions, or concerns about this action.';
export interface SubredditResourceOptions extends SubredditCacheConfig, Footer {
enabled: boolean;
export interface SubredditResourceConfig extends Footer {
caching?: SubredditCacheConfig,
subreddit: Subreddit,
logger: Logger;
}
interface SubredditResourceOptions extends Footer {
ttl: Required<TTLConfig>
cache?: Cache
cacheType: string;
cacheSettingsHash: string
subreddit: Subreddit,
logger: Logger;
}
export interface SubredditResourceSetOptions extends SubredditCacheConfig, Footer {
enabled: boolean;
}
export class SubredditResources {
enabled!: boolean;
//enabled!: boolean;
protected authorTTL!: number;
protected useSubredditAuthorCache!: boolean;
protected wikiTTL!: number;
name: string;
protected logger: Logger;
userNotes: UserNotes;
footer!: false | string;
footer: false | string = DEFAULT_FOOTER;
subreddit: Subreddit
cache?: Cache
cacheType: string
cacheSettingsHash?: string;
stats: { cache: ResourceStats };
constructor(name: string, options: SubredditResourceOptions) {
const {
subreddit,
logger,
enabled = true,
userNotesTTL = 60000,
ttl: {
userNotesTTL,
authorTTL,
wikiTTL,
},
cache,
cacheType,
cacheSettingsHash,
} = options || {};
this.cacheSettingsHash = cacheSettingsHash;
this.cache = cache;
this.cacheType = cacheType;
this.authorTTL = authorTTL;
this.wikiTTL = wikiTTL;
this.subreddit = subreddit;
this.name = name;
if (logger === undefined) {
@@ -58,57 +94,62 @@ export class SubredditResources {
this.logger = logger.child({labels: ['Resource Cache']}, mergeArr);
}
this.userNotes = new UserNotes(enabled ? userNotesTTL : 0, this.subreddit, this.logger)
this.setOptions(options);
this.stats = {
cache: cacheStats()
};
const cacheUseCB = (miss: boolean) => {
this.stats.cache.userNotes.requests++;
this.stats.cache.userNotes.miss += miss ? 1 : 0;
}
this.userNotes = new UserNotes(userNotesTTL, this.subreddit, this.logger, this.cache, cacheUseCB)
}
setOptions (options: SubredditResourceSetOptions) {
const {
enabled = true,
authorTTL,
userNotesTTL = 60000,
wikiTTL = 300000, // 5 minutes
footer = DEFAULT_FOOTER
} = options || {};
this.footer = footer;
this.enabled = manager.enabled ? enabled : false;
if (authorTTL === undefined) {
this.useSubredditAuthorCache = false;
this.authorTTL = manager.authorTTL;
} else {
this.useSubredditAuthorCache = true;
this.authorTTL = authorTTL;
async getCacheKeyCount() {
if (this.cache !== undefined && this.cache.store.keys !== undefined) {
return (await this.cache.store.keys()).length;
}
return 0;
}
getStats() {
return {
cache: {
// TODO could probably combine these two
totalRequests: Object.values(this.stats.cache).reduce((acc, curr) => acc + curr.requests, 0),
types: Object.keys(this.stats.cache).reduce((acc, curr) => {
const per = acc[curr].miss === 0 ? 0 : formatNumber(acc[curr].miss / acc[curr].requests) * 100;
// @ts-ignore
acc[curr].missPercent = `${formatNumber(per, {toFixed: 0})}%`;
return acc;
}, this.stats.cache)
}
}
this.wikiTTL = wikiTTL;
this.userNotes.notesTTL = enabled ? userNotesTTL : 0;
}
async getAuthorActivities(user: RedditUser, options: AuthorTypedActivitiesOptions): Promise<Array<Submission | Comment>> {
const useCache = this.enabled && this.authorTTL > 0;
let hash;
if (useCache) {
if (this.cache !== undefined && this.authorTTL > 0) {
const userName = user.name;
const hashObj: any = {...options, userName};
if (this.useSubredditAuthorCache) {
hashObj.subreddit = this.name;
}
hash = objectHash.sha1({...options, userName});
const hash = objectHash.sha1({...options, userName});
const cacheVal = cache.get(hash);
if (null !== cacheVal) {
this.stats.cache.author.requests++;
let miss = false;
const cacheVal = await this.cache.wrap(hash, async () => {
miss = true;
return await getAuthorActivities(user, options);
}, {ttl: this.authorTTL});
if (!miss) {
this.logger.debug(`Cache Hit: ${userName} (${options.type || 'overview'})`);
return cacheVal as Array<Submission | Comment>;
} else {
this.stats.cache.author.miss++;
}
return cacheVal as Array<Submission | Comment>;
}
const items = await getAuthorActivities(user, options);
if (useCache) {
cache.put(hash, items, this.authorTTL);
}
return Promise.resolve(items);
return await getAuthorActivities(user, options);
}
async getAuthorComments(user: RedditUser, options: AuthorActivitiesOptions): Promise<Comment[]> {
@@ -138,14 +179,16 @@ export class SubredditResources {
return val;
}
const useCache = this.enabled && this.wikiTTL > 0;
// try to get cached value first
let hash = `${subreddit.display_name}-${cacheKey}`;
if (useCache) {
const cachedContent = cache.get(hash);
if (cachedContent !== null) {
if (this.cache !== undefined && this.wikiTTL > 0) {
this.stats.cache.content.requests++;
const cachedContent = await this.cache.get(hash);
if (cachedContent !== undefined) {
this.logger.debug(`Cache Hit: ${cacheKey}`);
return cachedContent;
return cachedContent as string;
} else {
this.stats.cache.content.miss++;
}
}
@@ -180,37 +223,37 @@ export class SubredditResources {
}
}
if (useCache) {
cache.put(hash, wikiContent, this.wikiTTL);
if (this.cache !== undefined && this.wikiTTL > 0) {
this.cache.set(hash, wikiContent, this.wikiTTL);
}
return wikiContent;
}
async testAuthorCriteria(item: (Comment | Submission), authorOpts: AuthorCriteria, include = true) {
const useCache = this.enabled && this.authorTTL > 0;
let hash;
if (useCache) {
if (this.cache !== undefined && this.authorTTL > 0) {
const hashObj = {itemId: item.id, ...authorOpts, include};
hash = `authorCrit-${objectHash.sha1(hashObj)}`;
const cachedAuthorTest = cache.get(hash);
if (null !== cachedAuthorTest) {
const hash = `authorCrit-${objectHash.sha1(hashObj)}`;
this.stats.cache.authorCrit.requests++;
let miss = false;
const cachedAuthorTest = await this.cache.wrap(hash, async () => {
miss = true;
return await testAuthorCriteria(item, authorOpts, include, this.userNotes);
}, {ttl: this.authorTTL});
if (!miss) {
this.logger.debug(`Cache Hit: Author Check on ${item.id}`);
return cachedAuthorTest;
} else {
this.stats.cache.authorCrit.miss++;
}
return cachedAuthorTest;
}
const result = await testAuthorCriteria(item, authorOpts, include, this.userNotes);
if (useCache) {
cache.put(hash, result, this.authorTTL);
}
return result;
return await testAuthorCriteria(item, authorOpts, include, this.userNotes);
}
async generateFooter(item: Submission | Comment, actionFooter?: false | string)
{
async generateFooter(item: Submission | Comment, actionFooter?: false | string) {
let footer = actionFooter !== undefined ? actionFooter : this.footer;
if(footer === false) {
if (footer === false) {
return '';
}
const subName = await item.subreddit.display_name;
@@ -226,6 +269,32 @@ class SubredditResourcesManager {
resources: Map<string, SubredditResources> = new Map();
authorTTL: number = 10000;
enabled: boolean = true;
modStreams: Map<string, SPoll<Snoowrap.Submission | Snoowrap.Comment>> = new Map();
defaultCache?: Cache;
cacheType: string = 'none';
ttlDefaults!: Required<TTLConfig>;
setDefaultsFromConfig(config: OperatorConfig) {
const {
caching: {
authorTTL,
userNotesTTL,
wikiTTL,
provider,
},
} = config;
this.setDefaultCache(provider);
this.setTTLDefaults({authorTTL, userNotesTTL, wikiTTL});
}
setDefaultCache(options: CacheOptions) {
this.cacheType = options.store;
this.defaultCache = createCacheManager(options);
}
setTTLDefaults(def: Required<TTLConfig>) {
this.ttlDefaults = def;
}
get(subName: string): SubredditResources | undefined {
if (this.resources.has(subName)) {
@@ -234,9 +303,56 @@ class SubredditResourcesManager {
return undefined;
}
set(subName: string, initOptions: SubredditResourceOptions): SubredditResources {
const resource = new SubredditResources(subName, initOptions);
this.resources.set(subName, resource);
set(subName: string, initOptions: SubredditResourceConfig): SubredditResources {
let hash = 'default';
const { caching, ...init } = initOptions;
let opts: SubredditResourceOptions;
if(caching !== undefined) {
const {provider = 'memory', ...rest} = caching;
let cacheConfig = {
provider: buildCacheOptionsFromProvider(provider),
ttl: {
...this.ttlDefaults,
...rest
},
}
hash = objectHash.sha1(cacheConfig);
const {provider: trueProvider, ...trueRest} = cacheConfig;
opts = {
cache: createCacheManager(trueProvider),
cacheType: trueProvider.store,
cacheSettingsHash: hash,
...init,
...trueRest,
};
} else {
opts = {
cache: this.defaultCache,
cacheType: this.cacheType,
cacheSettingsHash: hash,
ttl: this.ttlDefaults,
...init,
}
}
let resource: SubredditResources;
const res = this.get(subName);
if(res === undefined || res.cacheSettingsHash !== hash) {
if(res !== undefined && res.cache !== undefined) {
res.cache.reset();
}
resource = new SubredditResources(subName, opts);
this.resources.set(subName, resource);
} else {
// just set non-cache related settings
resource = res;
if(opts.footer !== resource.footer) {
resource.footer = opts.footer || DEFAULT_FOOTER;
}
// reset cache stats when configuration is reloaded
resource.stats.cache = cacheStats();
}
return resource;
}
}

View File

@@ -1,12 +1,12 @@
import dayjs, {Dayjs} from "dayjs";
import {Comment, RedditUser, WikiPage} from "snoowrap";
import cache from 'memory-cache';
import {COMMENT_URL_ID, deflateUserNotes, inflateUserNotes, parseLinkIdentifier, SUBMISSION_URL_ID} from "../util";
import Subreddit from "snoowrap/dist/objects/Subreddit";
import {Logger} from "winston";
import LoggedError from "../Utils/LoggedError";
import Submission from "snoowrap/dist/objects/Submission";
import {RichContent} from "../Common/interfaces";
import {Cache} from 'cache-manager';
interface RawUserNotesPayload {
ver: number,
@@ -54,15 +54,19 @@ export class UserNotes {
moderators?: RedditUser[];
logger: Logger;
identifier: string;
cache?: Cache
cacheCB: Function;
users: Map<string, UserNote[]> = new Map();
constructor(ttl: number, subreddit: Subreddit, logger: Logger) {
constructor(ttl: number, subreddit: Subreddit, logger: Logger, cache: Cache | undefined, cacheCB: Function) {
this.notesTTL = ttl;
this.subreddit = subreddit;
this.logger = logger;
this.wiki = subreddit.getWikiPage('usernotes');
this.identifier = `${this.subreddit.display_name}-usernotes`;
this.cache = cache;
this.cacheCB = cacheCB;
}
async getUserNotes(user: RedditUser): Promise<UserNote[]> {
@@ -85,7 +89,7 @@ export class UserNotes {
const notes = rawNotes.ns.map(x => UserNote.fromRaw(x, payload.constants, this.moderators as RedditUser[]));
// sort in ascending order by time
notes.sort((a, b) => a.time.isBefore(b.time) ? -1 : 1);
if (this.notesTTL > 0) {
if (this.notesTTL > 0 && this.cache !== undefined) {
this.users.set(user.name, notes);
}
return notes;
@@ -118,7 +122,7 @@ export class UserNotes {
payload.blob[item.author.name].ns.push(newNote.toRaw(payload.constants));
await this.saveData(payload);
if(this.notesTTL > 0) {
if(this.notesTTL > 0 && this.cache !== undefined) {
const currNotes = this.users.get(item.author.name) || [];
currNotes.push(newNote);
this.users.set(item.author.name, currNotes);
@@ -133,11 +137,13 @@ export class UserNotes {
}
async retrieveData(): Promise<RawUserNotesPayload> {
if (this.notesTTL > 0) {
const cachedPayload = cache.get(this.identifier);
if (cachedPayload !== null) {
return cachedPayload as RawUserNotesPayload;
if (this.notesTTL > 0 && this.cache !== undefined) {
const cachedPayload = await this.cache.get(this.identifier);
if (cachedPayload !== undefined) {
this.cacheCB(false);
return cachedPayload as unknown as RawUserNotesPayload;
}
this.cacheCB(true);
}
try {
@@ -149,10 +155,9 @@ export class UserNotes {
userNotes.blob = inflateUserNotes(userNotes.blob);
if (this.notesTTL > 0) {
cache.put(`${this.subreddit.display_name}-usernotes`, userNotes, this.notesTTL, () => {
this.users = new Map();
});
if (this.notesTTL > 0 && this.cache !== undefined) {
await this.cache.set(`${this.subreddit.display_name}-usernotes`, userNotes, this.notesTTL);
this.users = new Map();
}
return userNotes as RawUserNotesPayload;
@@ -173,10 +178,9 @@ export class UserNotes {
//this.wiki = await this.wiki.refresh();
// @ts-ignore
this.wiki = await this.subreddit.getWikiPage('usernotes').edit({text: JSON.stringify(wikiPayload), reason: 'ContextBot edited usernotes'});
if (this.notesTTL > 0) {
cache.put(this.identifier, payload, this.notesTTL, () => {
this.users = new Map();
});
if (this.notesTTL > 0 && this.cache !== undefined) {
await this.cache.set(this.identifier, payload, this.notesTTL);
this.users = new Map();
}
return payload as RawUserNotesPayload;

View File

@@ -1,74 +1,71 @@
import commander, {InvalidOptionArgumentError} from "commander";
import {argParseInt, parseBool} from "../util";
import {argParseInt, parseBool, parseBoolWithDefault} from "../util";
export const clientId = new commander.Option('-c, --clientId <id>', 'Client ID for your Reddit application (default: process.env.CLIENT_ID)')
.default(process.env.CLIENT_ID);
clientId.required = true;
export const clientId = new commander.Option('-i, --clientId <id>', 'Client ID for your Reddit application (default: process.env.CLIENT_ID)');
export const clientSecret = new commander.Option('-e, --clientSecret <secret>', 'Client Secret for your Reddit application (default: process.env.CLIENT_SECRET)')
.default(process.env.CLIENT_SECRET);
clientSecret.required = true;
export const clientSecret = new commander.Option('-e, --clientSecret <secret>', 'Client Secret for your Reddit application (default: process.env.CLIENT_SECRET)');
export const accessToken = new commander.Option('-a, --accessToken <token>', 'Access token retrieved from authenticating an account with your Reddit Application (default: process.env.ACCESS_TOKEN)')
.default(process.env.ACCESS_TOKEN);
accessToken.required = true;
export const redirectUri = new commander.Option('-u, --redirectUri <uri>', 'Redirect URI for your Reddit application (default: process.env.REDIRECT_URI)');
export const refreshToken = new commander.Option('-r, --refreshToken <token>', 'Refresh token retrieved from authenticating an account with your Reddit Application (default: process.env.REFRESH_TOKEN)')
.default(process.env.REFRESH_TOKEN);
refreshToken.required = true;
export const sessionSecret = new commander.Option('-t, --sessionSecret <secret>', 'Secret use to encrypt session id/data (default: process.env.SESSION_SECRET || a random string)');
export const subreddits = new commander.Option('-s, --subreddits <list...>', 'List of subreddits to run on. Bot will run on all subs it has access to if not defined')
.default(process.env.SUBREDDITS || [], 'process.env.SUBREDDITS (comma-separated)');
export const createAccessTokenOption = () => new commander.Option('-a, --accessToken <token>', 'Access token retrieved from authenticating an account with your Reddit Application (default: process.env.ACCESS_TOKEN)');
export const logDir = new commander.Option('-d, --logDir <dir>', 'Absolute path to directory to store rotated logs in')
.default(process.env.LOG_DIR || `${process.cwd()}/logs`, 'process.env.LOG_DIR || process.cwd()/logs');
export const createRefreshTokenOption = () => new commander.Option('-r, --refreshToken <token>', 'Refresh token retrieved from authenticating an account with your Reddit Application (default: process.env.REFRESH_TOKEN)');
export const logLevel = new commander.Option('-l, --logLevel <level>', 'Log level')
.default(process.env.LOG_LEVEL || 'verbose', 'process.env.LOG_LEVEL || verbose');
export const subreddits = new commander.Option('-s, --subreddits <list...>', 'List of subreddits to run on. Bot will run on all subs it has access to if not defined (default: process.env.SUBREDDITS)');
export const wikiConfig = new commander.Option('-w, --wikiConfig <path>', 'Relative url to contextbot wiki page EX https://reddit.com/r/subreddit/wiki/<path>')
.default(process.env.WIKI_CONFIG || 'botconfig/contextbot', "process.env.WIKI_CONFIG || 'botconfig/contextbot'");
export const logDir = new commander.Option('-d, --logDir [dir]', 'Absolute path to directory to store rotated logs in. Leaving undefined disables rotating logs (default: process.env.LOG_DIR)');
export const snooDebug = new commander.Option('--snooDebug', 'Set Snoowrap to debug')
.argParser(parseBool)
.default(process.env.SNOO_DEBUG || false, 'process.env.SNOO_DEBUG || false');
export const logLevel = new commander.Option('-l, --logLevel <level>', 'Minimum level to log at (default: process.env.LOG_LEVEL || verbose)');
export const authorTTL = new commander.Option('--authorTTL <ms>', 'Set the TTL (ms) for the Author Activities shared cache')
.argParser(argParseInt)
.default(process.env.AUTHOR_TTL || 10000, 'process.env.AUTHOR_TTL || 10000');
export const wikiConfig = new commander.Option('-w, --wikiConfig <path>', `Relative url to contextbot wiki page EX https://reddit.com/r/subreddit/wiki/<path> (default: process.env.WIKI_CONFIG || 'botconfig/contextbot')`);
export const heartbeat = new commander.Option('--heartbeat <s>', 'Interval, in seconds, between heartbeat logs. Set to 0 to disable')
.argParser(argParseInt)
//heartbeat.defaultValueDescription = 'process.env.HEARTBEAT || 300';
.default(process.env.HEARTBEAT || 300, 'process.env.HEARTBEAT || 300');
export const snooDebug = new commander.Option('--snooDebug', `Set Snoowrap to debug. If undefined will be on if logLevel='debug' (default: process.env.SNOO_DEBUG)`)
.argParser(parseBool);
export const apiRemaining = new commander.Option('--apiLimitWarning <remaining>', 'When API limit remaining (600/10min) is lower than this value log statements for limit will be raised to WARN level')
.argParser(argParseInt)
.default(process.env.API_REMAINING || 250, 'process.env.API_REMAINING || 250');
export const authorTTL = new commander.Option('--authorTTL <ms>', 'Set the TTL (ms) for the Author Activities shared cache (default: process.env.AUTHOR_TTL || 60000)')
.argParser(argParseInt);
export const dryRun = new commander.Option('--dryRun', 'Set dryRun=true for all checks/actions on all subreddits (overrides any existing)')
.argParser(parseBool)
.default(process.env.DRYRUN || false, 'process.env.DRYRUN || false');
export const caching = new commander.Option('--caching <provider>', `Set the caching provider to use. Options 'memory', 'redis', or 'none' to disable (default: process.env.CACHING || memory)`)
.argParser(argParseInt);
export const disableCache = new commander.Option('--disableCache', 'Disable caching for all subreddits')
.argParser(parseBool)
.default(process.env.DISABLE_CACHE || false, 'process.env.DISABLE_CACHE || false');
export const heartbeat = new commander.Option('--heartbeat <s>', 'Interval, in seconds, between heartbeat checks. (default: process.env.HEARTBEAT || 300)')
.argParser(argParseInt);
export const softLimit = new commander.Option('--softLimit <limit>', 'When API limit remaining (600/10min) is lower than this subreddits will have SLOW MODE enabled (default: process.env.SOFT_LIMIT || 250)')
.argParser(argParseInt);
export const hardLimit = new commander.Option('--hardLimit <limit>', 'When API limit remaining (600/10min) is lower than this all subreddit polling will be paused until api limit reset (default: process.env.HARD_LIMIT || 250)')
.argParser(argParseInt);
export const dryRun = new commander.Option('--dryRun', 'Set all subreddits in dry run mode, overriding configurations (default: process.env.DRYRUN || false)')
.argParser(parseBoolWithDefault(undefined));
export const checks = new commander.Option('-h, --checks <checkNames...>', 'An optional list of Checks, by name, that should be run. If none are specified all Checks for the Subreddit the Activity is in will be run');
export const limit = new commander.Option('--limit <limit>', 'Limit the number of unmoderated activities pulled for each subreddit')
.argParser(parseInt);
export const proxy = new commander.Option('--proxy <proxyEndpoint>', 'Proxy Snoowrap requests through this endpoint (default: process.env.PROXY)');
export const getUniversalOptions = (): commander.Option[] => {
let options = [];
export const operator = new commander.Option('--operator <name>', 'Username of the reddit user operating this application, used for displaying OP level info/actions in UI (default: process.env.OPERATOR)');
options.push(dryRun);
export const operatorDisplay = new commander.Option('--operatorDisplay <name>', 'An optional name to display who is operating this application in the UI (default: process.env.OPERATOR_DISPLAY || Anonymous)');
options = [
export const port = new commander.Option('-p, --port <port>', 'Port for web server to listen on (default: process.env.PORT || 8085)')
.argParser(argParseInt);
export const sharedMod = new commander.Option('-q, --shareMod', `If enabled then all subreddits using the default settings to poll "unmoderated" or "modqueue" will retrieve results from a shared request to /r/mod (default: process.env.SHARE_MOD || false)`)
.argParser(parseBool);
export const operatorConfig = new commander.Option('-c, --operatorConfig <path>', 'An absolute path to a JSON file to load all parameters from (default: process.env.OPERATOR_CONFIG)');
export const getUniversalWebOptions = (): commander.Option[] => {
return [
clientId,
clientSecret,
accessToken,
refreshToken,
createAccessTokenOption(),
createRefreshTokenOption(),
redirectUri,
sessionSecret,
subreddits,
logDir,
logLevel,
@@ -76,11 +73,50 @@ export const getUniversalOptions = (): commander.Option[] => {
snooDebug,
authorTTL,
heartbeat,
apiRemaining,
softLimit,
hardLimit,
dryRun,
disableCache
]
return options;
proxy,
operator,
operatorDisplay,
port,
sharedMod,
operatorConfig,
];
}
export const getUniversalCLIOptions = (): commander.Option[] => {
const at = createAccessTokenOption();
at.required = true;
const rt = createRefreshTokenOption();
rt.required = true;
return [
clientId,
clientSecret,
at,
rt,
subreddits,
logDir,
logLevel,
wikiConfig,
snooDebug,
authorTTL,
heartbeat,
softLimit,
hardLimit,
dryRun,
proxy,
sharedMod,
operatorConfig,
]
}
export const addOptions = (com: commander.Command, options: commander.Option[]): commander.Command => {
return options.reduce((c, opt) => c.addOption(opt), com);
}
// TODO
export const subredditConfig = new commander.Option('--subredditsConfig <path>', 'An absolute path to a JSON file to load subreddit configs from');

View File

@@ -0,0 +1,44 @@
import Snoowrap from "snoowrap";
// const proxyFactory = (endpoint: string) => {
// return class ProxiedSnoowrap extends Snoowrap {
// rawRequest(options: any) {
// // send all requests through a proxy
// return super.rawRequest(Object.assign(options, {
// proxy: endpoint,
// tunnel: false
// }))
// }
// }
// }
/**
 * Snoowrap subclass that counts outgoing oauth requests, useful for tracking
 * API usage against reddit's rate limit.
 */
export class RequestTrackingSnoowrap extends Snoowrap {
// number of initial (non-retry) oauth requests made by this client
requestCount: number = 0;
oauthRequest(...args: any) {
// only count the first attempt of each request -- args[1] appears to be the
// attempt number (undefined or 1 on the initial call) so internal retries
// are not double-counted. NOTE(review): inferred from snoowrap internals, confirm
if(args[1] === undefined || args[1] === 1) {
this.requestCount++;
}
// @ts-ignore
return super.oauthRequest(...args);
}
}
/**
 * Request-tracking Snoowrap client that routes every raw request through a
 * configured proxy endpoint.
 */
export class ProxiedSnoowrap extends RequestTrackingSnoowrap {
    proxyEndpoint: string;

    constructor(args: any) {
        super(args);
        this.proxyEndpoint = args.proxy;
    }

    rawRequest(options: any) {
        // force the request through the proxy endpoint; tunneling disabled
        const proxiedOptions = Object.assign(options, {
            proxy: this.proxyEndpoint,
            tunnel: false
        });
        return super.rawRequest(proxiedOptions)
    }
}

View File

@@ -497,35 +497,13 @@ export const itemContentPeek = async (item: (Comment | Submission), peekLength =
peek = `${truncatePeek(item.title)} by ${author} https://reddit.com${item.permalink}`;
} else if (item instanceof Comment) {
content = truncatePeek(item.body)
try {
// @ts-ignore
const client = item._r as Snoowrap; // protected? idgaf
// @ts-ignore
const commentSub = await client.getSubmission(item.link_id);
const [p, {submissionTitle: subTitle}] = await itemContentPeek(commentSub);
submissionTitle = subTitle;
peek = `${truncatePeek(content)} in ${subTitle} by ${author} https://reddit.com${item.permalink}`;
} catch (err) {
// possible comment is not on a submission, just swallow
}
content = truncatePeek(item.body);
peek = `${truncatePeek(content)} by ${author} in https://reddit.com${item.permalink}`;
}
return [peek, {submissionTitle, content, author, permalink: item.permalink}];
}
// @ts-ignore
export const getSubmissionFromComment = async (item: Comment): Promise<Submission> => {
try {
// @ts-ignore
const client = item._r as Snoowrap; // protected? idgaf
// @ts-ignore
return client.getSubmission(item.link_id);
} catch (err) {
// possible comment is not on a submission, just swallow
}
}
const SPOTIFY_PODCAST_AUTHOR_REGEX: RegExp = /this episode from (?<author>.*?) on Spotify./;
const SPOTIFY_PODCAST_AUTHOR_REGEX_URL = 'https://regexr.com/61c2f';

View File

@@ -0,0 +1,55 @@
import {labelledFormat, logLevels} from "../util";
import winston, {Logger} from "winston";
const {transports} = winston;
export const getLogger = (options: any, name = 'default'): Logger => {
if(!winston.loggers.has(name)) {
const {
path,
level,
additionalTransports = [],
} = options || {};
const consoleTransport = new transports.Console();
const myTransports = [
consoleTransport,
];
let errorTransports = [consoleTransport];
for (const a of additionalTransports) {
myTransports.push(a);
errorTransports.push(a);
}
if (path !== undefined && path !== '') {
const rotateTransport = new winston.transports.DailyRotateFile({
dirname: path,
createSymlink: true,
symlinkName: 'contextBot-current.log',
filename: 'contextBot-%DATE%.log',
datePattern: 'YYYY-MM-DD',
maxSize: '5m'
});
// @ts-ignore
myTransports.push(rotateTransport);
// @ts-ignore
errorTransports.push(rotateTransport);
}
const loggerOptions = {
level: level || 'info',
format: labelledFormat(),
transports: myTransports,
levels: logLevels,
exceptionHandlers: errorTransports,
rejectionHandlers: errorTransports,
};
winston.loggers.add(name, loggerOptions);
}
return winston.loggers.get(name);
}

View File

@@ -7,11 +7,23 @@ import relTime from 'dayjs/plugin/relativeTime.js';
import sameafter from 'dayjs/plugin/isSameOrAfter.js';
import samebefore from 'dayjs/plugin/isSameOrBefore.js';
import {Manager} from "./Subreddit/Manager";
import {Command} from 'commander';
import {checks, getUniversalOptions, limit} from "./Utils/CommandConfig";
import {Command, Argument} from 'commander';
import {
addOptions,
checks,
getUniversalCLIOptions,
getUniversalWebOptions,
operatorConfig
} from "./Utils/CommandConfig";
import {App} from "./App";
import createWebServer from './Server/server';
import createHelperServer from './Server/helper';
import Submission from "snoowrap/dist/objects/Submission";
import {COMMENT_URL_ID, parseLinkIdentifier, SUBMISSION_URL_ID} from "./util";
import LoggedError from "./Utils/LoggedError";
import {getLogger} from "./Utils/loggerFactory";
import {buildOperatorConfigWithDefaults, parseOperatorConfigFromSources} from "./ConfigBuilder";
dayjs.extend(utc);
dayjs.extend(dduration);
@@ -22,33 +34,76 @@ dayjs.extend(samebefore);
const commentReg = parseLinkIdentifier([COMMENT_URL_ID]);
const submissionReg = parseLinkIdentifier([SUBMISSION_URL_ID]);
const preRunCmd = new Command();
preRunCmd.addOption(operatorConfig);
preRunCmd.allowUnknownOption();
const program = new Command();
for (const o of getUniversalOptions()) {
program.addOption(o);
}
(async function () {
try {
program
let runCommand = program
.command('run')
.description('Runs bot normally')
.action(async (run, command) => {
const app = new App(program.opts());
await app.buildManagers();
await app.runManagers();
});
.addArgument(new Argument('[interface]', 'Which interface to start the bot with').choices(['web', 'cli']).default(undefined, 'process.env.WEB || true'))
.description('Monitor new activities from configured subreddits.')
.allowUnknownOption();
runCommand = addOptions(runCommand, getUniversalWebOptions());
runCommand.action(async (interfaceVal, opts) => {
const config = buildOperatorConfigWithDefaults(await parseOperatorConfigFromSources({...opts, web: interfaceVal !== undefined ? interfaceVal === 'web': undefined}));
const {
credentials: {
redirectUri,
clientId,
clientSecret,
accessToken,
refreshToken,
},
web: {
enabled: web,
},
logging,
} = config;
const logger = getLogger(logging, 'init');
const hasClient = clientId !== undefined && clientSecret !== undefined;
const hasNoTokens = accessToken === undefined && refreshToken === undefined;
try {
if (web) {
if (hasClient && hasNoTokens) {
// run web helper
const server = createHelperServer(config);
await server;
} else {
if (redirectUri === undefined) {
logger.warn(`No 'redirectUri' found in arg/env. Bot will still run but web interface will not be accessible.`);
}
const server = createWebServer(config);
await server;
}
} else {
const app = new App(config);
await app.buildManagers();
await app.runManagers();
}
} catch (err) {
throw err;
}
});
program
let checkCommand = program
.command('check <activityIdentifier> [type]')
.allowUnknownOption()
.description('Run check(s) on a specific activity', {
activityIdentifier: 'Either a permalink URL or the ID of the Comment or Submission',
type: `If activityIdentifier is not a permalink URL then the type of activity ('comment' or 'submission'). May also specify 'submission' type when using a permalink to a comment to get the Submission`,
})
});
checkCommand = addOptions(checkCommand, getUniversalCLIOptions());
checkCommand
.addOption(checks)
.action(async (activityIdentifier, type, commandOptions = {}) => {
const config = buildOperatorConfigWithDefaults(await parseOperatorConfigFromSources(commandOptions));
const {checks = []} = commandOptions;
const app = new App(program.opts());
const app = new App(config);
let a;
const commentId = commentReg(activityIdentifier);
@@ -93,42 +148,52 @@ for (const o of getUniversalOptions()) {
await app.buildManagers([sub]);
if (app.subManagers.length > 0) {
const manager = app.subManagers.find(x => x.subreddit.display_name === sub) as Manager;
await manager.runChecks(type === 'comment' ? 'Comment' : 'Submission', activity, checks);
await manager.runChecks(type === 'comment' ? 'Comment' : 'Submission', activity, {checkNames: checks});
}
});
program.command('unmoderated <subreddits...>')
let unmodCommand = program.command('unmoderated <subreddits...>')
.description('Run checks on all unmoderated activity in the modqueue', {
subreddits: 'The list of subreddits to run on. If not specified will run on all subreddits the account has moderation access to.'
})
.allowUnknownOption();
unmodCommand = addOptions(unmodCommand, getUniversalCLIOptions());
unmodCommand
.addOption(checks)
.addOption(limit)
.action(async (subreddits = [], commandOptions = {}) => {
const {checks = [], limit = 100} = commandOptions;
const app = new App(program.opts());
.action(async (subreddits = [], opts = {}) => {
const config = buildOperatorConfigWithDefaults(await parseOperatorConfigFromSources(opts));
const {checks = []} = opts;
const {subreddits: {names}} = config;
const app = new App(config);
await app.buildManagers(subreddits);
await app.buildManagers(names);
for(const manager of app.subManagers) {
const activities = await manager.subreddit.getUnmoderated({limit});
for(const a of activities.reverse()) {
await manager.runChecks(a instanceof Submission ? 'Submission' : 'Comment', a, checks);
for (const manager of app.subManagers) {
const activities = await manager.subreddit.getUnmoderated();
for (const a of activities.reverse()) {
manager.queue.push({
checkType: a instanceof Submission ? 'Submission' : 'Comment',
activity: a,
options: {checkNames: checks}
});
}
}
});
await program.parseAsync();
} catch (err) {
const logger = winston.loggers.get('default');
if (err.name === 'StatusCodeError' && err.response !== undefined) {
const authHeader = err.response.headers['www-authenticate'];
if (authHeader !== undefined && authHeader.includes('insufficient_scope')) {
logger.error('Reddit responded with a 403 insufficient_scope, did you choose the correct scopes?');
if (!err.logged && !(err instanceof LoggedError)) {
const logger = winston.loggers.get('default');
if (err.name === 'StatusCodeError' && err.response !== undefined) {
const authHeader = err.response.headers['www-authenticate'];
if (authHeader !== undefined && authHeader.includes('insufficient_scope')) {
logger.error('Reddit responded with a 403 insufficient_scope, did you choose the correct scopes?');
}
}
console.log(err);
}
console.log(err);
process.kill(process.pid, 'SIGTERM');
}
}());
export {Author} from "./Author/Author";

View File

@@ -9,11 +9,22 @@ import {InvalidOptionArgumentError} from "commander";
import Submission from "snoowrap/dist/objects/Submission";
import {Comment} from "snoowrap";
import {inflateSync, deflateSync} from "zlib";
import {ActivityWindowCriteria, DurationComparison, GenericComparison, StringOperator} from "./Common/interfaces";
import {
ActivityWindowCriteria, CacheOptions, CacheProvider,
DurationComparison,
GenericComparison, NamedGroup,
PollingOptionsStrong, RegExResult, ResourceStats,
StringOperator
} from "./Common/interfaces";
import JSON5 from "json5";
import yaml, {JSON_SCHEMA} from "js-yaml";
import SimpleError from "./Utils/SimpleError";
import InvalidRegexError from "./Utils/InvalidRegexError";
import {constants, promises} from "fs";
import {cacheOptDefaults} from "./Common/defaults";
import cacheManager from "cache-manager";
import redisStore from "cache-manager-redis-store";
import crypto from "crypto";
const {format} = winston;
const {combine, printf, timestamp, label, splat, errors} = format;
@@ -32,6 +43,28 @@ const errorAwareFormat = {
transform: (info: any, opts: any) => {
// don't need to log stack trace if we know the error is just a simple message (we handled it)
const stack = !(info instanceof SimpleError) && !(info.message instanceof SimpleError);
const {name, response, message, stack: errStack, error, statusCode} = info;
if(name === 'StatusCodeError' && response !== undefined && response.headers !== undefined && response.headers['content-type'].includes('html')) {
// reddit returns html even when we specify raw_json in the querystring (via snoowrap)
// which means the html gets set as the message for the error AND gets added to the stack as the message
// and we end up with a h u g e log statement full of noisy html >:(
const errorSample = error.slice(0, 10);
const messageBeforeIndex = message.indexOf(errorSample);
let newMessage = `Status Error ${statusCode} from Reddit`;
if(messageBeforeIndex > 0) {
newMessage = `${message.slice(0, messageBeforeIndex)} - ${newMessage}`;
}
let cleanStack = errStack;
// try to get just stacktrace by finding beginning of what we assume is the actual trace
if(errStack) {
cleanStack = `${newMessage}\n${errStack.slice(errStack.indexOf('at new StatusCodeError'))}`;
}
// now put it all together so its nice and clean
info.message = newMessage;
info.stack = cleanStack;
}
return errors().transform(info, { stack });
}
}
@@ -45,6 +78,7 @@ export const defaultFormat = printf(({
level,
message,
labels = ['App'],
subreddit,
leaf,
itemId,
timestamp,
@@ -77,7 +111,7 @@ export const defaultFormat = printf(({
}
const labelContent = `${nodes.map((x: string) => `[${x}]`).join(' ')}`;
return `${timestamp} ${level.padEnd(7)}: ${labelContent} ${msg}${stringifyValue !== '' ? ` ${stringifyValue}` : ''}${stackMsg}`;
return `${timestamp} ${level.padEnd(7)}: ${subreddit !== undefined ? `{${subreddit}} ` : ''}${labelContent} ${msg}${stringifyValue !== '' ? ` ${stringifyValue}` : ''}${stackMsg}`;
});
@@ -327,6 +361,8 @@ export function parseBool(value: any, prev: any = false): boolean {
throw new InvalidOptionArgumentError('Not a boolean value.');
}
export const parseBoolWithDefault = (defaultValue: any) => (arg: any) => parseBool(arg, defaultValue);
export function activityWindowText(activities: (Submission | Comment)[], suffix = false): (string | undefined) {
if (activities.length === 0) {
return undefined;
@@ -344,8 +380,6 @@ export function normalizeName(val: string) {
// https://github.com/toolbox-team/reddit-moderator-toolbox/wiki/Subreddit-Wikis%3A-usernotes#working-with-the-blob
export const inflateUserNotes = (blob: string) => {
//const binaryData = Buffer.from(blob, 'base64').toString('binary');
//const str = pako.inflate(binaryData, {to: 'string'});
const buffer = Buffer.from(blob, 'base64');
const str = inflateSync(buffer).toString('utf-8');
@@ -358,7 +392,6 @@ export const deflateUserNotes = (usersObject: object) => {
const jsonString = JSON.stringify(usersObject);
// Deflate/compress the string
//const binaryData = pako.deflate(jsonString);
const binaryData = deflateSync(jsonString);
// Convert binary data to a base64 string with a Buffer
@@ -553,3 +586,331 @@ export const parseExternalUrl = (val: string) => {
}
return (matches.groups as any).url as string;
}
export interface RetryOptions {
    // Max consecutive retries allowed for Reddit request/status-code errors before giving up
    maxRequestRetry: number,
    // Max consecutive retries allowed for any other (non-request) error
    maxOtherRetry: number,
}

/**
 * Builds a stateful retry predicate. The returned function is called with a thrown error and
 * resolves `true` when the caller should retry (after an in-function backoff sleep) or `false`
 * when the retry budget for that error category is exhausted.
 */
export const createRetryHandler = (opts: RetryOptions, logger: Logger) => {
    const {maxRequestRetry, maxOtherRetry} = opts;
    let timeoutCount = 0;
    let otherRetryCount = 0;
    let lastErrorAt: Dayjs | undefined;
    return async (err: any): Promise<boolean> => {
        if (lastErrorAt !== undefined && dayjs().diff(lastErrorAt, 'minute') >= 3) {
            // if it's been longer than 3 minutes since the last error, clear counters
            timeoutCount = 0;
            otherRetryCount = 0;
        }
        lastErrorAt = dayjs();

        if(err.name === 'RequestError' || err.name === 'StatusCodeError') {
            // Retry when there is no status code (network-level failure) or the status is one
            // Reddit is known to return transiently (401 included -- per project history it is
            // occasionally returned spuriously by Reddit itself, not a client issue).
            if (err.statusCode === undefined || ([401, 500, 503, 502, 504, 522].includes(err.statusCode))) {
                timeoutCount++;
                if (timeoutCount > maxRequestRetry) {
                    logger.error(`Reddit request error retries (${timeoutCount}) exceeded max allowed (${maxRequestRetry})`);
                    return false;
                }
                // exponential backoff with a small amount of jitter
                const ms = (Math.pow(2, timeoutCount - 1) + (Math.random() - 0.3) + 1) * 1000;
                logger.warn(`Error occurred while making a request to Reddit (${timeoutCount} in 3 minutes). Will wait ${formatNumber(ms / 1000)} seconds before retrying`);
                await sleep(ms);
                return true;
            } else {
                // any other HTTP status (e.g. 403/404) is not retryable
                return false;
            }
        } else {
            // linear backoff for non-request errors
            otherRetryCount++;
            if (maxOtherRetry < otherRetryCount) {
                return false;
            }
            const ms = (4 * 1000) * otherRetryCount;
            logger.warn(`Non-request error occurred. Will wait ${formatNumber(ms / 1000)} seconds before retrying`);
            await sleep(ms);
            return true;
        }
    }
}
// Matches one bracketed label ("[App]") at a time; the capture group is the label text
const LABELS_REGEX: RegExp = /\[(.+?)]/g;

/**
 * Extracts the bracketed label names from a formatted log line.
 *
 * Fixes: the previous pattern quantified the whole bracket group with `*`, which made
 * `matchAll` also produce an empty match at every position between labels, so the result
 * was littered with empty strings. Matching each `[label]` directly and returning the
 * capture group yields only the actual label names.
 */
export const parseLabels = (log: string): string[] => {
    return Array.from(log.matchAll(LABELS_REGEX), m => m[1]);
}
// A subreddit name embedded in a formatted log line appears as "{subredditName}"
const SUBREDDIT_NAME_LOG_REGEX: RegExp = /{(.+?)}/;

/**
 * Pulls the subreddit name out of a formatted log line, or undefined when none is present.
 */
export const parseSubredditLogName = (val: string): string | undefined => {
    const found = SUBREDDIT_NAME_LOG_REGEX.exec(val);
    if (found === null) {
        return undefined;
    }
    return found[1] as string;
}
// Finds the winston level token ("info :", "error:", etc.) in a formatted log line
export const LOG_LEVEL_REGEX: RegExp = /\s*(debug|warn|info|error|verbose)\s*:/i

/**
 * True when the line's level is at least as severe as `minLevelText`
 * (lower numeric value in `logLevels` = more severe). Lines without a
 * recognizable level token are always excluded.
 */
export const isLogLineMinLevel = (line: string, minLevelText: string): boolean => {
    const levelToken = LOG_LEVEL_REGEX.exec(line);
    if (levelToken === null) {
        return false;
    }
    // logLevels is keyed by level name; indexing with arbitrary strings needs the ignore
    // @ts-ignore
    const threshold = logLevels[minLevelText];
    // @ts-ignore
    const lineLevel = logLevels[levelToken[1] as string];
    return lineLevel <= threshold;
}
// https://regexr.com/3e6m0
const HYPERLINK_REGEX: RegExp = /(http(s)?:\/\/.)?(www\.)?[-a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-z]{2,6}\b([-a-zA-Z0-9@:%_\+.~#?&//=]*)/;

/**
 * Decorates a plain-text log line for HTML display: wraps the level token in a
 * color-coded span, converts newlines to <br />, and anchors the first URL found.
 *
 * Fixes: the `verbose` token was tagged with CSS class "error" (copy-paste from the
 * line above) -- it now gets its own "verbose" class like every other level.
 * Also swaps es2021 `replaceAll` for an equivalent global-regex `replace`.
 */
export const formatLogLineToHtml = (val: string) => {
    return val
        .replace(/(\s*debug\s*):/i, '<span class="debug text-pink-400">$1</span>:')
        .replace(/(\s*warn\s*):/i, '<span class="warn text-yellow-400">$1</span>:')
        .replace(/(\s*info\s*):/i, '<span class="info text-blue-300">$1</span>:')
        .replace(/(\s*error\s*):/i, '<span class="error text-red-400">$1</span>:')
        .replace(/(\s*verbose\s*):/i, '<span class="verbose text-purple-400">$1</span>:')
        .replace(/\n/g, '<br />')
        .replace(HYPERLINK_REGEX, '<a target="_blank" href="$&">$&</a>');
}
// A single captured log line: [numeric timestamp used for ordering, formatted line text]
export type LogEntry = [number, string];

export interface LogOptions {
    // maximum number of lines to return per subreddit
    limit: number,
    // minimum log level to include (a key of logLevels)
    level: string,
    // order of lines by their timestamp
    sort: 'ascending' | 'descending',
    // when true the viewer is an operator and may see all application-level logs
    operator?: boolean,
    // requesting user's name; used to filter app logs for non-operators -- TODO confirm semantics against caller
    user?: string,
}
/**
 * Builds a per-subreddit map of rendered (HTML) log lines from raw log entries.
 *
 * @param logs        raw entries keyed by subreddit name ('app' holds application-level lines)
 * @param subreddits  subreddit names the viewer is allowed to see
 * @param options     level/limit/sort plus viewer identity (operator/user)
 * @returns map of subreddit name (plus a synthetic 'all' key) to formatted HTML lines
 */
export const filterLogBySubreddit = (logs: Map<string, LogEntry[]>, subreddits: string[] = [], options: LogOptions): Map<string, string[]> => {
    const {
        limit,
        level,
        sort,
        operator = false,
        user
    } = options;

    // keep only the subreddits the viewer is allowed to see
    const validSubMap: Map<string, LogEntry[]> = new Map();
    for (const [k, v] of logs) {
        if (subreddits.includes(k)) {
            validSubMap.set(k, v);
        }
    }

    // derive 'all': application-level logs plus every visible subreddit's logs
    let allLogs = (logs.get('app') || []);
    if (!operator) {
        if (user === undefined) {
            // anonymous non-operator sees no app logs at all
            allLogs = [];
        } else {
            // BUGFIX: Array.prototype.filter returns a new array; the original discarded the
            // result, so non-operator users saw every app log line. Assign it back.
            allLogs = allLogs.filter(([time, l]) => {
                const sub = parseSubredditLogName(l);
                return sub !== undefined && sub.includes(user);
            });
        }
    }
    allLogs = Array.from(validSubMap.values()).reduce((acc, logs) => {
        return acc.concat(logs);
    }, allLogs);
    validSubMap.set('all', allLogs);

    const sortFunc = sort === 'ascending' ? (a: LogEntry, b: LogEntry) => a[0] - b[0] : (a: LogEntry, b: LogEntry) => b[0] - a[0];

    const preparedMap: Map<string, string[]> = new Map();
    // for each key: filter by level, sort, slice to limit, then render each line to HTML
    for (const [k, v] of validSubMap.entries()) {
        let preparedEntries = v.filter(([time, l]) => isLogLineMinLevel(l, level));
        preparedEntries.sort(sortFunc);
        // NOTE(review): slice(0, limit + 1) keeps limit+1 entries -- possibly intentional
        // (signals "more lines exist") but looks like an off-by-one; confirm with the UI code.
        preparedMap.set(k, preparedEntries.slice(0, limit + 1).map(([time, l]) => formatLogLineToHtml(l)));
    }

    return preparedMap;
}
// Custom winston level priorities (lower number = more severe).
// Note: 'trace' appears to be an alias sharing priority 5 with 'debug', pushing 'silly' to 6.
export const logLevels = {
    error: 0,
    warn: 1,
    info: 2,
    http: 3,
    verbose: 4,
    debug: 5,
    trace: 5,
    silly: 6
};
/**
 * Renders a polling configuration as a one-line, human-readable summary.
 */
export const pollingInfo = (opt: PollingOptionsStrong) => {
    const delayPart = opt.delayUntil !== undefined ? ` | wait until Activity is ${opt.delayUntil} seconds old` : '';
    return `${opt.pollOn.toUpperCase()} every ${opt.interval} seconds${delayPart} | maximum of ${opt.limit} Activities`;
}
/**
 * Sums every numeric value in a map (e.g. totalling per-resource cache counters).
 */
export const totalFromMapStats = (val: Map<any, number>): number => {
    let total = 0;
    for (const count of val.values()) {
        total += count;
    }
    return total;
}
// Reddit OAuth permission scopes used by the bot -- presumably the set requested during
// the auth flow; confirm against the auth/helper-server code before editing.
export const permissions = [
    'edit',
    'flair',
    'history',
    'identity',
    'modcontributors',
    'modflair',
    'modposts',
    'modself',
    'mysubreddits',
    'read',
    'report',
    'submit',
    'wikiread',
    'wikiedit'
];
/** Renders a boolean as a user-facing "Yes"/"No" string. */
export const boolToString = (val: boolean): string => val ? 'Yes' : 'No';
/**
 * True when a Submission's content is hosted by Reddit itself (native media flags
 * or one of reddit's media domains).
 */
export const isRedditMedia = (act: Submission): boolean => {
    if (act.is_reddit_media_domain || act.is_video) {
        return true;
    }
    return ['v.redd.it', 'i.redd.it'].includes(act.domain);
}
/**
 * True for link Submissions that point at content not hosted by Reddit.
 * Comments and self-posts are never external URL submissions.
 */
export const isExternalUrlSubmission = (act: Comment | Submission): boolean => {
    if (!(act instanceof Submission)) {
        return false;
    }
    return !act.is_self && !isRedditMedia(act);
}
/**
 * Runs a regex (pattern string or RegExp) against a value and normalizes the result.
 * Global regexes collect every match (with positional and named groups) into `global`;
 * non-global regexes return only the first match's capture list and leave `global` empty.
 */
export const parseRegex = (r: string | RegExp, val: string, flags?: string): RegExResult => {
    const reg = r instanceof RegExp ? r : new RegExp(r, flags);

    if (reg.global) {
        const found = Array.from(val.matchAll(reg));
        const globalResults = found.map(x => ({
            match: x[0],
            groups: x.slice(1),
            named: x.groups,
        }));
        const anyMatched = found.length > 0;
        return {
            matched: anyMatched,
            matches: anyMatched ? found.map(x => x[0]) : [],
            global: anyMatched ? globalResults : [],
        };
    }

    const single = val.match(reg)
    return {
        matched: single !== null,
        matches: single !== null ? single.slice(0) : [],
        global: [],
    }
}
/**
 * Reads and JSON-parses a file.
 *
 * @param path file path to read
 * @param opts `log` (optional logger) and `throwOnNotFound` (default true); when
 *             `throwOnNotFound` is false a missing file resolves to undefined
 * @throws the underlying fs/parse error for anything other than a tolerated missing file
 */
export async function readJson(path: string, opts: any) {
    const {log, throwOnNotFound = true} = opts;
    try {
        await promises.access(path, constants.R_OK);
        const raw = await promises.readFile(path);
        return JSON.parse(raw as unknown as string);
    } catch (e) {
        const {code} = e;
        if (code === 'ENOENT') {
            if (!throwOnNotFound) {
                // missing file is acceptable -> signal with undefined
                return;
            }
            if (log) {
                log.warn('No file found at given path', {filePath: path});
            }
            throw e;
        }
        if (log) {
            log.warn(`Encountered error while parsing file`, {filePath: path});
            log.error(e);
        }
        throw e;
    }
}
// export function isObject(item: any): boolean {
// return (item && typeof item === 'object' && !Array.isArray(item));
// }
// deepmerge array-merge strategy: the incoming array fully replaces the existing one
export const overwriteMerge = (_existing: any[], incoming: any[], _options: any): any[] => incoming;
/**
 * Recursively strips undefined-valued keys from a plain object.
 *
 * Returns undefined (not {}) when nothing survives, and then deletes any key whose
 * value resolved to an empty (non-null) object during recursion. Arrays are copied
 * through untouched (their elements are NOT recursed into); null values are kept.
 *
 * NOTE(review): every non-array object value is rebuilt via recursion, so prototypes
 * (e.g. on class instances like Date) would be lost -- presumably only plain
 * config/JSON objects are passed here; confirm at call sites.
 */
export const removeUndefinedKeys = (obj: any) => {
    let newObj: any = {};
    Object.keys(obj).forEach((key) => {
        if(Array.isArray(obj[key])) {
            // arrays pass through as-is
            newObj[key] = obj[key];
        } else if (obj[key] === Object(obj[key])) {
            // non-primitive value: recurse (may come back undefined when emptied out)
            newObj[key] = removeUndefinedKeys(obj[key]);
        } else if (obj[key] !== undefined) {
            newObj[key] = obj[key];
        }
    });
    if(Object.keys(newObj).length === 0) {
        return undefined;
    }
    Object.keys(newObj).forEach(key => {
        // drop keys that resolved to undefined or to an empty (non-null) object
        if(newObj[key] === undefined || (null !== newObj[key] && typeof newObj[key] === 'object' && Object.keys(newObj[key]).length === 0)) {
            delete newObj[key]
        }
    });
    //Object.keys(newObj).forEach(key => newObj[key] === undefined || newObj[key] && delete newObj[key])
    return newObj;
}
/**
 * Fresh, zeroed request/miss counters for every cached resource category.
 */
export const cacheStats = (): ResourceStats => {
    const categories = ['author', 'authorCrit', 'content', 'userNotes'];
    const stats: any = {};
    for (const category of categories) {
        stats[category] = {requests: 0, miss: 0};
    }
    return stats as ResourceStats;
}
/**
 * Normalizes a cache provider spec into complete CacheOptions.
 * A bare provider name selects that store with default options; an object spec is
 * merged over the defaults, with the store itself defaulting to in-memory.
 */
export const buildCacheOptionsFromProvider = (provider: CacheProvider | any): CacheOptions => {
    if (typeof provider !== 'string') {
        return {
            store: 'memory',
            ...cacheOptDefaults,
            ...provider,
        };
    }
    return {
        store: provider as CacheProvider,
        ...cacheOptDefaults
    };
}
/**
 * Creates a cache-manager instance for the configured store, or undefined when
 * caching is disabled ('none'). Redis connection options are only consumed by
 * the redis store; unrecognized store names fall back to in-memory.
 */
export const createCacheManager = (options: CacheOptions) => {
    const {store, max, ttl = 60, host = 'localhost', port, auth_pass, db} = options;
    if (store === 'none') {
        return undefined;
    }
    if (store === 'redis') {
        return cacheManager.caching({
            store: redisStore,
            host,
            port,
            auth_pass,
            db,
            ttl
        });
    }
    // 'memory' and anything unrecognized use the in-memory store
    return cacheManager.caching({store: 'memory', max, ttl});
}
/** Generates a 40-character hex identifier from 20 cryptographically random bytes. */
export const randomId = (): string => crypto.randomBytes(20).toString('hex');