Mirror of https://github.com/FoxxMD/context-mod.git (synced 2026-01-14 07:57:57 -05:00)

Compare commits

186 Commits (SHA1):

910f7f79ef, 641892cd3e, 1dfb9779e7, 40111c54a2, b4745e3b45, 838da497ce, 01755eada5, 1ff59ad6e8,
d8fd8e6140, 255ffdb417, f0199366a0, 20c724cab5, a670975f14, ee13feaf57, 23a24b4448, a11b667d5e,
269b1620b9, 6dee734440, 3aea422eff, e707e5a9a8, 2a24eea3a5, 8ad8297c0e, 0b94a14ac1, a04e0d2a9b,
3a1348c370, 507818037f, 2c1f6daf4f, fef79472fe, 885e3fa765, 0b2c0e6451, 15806b5f1f, bf42cdf356,
e21acd86db, 5dca1c9602, 5274584d92, 1d386c53a5, d6e351b195, ea32dc0b62, dca57bb19e, 43919f7f9c,
a176b51148, 75ac5297df, 0ef2b99bd6, 9596a476b5, 92f52cada5, a482e852c5, e9055e5205, df2c40d9c1,
fc4eeb47fa, 9fb3eaa611, 23394ab5c2, 5417b26417, b6d638d6c5, af1dd09e2d, c42e56c68f, 561a007850,
465c3c9acf, 6cee8691f5, cfb228de73, 82a1a393de, 2fd1ffed19, 7b00e1c54b, bb2c5f076c, 8a9212def2,
a9a5bd0066, f27b4a03e9, ce87285283, 220c6cdd8b, 17440025b9, 2655ae6041, a5d7b473a0, 67a04c6cc6,
c687ddbe57, 980ff7da02, 0f84a7cf6b, 51a93439bb, 18f115987b, 34faf56d5d, d09a2df1e0, 5349171913,
e283d81fdf, a606d6558c, cc058388d0, 4bbd170c1d, c817716aa1, 33f9b4a091, 8d8e4405e0, ee302ee430,
acbac54903, 3858070cee, ac5ace1f61, 3d79a9217a, 4b6261517c, d1960c68bb, a8cc40e95d, 5c76f9ab1c,
a5d3c809aa, 3b905e6961, 707547effc, 6b02350d96, 7ff8094156, 82c673c8a6, 7f742d3a30, 2442fc2483,
e762cc29ef, 88db6767eb, 161251a943, 6e4b1b68e3, a6212897b3, 7b8a89e918, efd31c5f21, 868bac9f1a,
adf18cc7ee, 3f1d1bc6d0, ce4cb96d9a, 4457e3957d, c317f95953, 2eda6c5fe1, 1108216a50, b9215e944a,
a976171e3a, b773afbe38, 045e2c1d33, ad45f75267, 643790d3bd, a531d7e4e0, be065f919c, 8d5d44bf0d,
bbd8a6633e, 038e5d086b, 5422b181c0, d0e0515990, 931dfa67fd, af1ea5543e, fd7a6edeb6, 0a3409cfef,
89b2932495, 3a05e43ce9, 8b1d3cb170, 90df5f45a8, ba4b4a69a7, e3d4ffa36d, cdddd8de48, 7f1429395c,
f598215d88, c92e6775cb, 2a5f812dba, 54905da782, 5f30dd8ce9, 547f57b99f, bf336ca55a, 4716ac8c0a,
79a518edbc, b72a3fea7f, 58603f17f4, 99b5a01835, fd41c23128, 3230c4b30b, 38507c8990, 136098354b,
29fc9a3a2d, 0c7218571c, fd4c2a38e7, f89dca5d77, acc7c49e0e, 7175965e3d, 3ec7d3530d, 01839512d5,
d37958e5c8, bfbbb3466a, 775613374b, 44c8bd9a6a, 45e61b8bc7, 4680640b0c, 897802b234, 82b353c6d9,
254d2ca896, 5a531f0122, 0afd87ab1b, c1ab3b11f4, 222fe0aeac, ceb98d04bb, b813ebdd96, 4865259ae8,
2616439f5f, 0eddac35fa
@@ -5,3 +5,4 @@ Dockerfile
.git
src/logs
/docs
.github
49  .github/workflows/dockerhub.yml  (vendored, Normal file)
@@ -0,0 +1,49 @@
name: Publish Docker image to Dockerhub

on:
  push:
    branches:
      - 'master'
      - 'edge'
    tags:
      - '*.*.*'
    # don't trigger if just updating docs
    paths-ignore:
      - '**.md'

jobs:
  push_to_registry:
    name: Push Docker image to Docker Hub
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repo
        uses: actions/checkout@v2

      - name: Log in to Docker Hub
        if: github.event_name != 'pull_request'
        uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v3
        with:
          images: foxxmd/context-mod
          # generate Docker tags based on the following events/attributes
          tags: |
            type=raw,value=latest,enable=${{ endsWith(github.ref, 'master') }}
            type=ref,event=branch,enable=${{ !endsWith(github.ref, 'master') }}
            type=semver,pattern={{version}}
          flavor: |
            latest=false

      - name: Build and push Docker image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
243  README.md
@@ -1,10 +1,8 @@
# reddit-context-bot

[](https://github.com/FoxxMD/reddit-context-bot/releases)
[](https://github.com/FoxxMD/context-mod/releases)
[](https://opensource.org/licenses/MIT)
[](https://hub.docker.com/r/foxxmd/reddit-context-bot)
[](https://hub.docker.com/r/foxxmd/context-mod)

**Context Bot** is an event-based, [reddit](https://reddit.com) moderation bot built on top of [snoowrap](https://github.com/not-an-aardvark/snoowrap) and written in [typescript](https://www.typescriptlang.org/).
**Context Mod** (CM) is an event-based, [reddit](https://reddit.com) moderation bot built on top of [snoowrap](https://github.com/not-an-aardvark/snoowrap) and written in [typescript](https://www.typescriptlang.org/).

It is designed to help fill in the gaps for [automoderator](https://www.reddit.com/wiki/automoderator/full-documentation) in regard to more complex behavior with a focus on **user-history based moderation.**

@@ -17,29 +15,35 @@ An example of the above that Context Bot can do now:

Some feature highlights:
* Simple rule-action behavior can be combined to create any level of complexity in behavior
* One instance can handle managing many subreddits (as many as it has moderator permissions in!)
* Per-subreddit configuration is handled by JSON stored in the subreddit wiki
* Any text-based actions (comment, submission, message, usernotes, etc...) can be configured via a wiki page or raw text in JSON
* All text-based actions support [mustache](https://mustache.github.io) templating
* Server/client architecture
  * Default/no configuration runs "All In One" behavior
  * Additional configuration allows web interface to connect to multiple servers
  * Each server instance can run multiple reddit accounts as bots
* **Per-subreddit configuration** is handled by JSON stored in the subreddit wiki
* Any text-based actions (comment, submission, message, usernotes, ban, etc...) can be configured via a wiki page or raw text in JSON and support [mustache](https://mustache.github.io) [templating](/docs/actionTemplating.md)
* History-based rules support multiple "valid window" types -- [ISO 8601 Durations](https://en.wikipedia.org/wiki/ISO_8601#Durations), [Day.js Durations](https://day.js.org/docs/en/durations/creating), and submission/comment count limits.
* Checks/Rules support skipping behavior based on:
  * author criteria (name, css flair/text, moderator status, and [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes))
* Support Activity skipping based on:
  * Author criteria (name, css flair/text, age, karma, moderator status, and [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes))
  * Activity state (removed, locked, distinguished, etc.)
* Rules and Actions support named references so you write rules/actions once and reference them anywhere
* User-configurable global/subreddit-level API caching with optional redis-backend
* Rules and Actions support named references (write once, reference anywhere)
* Global/subreddit-level **API caching**
* Support for [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) as criteria or Actions (writing notes)
* Docker container support
* Event notification via Discord
* **Web interface** for monitoring, administration, and oauth bot authentication

# Table of Contents

* [How It Works](#how-it-works)
* [Installation](#installation)
* [Configuration And Docs](#configuration)
* [Usage](#usage)
* [Getting Started](#getting-started)
* [Configuration And Documentation](#configuration-and-documentation)
* [Web UI and Screenshots](#web-ui-and-screenshots)

### How It Works

Context Bot's configuration is made up of a list of **Checks**. Each **Check** consists of:
Each subreddit using the RCB bot configures its behavior via their own wiki page.

When a monitored **Event** (new comment/submission, new modqueue item, etc.) is detected the bot runs through a list of **Checks** to determine what to do with the **Activity** from that Event. Each **Check** consists of:

#### Kind

@@ -47,202 +51,87 @@ Is this check for a submission or comment?

#### Rules

A list of **Rule** objects to run against the activity. Triggered Rules can cause the whole Check to trigger and run its **Actions**
A list of **Rule** objects to run against the **Activity**. Triggered Rules can cause the whole Check to trigger and run its **Actions**

#### Actions

A list of **Action** objects that describe what the bot should do with the activity or author of the activity. The bot will run **all** Actions in this list.
A list of **Action** objects that describe what the bot should do with the **Activity** or **Author** of the activity (comment, remove, approve, etc.). The bot will run **all** Actions in this list.

___

The **Checks** for a subreddit are split up into **Submission Checks** and **Comment Checks** based on their **kind**. Each list of checks is run independently based on when events happen (submission or comment).

When an event occurs all Checks of that type are run in the order they were listed in the configuration. When one check is triggered (an action is performed) the remaining checks will not be run.
When an Event occurs all Checks of that type are run in the order they were listed in the configuration. When one check is triggered (an Action is performed) the remaining checks will not be run.
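As an illustrative sketch of that ordering (not a complete, validated configuration -- property names follow the concepts above, and the Check names are hypothetical; consult the schema and the examples in /docs for the real structure), two Submission Checks listed one after the other behave like this:

```json5
// Two Submission Checks, processed top to bottom.
// If "lowEffortSpam" triggers, "secondOpinion" is never run for that Event.
[
  {
    "name": "lowEffortSpam",   // hypothetical name
    "kind": "submission",      // this Check only runs on Submission Events
    "rules": [/* Rules tested against the Activity / its Author */],
    "actions": [/* Actions performed only if the Check triggers */]
  },
  {
    "name": "secondOpinion",   // hypothetical name
    "kind": "submission",
    "rules": [/* ... */],
    "actions": [/* ... */]
  }
]
```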
## Installation
___

[Learn more about the RCB lifecycle and core concepts in the docs.](/docs#how-it-works)

### Locally
## Getting Started

Clone this repository somewhere and then install from the working directory
#### Operators

```bash
git clone https://github.com/FoxxMD/reddit-context-bot.git .
cd reddit-context-bot
npm install
```
This guide is for users who want to **run their own bot on a ContextMod instance.**

### [Docker](https://hub.docker.com/r/foxxmd/reddit-context-bot)
See the [Operator's Getting Started Guide](/docs/gettingStartedOperator.md)

```
foxxmd/reddit-context-bot:latest
```
#### Moderators

Adding [**environmental variables**](#usage) to your `docker run` command will pass them through to the app EX:
```
docker run -e "CLIENT_ID=myId" ... foxxmd/reddit-context-bot
```
This guide is for **reddit moderators** who want to configure an existing CM bot to run on their subreddit.

### [Heroku Quick Deploy](https://heroku.com/about)
[](https://dashboard.heroku.com/new?template=https://github.com/FoxxMD/reddit-context-bot)
See the [Moderator's Getting Started Guide](/docs/gettingStartedMod.md)

## Configuration and Documentation

## Configuration
Context Bot's configuration can be written in JSON, [JSON5](https://json5.org/) or YAML. Its schema conforms to [JSON Schema Draft 7](https://json-schema.org/). Additionally, many **operator** settings can be passed via command line or environmental variables.

[**Check the docs for in-depth explanations of all concepts and examples**](/docs)
* For **operators** (running the bot instance) see the [Operator Configuration](/docs/operatorConfiguration.md) guide
* For **moderators** consult the [app schema and examples folder](/docs/#configuration-and-usage)

Context Bot's configuration can be written in JSON, [JSON5](https://json5.org/) or YAML. Its [schema](/src/Schema/App.json) conforms to [JSON Schema Draft 7](https://json-schema.org/).
[**Check the full docs for in-depth explanations of all concepts and examples**](/docs)

I suggest using [Atlassian JSON Schema Viewer](https://json-schema.app/start) ([direct link](https://json-schema.app/view/%23?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json)) so you can view all documentation while also interactively writing and validating your config! From there you can drill down into any object, see its requirements, view an example JSON document, and live-edit your configuration on the right-hand side.
## Web UI and Screenshots

### Action Templating
### Dashboard

Actions that can submit text (Report, Comment) will have their `content` values run through a [Mustache Template](https://mustache.github.io/). This means you can insert data generated by Rules into your text before the Action is performed.
CM comes equipped with a dashboard designed for use by both moderators and bot operators.

See here for a [cheatsheet](https://gist.github.com/FoxxMD/d365707cf99fdb526a504b8b833a5b78) and [here](https://www.tsmean.com/articles/mustache/the-ultimate-mustache-tutorial/) for a more thorough tutorial.
* Authentication via Reddit OAuth -- only accessible if you are the bot operator or a moderator of a subreddit the bot moderates
* Connect to multiple ContextMod instances (specified in configuration)
* Monitor API usage/rates
* Monitoring and administration **per subreddit:**
  * Start/stop/pause various bot components
  * View statistics on bot usage (# of events, checks run, actions performed) and cache usage
  * View various parts of your subreddit's configuration and manually update configuration
  * View **real-time logs** of what the bot is doing on your subreddit
  * **Run bot on any permalink**

All Actions with `content` have access to this data:

```json5
{
  item: {
    kind: 'string',      // the type of item (comment/submission)
    author: 'string',    // name of the item author (reddit user)
    permalink: 'string', // a url to the item
    url: 'string',       // if the item is a Submission then its URL (external for link type submission, reddit link for self-posts)
    title: 'string',     // if the item is a Submission, then the title of the Submission,
    botLink: 'string'    // a link to the bot's FAQ
  },
  rules: {
    // contains all rules that were run and are accessible using the name, lowercased, with all spaces/dashes/underscores removed
  }
}
```
### Bot Setup/Authentication

A bot oauth helper allows operators to define oauth credentials/permissions and then generate unique, one-time invite links that allow moderators to authenticate their own bots without operator assistance. [Learn more about using the oauth helper.](docs/botAuthentication.md#cm-oauth-helper-recommended)

The properties of `rules` are accessible using the name, lower-cased, with all spaces/dashes/underscores removed. If no name is given `kind` is used as `name`. Example:
Operator view/invite link generation:

```
"rules": [
  {
    "name": "My Custom-Recent Activity Rule", // mycustomrecentactivityrule
    "kind": "recentActivity"
  },
  {
    // name = repeatsubmission
    "kind": "repeatActivity",
  }
]
```

**To see what data is available for individual Rules [consult the schema](#configuration) for each Rule.**
Moderator view/invite and authorization:

#### Quick Templating Tutorial

<details>
### Configuration Editor

As a quick example for how you will most likely be using templating -- wrapping a variable in curly brackets, `{{variable}}`, will cause the variable value to be rendered instead of the brackets:
```
myVariable = 50;
myOtherVariable = "a text fragment"
template = "This is my template, the variable is {{myVariable}}, my other variable is {{myOtherVariable}}, and that's it!";

console.log(Mustache.render(template, {myVariable, myOtherVariable}));
// will render...
"This is my template, the variable is 50, my other variable is a text fragment, and that's it!";
```
A built-in editor using [monaco-editor](https://microsoft.github.io/monaco-editor/) makes editing configurations easy:

* Automatic JSON syntax validation and formatting
* Automatic Schema (subreddit or operator) validation
* All properties are annotated via hover popups
* Unauthenticated view via `yourdomain.com/config`
* Authenticated view loads subreddit configurations by simple link found on the subreddit dashboard
* Switch schemas to edit either subreddit or operator configurations

**Note: When accessing an object or its properties you must use dot notation**
```
const item = {
  aProperty: 'something',
  anotherObject: {
    bProperty: 'something else'
  }
}
const content = "My content will render the property {{item.aProperty}} like this, and another nested property {{item.anotherObject.bProperty}} like this."
```
</details>

## Usage

```
Usage: index [options] [command]

Options:
  -h, --help                                   display help for command

Commands:
  run [options] [interface]                    Monitor new activities from configured subreddits.
  check [options] <activityIdentifier> [type]  Run check(s) on a specific activity
  unmoderated [options] <subreddits...>        Run checks on all unmoderated activity in the modqueue
  help [command]                               display help for command


Options:
  -c, --operatorConfig <path>   An absolute path to a JSON file to load all parameters from (default: process.env.OPERATOR_CONFIG)
  -i, --clientId <id>           Client ID for your Reddit application (default: process.env.CLIENT_ID)
  -e, --clientSecret <secret>   Client Secret for your Reddit application (default: process.env.CLIENT_SECRET)
  -a, --accessToken <token>     Access token retrieved from authenticating an account with your Reddit Application (default: process.env.ACCESS_TOKEN)
  -r, --refreshToken <token>    Refresh token retrieved from authenticating an account with your Reddit Application (default: process.env.REFRESH_TOKEN)
  -u, --redirectUri <uri>       Redirect URI for your Reddit application (default: process.env.REDIRECT_URI)
  -t, --sessionSecret <secret>  Secret used to encrypt session id/data (default: process.env.SESSION_SECRET || a random string)
  -s, --subreddits <list...>    List of subreddits to run on. Bot will run on all subs it has access to if not defined (default: process.env.SUBREDDITS)
  -d, --logDir [dir]            Absolute path to directory to store rotated logs in. Leaving undefined disables rotating logs (default: process.env.LOG_DIR)
  -l, --logLevel <level>        Minimum level to log at (default: process.env.LOG_LEVEL || verbose)
  -w, --wikiConfig <path>       Relative url to contextbot wiki page EX https://reddit.com/r/subreddit/wiki/<path> (default: process.env.WIKI_CONFIG || 'botconfig/contextbot')
  --snooDebug                   Set Snoowrap to debug. If undefined will be on if logLevel='debug' (default: process.env.SNOO_DEBUG)
  --authorTTL <ms>              Set the TTL (ms) for the Author Activities shared cache (default: process.env.AUTHOR_TTL || 60000)
  --heartbeat <s>               Interval, in seconds, between heartbeat checks. (default: process.env.HEARTBEAT || 300)
  --softLimit <limit>           When API limit remaining (600/10min) is lower than this subreddits will have SLOW MODE enabled (default: process.env.SOFT_LIMIT || 250)
  --hardLimit <limit>           When API limit remaining (600/10min) is lower than this all subreddit polling will be paused until api limit reset (default: process.env.SOFT_LIMIT || 250)
  --dryRun                      Set all subreddits in dry run mode, overriding configurations (default: process.env.DRYRUN || false)
  --proxy <proxyEndpoint>       Proxy Snoowrap requests through this endpoint (default: process.env.PROXY)
  --operator <name>             Username of the reddit user operating this application, used for displaying OP level info/actions in UI (default: process.env.OPERATOR)
  --operatorDisplay <name>      An optional name to display who is operating this application in the UI (default: process.env.OPERATOR_DISPLAY || Anonymous)
  -p, --port <port>             Port for web server to listen on (default: process.env.PORT || 8085)
  -q, --shareMod                If enabled then all subreddits using the default settings to poll "unmoderated" or "modqueue" will retrieve results from a shared request to /r/mod (default: process.env.SHARE_MOD || false)
  -h, --help                    display help for command
```

### Logging

### Reddit App??

To use this bot you must do two things:
* Create a reddit application
* Authenticate that application to act as a user (login to the application with an account)

#### Create Application

Visit [your reddit preferences](https://www.reddit.com/prefs/apps) and at the bottom of the page go through the **create an(other) app** process.
* Choose **script**
* For redirect uri use https://not-an-aardvark.github.io/reddit-oauth-helper/
* Write down your **Client ID** and **Client Secret** somewhere

#### Authenticate an Account

Visit https://not-an-aardvark.github.io/reddit-oauth-helper/
* Input your **Client ID** and **Client Secret** in the text boxes with those names.
* Choose scopes. **It is very important you check everything on this list or Context Bot will not work correctly**
  * edit
  * flair
  * history
  * identity
  * modcontributors
  * modflair
  * modposts
  * modself
  * mysubreddits
  * read
  * report
  * submit
  * wikiread
  * wikiedit (if you are using Toolbox User Notes)
* Click **Generate tokens**, you will get a popup asking you to approve access (or login) -- **the account you approve access with is the account that Bot will control.**
* After approving, an **Access Token** and **Refresh Token** will be shown at the bottom of the page. Write these down.

You should now have all the information you need to start the bot.

## License
2  app.json
@@ -1,7 +1,7 @@
{
  "name": "Reddit Context Bot",
  "description": "An event-based, reddit moderation bot built on top of snoowrap and written in typescript",
  "repository": "https://github.com/FoxxMD/reddit-context-bot",
  "repository": "https://github.com/FoxxMD/context-mod",
  "stack": "container",
  "env": {
    "CLIENT_ID": {
186  docs/README.md
@@ -5,6 +5,7 @@
* [Getting Started](#getting-started)
* [How It Works](#how-it-works)
* [Concepts](#concepts)
  * [Check](#checks)
  * [Rule](#rule)
    * [Examples](#available-rules)
  * [Rule Set](#rule-set)
@@ -12,29 +13,35 @@
  * [Action](#action)
    * [Examples](#available-actions)
  * [Filters](#filters)
* [Configuration](#configuration)
* [Configuration and Usage](#configuration-and-usage)
* [Common Resources](#common-resources)
  * [Activities `window`](#activities-window)
  * [Comparisons](#thresholds-and-comparisons)
  * [Activity Templating](/docs/actionTemplating.md)
* [Best Practices](#best-practices)
* [Subreddit-ready Configurations](#subreddit-ready-configurations)
  * [Named Rules](#named-rules)
  * [Rule Order](#rule-order)
  * [Caching](#caching)
* FAQ

## Getting Started

Review **at least** the **How It Works** and **Concepts** below and then head to the [**Getting Started documentation.**](/docs/gettingStarted.md)
Review **at least** the **How It Works** and **Concepts** below, then:

* For **Operators** (running a bot instance) refer to the [**Operator Getting Started**](/docs/gettingStartedOperator.md) guide
* For **Moderators** (configuring an existing bot for your subreddit) refer to the [**Moderator Getting Started**](/docs/gettingStartedMod.md) guide

## How It Works

Where possible Reddit Context Bot (RCB) uses the same terminology as, and emulates the behavior of, **automoderator** so if you are familiar with that much of this may seem familiar to you.
Where possible Context Mod (CM) uses the same terminology as, and emulates the behavior of, **automoderator** so if you are familiar with that much of this may seem familiar to you.

RCB's lifecycle looks like this:
CM's lifecycle looks like this:

#### 1) A new event in your subreddit is received by RCB
#### 1) A new event in your subreddit is received by CM

The events RCB watches for are configured by you. These can be new modqueue items, submissions, or comments.
The events CM watches for are configured by you. These can be new modqueue/unmoderated items, submissions, or comments.

#### 2) RCB sequentially processes each Check in your configuration
#### 2) CM sequentially processes each Check in your configuration

A **Check** is a set of:

@@ -47,16 +54,32 @@ Once a Check is **triggered** no more Checks will be processed. This means all s

#### 4) All Actions from that Check are executed

After all Actions are executed RCB returns to waiting for the next Event.
After all Actions are executed CM returns to waiting for the next Event.

## Concepts

Core, high-level concepts regarding how RCB works.
Core, high-level concepts regarding how CM works.

### Checks

TODO
A **Check** is the main logical unit of behavior for the bot. It is equivalent to "if X then Y". A Check is comprised of:

* One or more **Rules** that are tested against an **Activity**
* One or more **Actions** that are performed when the **Rules** are satisfied

The bot's configuration can be made up of one or more **Checks** that are processed **in the order they are listed in the configuration.**

Once a Check is **triggered** (its Rules are satisfied and Actions performed) all subsequent Checks are skipped.

Some other important concepts regarding Checks:

* All Checks have a **kind** (defined in the configuration) that determines if they should run on **Submissions** or **Comments**
* Checks have a **condition** property that determines when they are considered **triggered**
  * If the **condition** is `AND` then ALL of their **Rules** must be **triggered** for the Check to be **triggered**
  * If the **condition** is `OR` then if ANY **Rule** is **triggered** the Check is **triggered**

Examples of different types of Checks can be found in the [subreddit-ready examples.](/docs/examples/subredditReady)
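A minimal sketch of the shape described above (the values and the action kind shown here are illustrative assumptions; the exact properties come from the app schema):

```json5
{
  "kind": "submission",      // run this Check on Submissions
  "condition": "OR",         // triggered if ANY of the Rules below triggers (default is AND)
  "rules": [
    { "kind": "recentActivity" /* rule-specific options omitted */ },
    { "kind": "repeatActivity" /* rule-specific options omitted */ }
  ],
  "actions": [
    { "kind": "remove" }     // hypothetical action kind -- see Available Actions below
  ]
}
```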
### Rule

A **Rule** is some set of **criteria** (conditions) that are tested against an Activity (comment/submission), a User, or a User's history. A Rule is considered **triggered** when the **criteria** for that rule are found to be **true** for whatever is being tested against.

@@ -67,7 +90,7 @@ There are generally three main properties for a Rule:

* **Activities Window** -- If applicable, the range of activities that the **criteria** will be tested against.
* **Rule-specific options** -- Any number of options that modify how the **criteria** are tested.

RCB has different **Rules** that can test against different types of behavior and aspects of a User, their history, and the Activity (submission/comment) being checked.
CM has different **Rules** that can test against different types of behavior and aspects of a User, their history, and the Activity (submission/comment) being checked.

#### Available Rules
Find detailed descriptions of all the Rules, with examples, below:

@@ -77,6 +100,7 @@ Find detailed descriptions of all the Rules, with examples, below:

* [Repeat Activity](/docs/examples/repeatActivity)
* [History](/docs/examples/history)
* [Author](/docs/examples/author)
* [Regex](/docs/examples/regex)

### Rule Set

@@ -108,9 +132,9 @@ Example

### Action

An **Action** is some action the bot can take against the checked Activity (comment/submission) or Author of the Activity. RCB has Actions for most things a normal reddit user or moderator can do.
An **Action** is some action the bot can take against the checked Activity (comment/submission) or Author of the Activity. CM has Actions for most things a normal reddit user or moderator can do.

### Available Actions
#### Available Actions

* Remove (Comment/Submission)
* Flair (Submission)
@@ -121,20 +145,74 @@ An **Action** is some action the bot can take against the checked Activity (comm
* Report (Comment/Submission)
* [UserNote](/docs/examples/userNotes) (User, when /r/Toolbox is used)

For detailed explanation and options of what individual Actions can do [see the links in the `actions` property in the schema.](https://json-schema.app/view/%23/%23%2Fdefinitions%2FSubmissionCheckJson?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json)
For detailed explanation and options of what individual Actions can do [see the links in the `actions` property in the schema.](https://json-schema.app/view/%23/%23%2Fdefinitions%2FSubmissionCheckJson?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json)

### Filters

TODO
**Checks, Rules, and Actions** all have two additional (optional) criteria "tests". These tests behave differently than rule/check triggers in that:

* When they **pass** the thing being tested continues to process as usual
* When they **fail** the thing being tested **is skipped, not failed.**

For **Checks** and **Actions** skipping means that the thing is not processed. The Action is not run, the Check is not triggered.

In the context of **Rules** (in a Check) skipping means the Rule does not get run BUT it does not fail. The Check will continue processing as if the Rule did not exist. However, if ALL Rules in a Check are skipped then the Check does "fail" (is not triggered).

#### Available Filters

##### Item Filter (`itemIs`)

This filter will test against the **state of the Activity currently being run.** Some criteria available to test against IE "Is the activity...":

* removed
* nsfw
* locked
* stickied
* deleted
* etc...

The `itemIs` filter is made up of an array (list) of `State` criteria objects. **All** criteria in the array must pass for this filter to pass.

There are two different State criteria depending on what type of Activity is being tested:

* Submission -- [SubmissionState](https://json-schema.app/view/%23/%23%2Fdefinitions%2FSubmissionCheckJson/%23%2Fdefinitions%2FSubmissionState?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json)
* Comment -- [CommentState](https://json-schema.app/view/%23/%23%2Fdefinitions%2FCommentCheckJson/%23%2Fdefinitions%2FCommentState?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json)

##### Author Filter (`authorIs`)

This filter will test against the **Author of the Activity currently being run.** Some criteria available to test against:

* account age
* comment, link, and total karma
* subreddit flair text/css
* name
* User Notes
* verified
* etc...

The `authorIs` filter is made up of two (optional) lists of [`AuthorCriteria`](https://json-schema.app/view/%23/%23%2Fdefinitions%2FSubmissionCheckJson/%23%2Fdefinitions%2FAuthorOptions/%23%2Fdefinitions%2FAuthorCriteria?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json) criteria objects that define how the test behaves:

* `include` list -- If **any** `AuthorCriteria` from this list passes then the `authorIs` test passes
* `exclude` list -- If **any** `AuthorCriteria` from this list **does not pass** then the `authorIs` test passes. **Note:** This property is ignored if `include` is also present IE you cannot use both properties at the same time.

Refer to the [app schema for `AuthorCriteria`](https://json-schema.app/view/%23/%23%2Fdefinitions%2FSubmissionCheckJson/%23%2Fdefinitions%2FAuthorOptions/%23%2Fdefinitions%2FAuthorCriteria?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json) for all available properties to test against.

Some examples of using `authorIs` can be found in the [Author examples.](/docs/examples/author)
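Roughly, the two filters sit side by side on a Check, Rule, or Action like this (the criteria property names inside the objects are illustrative guesses; take the real ones from the SubmissionState/CommentState and AuthorCriteria schemas linked above):

```json5
{
  // itemIs: ALL criteria objects in the array must pass or this Check/Rule/Action is skipped
  "itemIs": [
    { "removed": false, "locked": false }   // hypothetical criteria property names
  ],
  // authorIs: passes if ANY include criteria passes (exclude works the opposite way)
  "authorIs": {
    "include": [
      { "flairText": ["Trusted"] }          // hypothetical criteria property name/value
    ]
  }
}
```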
## Configuration
## Configuration And Usage

* For **Operator/Bot maintainers** see **[Operator Configuration](/docs/operatorConfiguration.md)**
* For **Moderators** see the [App Schema](https://json-schema.app/view/%23?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) and [examples](/docs/examples)
  * [CLI Usage](docs/operatorConfiguration.md#cli-usage)
* For **Moderators**
  * Refer to the [examples folder](/docs/examples) or the [subreddit-ready examples](/docs/examples/subredditReady)
  * as well as the [schema editor](https://json-schema.app/view/%23?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json) which has
    * fully annotated configuration data/structure
    * generated examples in json/yaml
    * built-in editor that automatically validates your config

## Common Resources

Technical information on recurring, common data/patterns used in RCB.
Technical information on recurring, common data/patterns used in CM.

### Activities `window`

@@ -144,7 +222,51 @@ Refer to the [Activities Window](/docs/activitiesWindow.md) documentation for a

### Thresholds and Comparisons

TODO
Most rules/filters have criteria that require you to define a specific condition to test against. This can be anything from repeats of activities to account age.

In all of these scenarios the condition is defined using a subset of [comparison operators](https://www.codecademy.com/articles/fwd-js-comparison-logical) (very similar to how automoderator does things).

Available operators:

* `<` -- **less than** => `5 < 6` => 5 is less than 6
* `>` -- **greater than** => `6 > 5` => 6 is greater than 5
* `<=` -- **less than or equal to** => `5 <= 5` => 5 is less than **or equal to** 5
* `>=` -- **greater than or equal to** => `5 >= 5` => 5 is greater than **or equal to** 5

In the context of a rule/filter comparison you provide the comparison **omitting** the value that is being tested. An example...

The RepeatActivity rule has a `threshold` comparison to test against the number of repeat activities it finds:

* You want the rule to trigger if it finds **4 or more repeat activities**
* The rule would be configured like this: `"threshold": ">= 4"`

Essentially what this is telling the rule is `threshold: "x >= 4"` where `x` is the largest repeat of activities it finds.
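Put together on the rule itself, that comparison might look like this (only `threshold` is the point here; all other RepeatActivity options are omitted):

```json5
{
  "kind": "repeatActivity",
  // triggered when the largest set of repeated activities found is 4 or more
  "threshold": ">= 4"
}
```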
#### Other Comparison Types

Other than comparing numeric values there are two other values that can be compared (depending on the criteria):

##### Percentages

Some criteria accept an optional **percentage** to compare against:

```
"threshold": "> 20%"
```

Refer to the individual rule/criteria schema to see what this percentage is comparing against.

##### Durations

Some criteria accept an optional **duration** to compare against:

```
"threshold": "< 1 month"
```

The duration value compares a time range from **now** to `duration value` time in the past.

Refer to [duration values in the activity window documentation](/docs/activitiesWindow.md#duration-values) as well as the individual rule/criteria schema to see what this duration is comparing against.

## Best Practices

@@ -152,7 +274,7 @@ TODO

All **Rules** in a subreddit's configuration can be assigned a **name** that can then be referenced from any other Check.

Create general-use rules so they can be reused and de-clutter your configuration. Additionally RCB will automatically cache the result of a rule so there is a performance and api usage benefit to re-using Rules.
Create general-use rules so they can be reused and de-clutter your configuration. Additionally, CM will automatically cache the result of a rule so there is a performance and api usage benefit to re-using Rules.

See [ruleNameReuse.json5](/docs/examples/advancedConcepts/ruleNameReuse.json5) for a detailed configuration with annotations.

@@ -189,22 +311,26 @@ If the Check is using `AND` condition for its rules (default) then if either Rul

**It is therefore advantageous to list your lightweight Rules first in each Check.**

### API Caching
### Caching

Context bot implements some basic caching functionality for **Author Activities** and wiki pages (on Comment/Report Actions).
ContextMod implements caching functionality for:

**Author Activities** are cached for a subreddit-configurable amount of time (10 seconds by default). A cached activities set can be re-used if the **window on a Rule is identical to the window on another Rule**.
* author history (`window` criteria in rules)
* `authorIs` results
* `content` that uses wiki pages (on Comment/Report/Ban Actions)
* and User Notes

This means that when possible you should re-use window values.
All of these use api requests so caching them reduces api usage.

IE If you want to check an Author's Activities for a time range try to always use **7 Days** or always use **50 Items** for absolute counts.
Cached results can be re-used if the criteria in configuration is identical to a previously cached result. So...

* author history cache results are re-used if the **`window` criteria on a Rule is identical to the `window` on another Rule** IE always use **7 Days** or always use **50 Items** for absolute counts.
* `authorIs` criteria is identical to another `authorIs` elsewhere in configuration
* etc...

Re-use will result in fewer API calls and faster Check times.
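For example, two Rules written with an identical `window` can share one cached set of author history (a sketch; the `kind` values and surrounding structure are illustrative, and each Rule's other options are omitted):

```json5
"rules": [
  {
    "kind": "recentActivity",
    "window": "7 days"   // first Rule fetches and caches the Author's history
  },
  {
    "kind": "repeatActivity",
    "window": "7 days"   // identical window -> re-uses the cached history instead of making new api calls
  }
]
```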
## Subreddit-ready Configurations

TODO
PROTIP: You can monitor the re-use of cache in the `Cache` section of your subreddit on the web interface. See the tooltips in that section for a better breakdown of cache statistics.

## FAQ
72  docs/actionTemplating.md  (Normal file)
@@ -0,0 +1,72 @@
Actions that can submit text (Report, Comment) will have their `content` values run through a [Mustache Template](https://mustache.github.io/). This means you can insert data generated by Rules into your text before the Action is performed.

See here for a [cheatsheet](https://gist.github.com/FoxxMD/d365707cf99fdb526a504b8b833a5b78) and [here](https://www.tsmean.com/articles/mustache/the-ultimate-mustache-tutorial/) for a more thorough tutorial.

All Actions with `content` have access to this data:

```json5
{
  item: {
    kind: 'string',      // the type of item (comment/submission)
    author: 'string',    // name of the item author (reddit user)
    permalink: 'string', // a url to the item
    url: 'string',       // if the item is a Submission then its URL (external for link type submission, reddit link for self-posts)
    title: 'string',     // if the item is a Submission, then the title of the Submission,
    botLink: 'string'    // a link to the bot's FAQ
  },
  rules: {
    // contains all rules that were run and are accessible using the name, lowercased, with all spaces/dashes/underscores removed
  }
}
```

The properties of `rules` are accessible using the name, lower-cased, with all spaces/dashes/underscores removed. If no name is given `kind` is used as `name`. Example:

```
"rules": [
  {
    "name": "My Custom-Recent Activity Rule", // mycustomrecentactivityrule
    "kind": "recentActivity"
  },
  {
    // name = repeatsubmission
    "kind": "repeatActivity",
  }
]
```

**To see what data is available for individual Rules [consult the schema](#configuration) for each Rule.**

#### Quick Templating Tutorial

As a quick example for how you will most likely be using templating -- wrapping a variable in curly brackets, `{{variable}}`, will cause the variable value to be rendered instead of the brackets:

```
myVariable = 50;
myOtherVariable = "a text fragment"
template = "This is my template, the variable is {{myVariable}}, my other variable is {{myOtherVariable}}, and that's it!";

console.log(Mustache.render(template, {myVariable, myOtherVariable}));
// will render...
"This is my template, the variable is 50, my other variable is a text fragment, and that's it!";
```

**Note: When accessing an object or its properties you must use dot notation**

```
const item = {
  aProperty: 'something',
  anotherObject: {
    bProperty: 'something else'
  }
}
const content = "My content will render the property {{item.aProperty}} like this, and another nested property {{item.anotherObject.bProperty}} like this."
```
109  docs/botAuthentication.md  (Normal file)
@@ -0,0 +1,109 @@
**Note:** This is for **bot operators.** If you are a subreddit moderator check out the **[Getting Started Guide](/docs/gettingStartedMod.md)**

Before you can start using your bot on reddit there are a few steps you must take:

* Create your bot account IE the reddit account that will be the "bot"
* Create a Reddit application
* Authenticate your bot account with the application

At the end of this process you will have this info:

* clientId
* clientSecret
* refreshToken
* accessToken
* redirectUri

**Note:** If you already have this information you can skip this guide **but make sure your redirect uri is correct if you plan on using the web interface.**

# Table Of Contents

* [Creating an Application](#create-application)
* [Authenticate Your Bot](#authenticate-your-bot-account)
  * [Using CM OAuth Helper](#cm-oauth-helper-recommended)
  * [Using Aardvark OAuth Helper](#aardvark-oauth-helper)
* [Provide Credentials to CM](#provide-credentials-to-cm)

# Create Application

Visit [your reddit preferences](https://www.reddit.com/prefs/apps) and at the bottom of the page go through the **create an(other) app** process.

* Give it a **name**
* Choose **web app**
* If you know what you will use for the **redirect uri** go ahead and use it, otherwise use **http://localhost:8085/callback**

Click **create app**.

Then write down your **Client ID, Client Secret, and Redirect Uri** somewhere (or keep this webpage open).

# Authenticate Your Bot Account

There are **two ways** you can authenticate your bot account. It is recommended to use the CM oauth helper.

## CM OAuth Helper (Recommended)

This method will use CM's built-in oauth flow. It is recommended because it will ensure your bot is authenticated with the correct oauth permissions.

### Start CM with Client ID/Secret and Operator

Start the application and provide these to your configuration:

* **Client ID**
* **Client Secret**
* **Redirect URI**
* **Operator**

It is important you define **Operator** because the auth route is **protected.** You must login to the application in order to access the route.

Refer to the [operator config guide](/docs/operatorConfiguration.md) if you need help with this.

Examples:

* CLI - `node src/index.js --clientId=myId --clientSecret=mySecret --redirectUri="http://localhost:8085/callback" --operator=FoxxMD`
* Docker - `docker run -e "CLIENT_ID=myId" -e "CLIENT_SECRET=mySecret" -e "OPERATOR=FoxxMD" -e "REDIRECT_URI=http://localhost:8085/callback" foxxmd/context-mod`

### Create An Auth Invite

Then open the CM web interface (default is [http://localhost:8085](http://localhost:8085)) and login.

After logging in you should be automatically redirected to the auth page. If you are not then visit [http://localhost:8085/auth/helper](http://localhost:8085/auth/helper)

Follow the directions in the helper to create an **auth invite link.** Open this link and then follow the directions to authenticate your bot. At the end of the process you will receive an **Access Token** and **Refresh Token**.

## Aardvark OAuth Helper

This method should only be used if you cannot use the [CM OAuth Helper method](#cm-oauth-helper-recommended) because you cannot access the CM web interface.

* Visit [https://not-an-aardvark.github.io/reddit-oauth-helper/](https://not-an-aardvark.github.io/reddit-oauth-helper/) and follow the instructions given.
  * **Note:** You will need to update your **redirect uri.**
* Input your **Client ID** and **Client Secret** in the text boxes with those names.
* Choose scopes. **It is very important you check everything on this list or CM may not work correctly**
  * edit
  * flair
  * history
  * identity
  * modcontributors
  * modflair
  * modposts
  * modself
  * mysubreddits
  * read
  * report
  * submit
  * wikiread
  * wikiedit (if you are using Toolbox User Notes)
* Click **Generate tokens**, you will get a popup asking you to approve access (or login) -- **the account you approve access with is the account that Bot will control.**
* After approving, an **Access Token** and **Refresh Token** will be shown at the bottom of the page. Save these to use with CM.

# Provide Credentials to CM

At the end of whichever method you chose you should now have this information saved somewhere:

* clientId
* clientSecret
* refreshToken
* accessToken
* redirectUri

This is all the information you need to run your bot with CM.

Using these credentials follow the [operator config guide](/docs/operatorConfiguration.md) to finish setting up your CM instance.
@@ -1,6 +1,6 @@
# Examples

This directory contains examples of valid, ready-to-go configurations for Context Bot for the purpose of:
This directory contains examples of valid, ready-to-go configurations for Context Mod for the purpose of:

* showcasing what the bot can do
* providing best practices for writing your configuration
@@ -16,10 +16,12 @@ This directory contains example of valid, ready-to-go configurations for Context

* [Repeat Activity](/docs/examples/repeatActivity)
* [History](/docs/examples/history)
* [Author](/docs/examples/author)
* [Regex](/docs/examples/regex)
* [Toolbox User Notes](/docs/examples/userNotes)
* [Advanced Concepts](/docs/examples/advancedConcepts)
  * [Rule Sets](/docs/examples/advancedConcepts/ruleSets.json5)
  * [Named Rules](/docs/examples/advancedConcepts/ruleNameReuse.json5)
  * [Check Ordering](/docs/examples/advancedConcepts)
* Subreddit-ready examples
  * Coming soon...
* [Subreddit-ready examples](/docs/examples/subredditReady)

PROTIP: You can edit/build on examples by using the [schema editor.](https://json-schema.app/view/%23?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json)
@@ -23,7 +23,7 @@ The `rules` array on a `Checks` can contain both `Rule` objects and `RuleSet` ob

A **Rule Set** is a "nested" set of `Rule` objects with a passing condition specified. These allow you to create more complex trigger behavior by combining multiple rules.

See **[ruleSets.json5](/docs/examples/advancedConcepts/ruleSets.json5)** for a complete example as well as consulting the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FRuleSetJson?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json).
See **[ruleSets.json5](/docs/examples/advancedConcepts/ruleSets.json5)** for a complete example as well as consulting the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FRuleSetJson?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json).
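As a rough sketch of that nesting (the property names here are assumptions based on the description above; ruleSets.json5 and the RuleSetJson schema are the authoritative references):

```json5
"rules": [
  // a plain Rule and a Rule Set can sit side by side in the same `rules` array
  { "kind": "recentActivity" },
  {
    // hypothetical Rule Set shape: a passing condition plus its own nested Rules
    "condition": "AND",
    "rules": [
      { "kind": "repeatActivity" },
      { "kind": "recentActivity" }
    ]
  }
]
```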
### Rule Order

@@ -45,7 +45,7 @@ If the Check is using `AND` condition for its rules (default) then if either Rul

### API Caching

Context bot implements some basic caching functionality for **Author Activities** and wiki pages (on Comment/Report Actions).
Context Mod implements some basic caching functionality for **Author Activities** and wiki pages (on Comment/Report Actions).

**Author Activities** are cached for a subreddit-configurable amount of time (10 seconds by default). A cached activities set can be re-used if the **window on a Rule is identical to the window on another Rule**.
@@ -6,7 +6,7 @@ The **Attribution** rule will aggregate an Author's content Attribution (youtube

* Look at all domains or only media (youtube, vimeo, etc.)
* Include self posts (by reddit domain) or not

Consult the [schema](https://json-schema.app/view/%23/%23%2Fdefinitions%2FCheckJson/%23%2Fdefinitions%2FAttributionJSONConfig?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the rule's properties.
Consult the [schema](https://json-schema.app/view/%23/%23%2Fdefinitions%2FCheckJson/%23%2Fdefinitions%2FAttributionJSONConfig?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the rule's properties.

### Examples
@@ -2,7 +2,7 @@

## Rule

The **Author** rule triggers if any [AuthorCriteria](https://json-schema.app/view/%23%2Fdefinitions%2FAuthorCriteria?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) from a list are either **included** or **excluded**, depending on which property you put them in.
The **Author** rule triggers if any [AuthorCriteria](https://json-schema.app/view/%23%2Fdefinitions%2FAuthorCriteria?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json) from a list are either **included** or **excluded**, depending on which property you put them in.

**AuthorCriteria** that can be checked:

* name (u/userName)
@@ -13,7 +13,7 @@ The **Author** rule triggers if any [AuthorCriteria](https://json-schema.app/vie

The Author **Rule** is best used in conjunction with other Rules to short-circuit a Check based on who the Author is. It is easier to use a Rule to do this than to write **author filters** for every Rule (and it makes Rules more re-usable).

Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FAuthorRuleJSONConfig?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the rule's properties.
Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FAuthorRuleJSONConfig?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the rule's properties.

### Examples

@@ -25,7 +25,7 @@ Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FAuthorRule

## Filter

All **Rules** and **Checks** have an optional `authorIs` property that takes an [AuthorOptions](https://json-schema.app/view/%23%2Fdefinitions%2FAuthorOptions?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) object.
All **Rules** and **Checks** have an optional `authorIs` property that takes an [AuthorOptions](https://json-schema.app/view/%23%2Fdefinitions%2FAuthorOptions?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json) object.

**This property works the same as the Author Rule except that:**
* On **Rules** if all criteria fail the Rule is **skipped.**
@@ -5,7 +5,7 @@ The **History** rule can check an Author's submission/comment statistics over a

* Comment total or percentage of all Activity
* Comments made as OP (commented in their own Submission) total or percentage of all Comments

Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FHistoryJSONConfig?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the rule's properties.
Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FHistoryJSONConfig?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the rule's properties.

### Examples
@@ -2,7 +2,7 @@
|
||||
|
||||
The **Recent Activity** rule can check if an Author has made any Submissions/Comments in a list of defined Subreddits.
|
||||
|
||||
Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FRecentActivityRuleJSONConfig?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the rule's properties.
|
||||
Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FRecentActivityRuleJSONConfig?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the rule's properties.
|
||||
|
||||
### Examples
|
||||
|
||||
|
||||
20
docs/examples/regex/README.md
Normal file
@@ -0,0 +1,20 @@
|
||||
The **Regex** rule matches on text content from a comment or submission in the same way automod uses regex. The rule, however, provides additional functionality automod does not:
|
||||
|
||||
* Can set the **number** of matches that trigger the rule (`matchThreshold`)
|
||||
|
||||
This can then be used in conjunction with a [`window`](https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md) to match against activities from the history of the Author of the Activity being checked (including the Activity being checked):
|
||||
|
||||
* Can set the **number of Activities** that meet the `matchThreshold` to trigger the rule (`activityMatchThreshold`)
|
||||
* Can set the **number of total matches** across all Activities to trigger the rule (`totalMatchThreshold`)
|
||||
* Can set the **type of Activities** to check (`lookAt`)
|
||||
* When an Activity is a Submission, can **specify which parts of the Submission to match against**, i.e. title, body, and/or url (`testOn`)
|
||||
|
||||
### Examples
|
||||
|
||||
* [Trigger if regex matches against the current activity](/docs/examples/regex/matchAnyCurrentActivity.json5)
|
||||
* [Trigger if regex matches 5 times against the current activity](/docs/examples/regex/matchThresholdCurrentActivity.json5)
|
||||
* [Trigger if regex matches against any part of a Submission](/docs/examples/regex/matchSubmissionParts.json5)
|
||||
* [Trigger if regex matches any of Author's last 10 activities](/docs/examples/regex/matchHistoryActivity.json5)
|
||||
* [Trigger if regex matches at least 3 of Author's last 10 activities](/docs/examples/regex/matchActivityThresholdHistory.json5)
|
||||
* [Trigger if there are 5 regex matches in the Author's last 10 activities](/docs/examples/regex/matchTotalHistoryActivity.json5)
|
||||
* [Trigger if there are 5 regex matches in the Author's last 10 comments](/docs/examples/regex/matchSubsetHistoryActivity.json5)
|
||||
20
docs/examples/regex/matchActivityThresholdHistory.json5
Normal file
@@ -0,0 +1,20 @@
|
||||
// goes inside
|
||||
// "rules": []
|
||||
{
|
||||
"name": "swear",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
// triggers if more than 3 activities in the last 10 match the regex
|
||||
{
|
||||
"regex": "/fuck|shit|damn/",
|
||||
// this differs from "totalMatchThreshold"
|
||||
//
|
||||
// activityMatchThreshold => # of activities from the window that must match the regex
// totalMatchThreshold => total # of regex matches across all activities from the window
|
||||
"activityMatchThreshold": "> 3",
|
||||
// if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
|
||||
// learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
|
||||
"window": 10,
|
||||
},
|
||||
]
|
||||
}
|
||||
14
docs/examples/regex/matchAnyCurrentActivity.json5
Normal file
@@ -0,0 +1,14 @@
|
||||
// goes inside
|
||||
// "rules": []
|
||||
{
|
||||
"name": "swear",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
// triggers if current activity has more than 0 matches
|
||||
{
|
||||
"regex": "/fuck|shit|damn/",
|
||||
// if "matchThreshold" is not specified it defaults to this -- default behavior is to trigger if there are any matches
|
||||
// "matchThreshold": "> 0"
|
||||
},
|
||||
]
|
||||
}
|
||||
15
docs/examples/regex/matchHistoryActivity.json5
Normal file
@@ -0,0 +1,15 @@
|
||||
// goes inside
|
||||
// "rules": []
|
||||
{
|
||||
"name": "swear",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
// triggers if any activity in the last 10 (including the current activity) matches the regex
|
||||
{
|
||||
"regex": "/fuck|shit|damn/",
|
||||
// if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
|
||||
// learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
|
||||
"window": 10,
|
||||
},
|
||||
]
|
||||
}
|
||||
19
docs/examples/regex/matchSubmissionParts.json5
Normal file
@@ -0,0 +1,19 @@
|
||||
// goes inside
|
||||
// "rules": []
|
||||
{
|
||||
"name": "swear",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
{
|
||||
// triggers if the current activity has more than 0 matches
|
||||
// if the activity is a submission then matches against title, body, and url
|
||||
// if "testOn" is not provided then `title, body` are the defaults
|
||||
"regex": "/fuck|shit|damn/",
|
||||
"testOn": [
|
||||
"title",
|
||||
"body",
|
||||
"url"
|
||||
]
|
||||
},
|
||||
]
|
||||
}
|
||||
23
docs/examples/regex/matchSubsetHistoryActivity.json5
Normal file
@@ -0,0 +1,23 @@
|
||||
// goes inside
|
||||
// "rules": []
|
||||
{
|
||||
"name": "swear",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
// triggers if there are more than 5 regex matches in the last 10 activities (comments only)
|
||||
{
|
||||
"regex": "/fuck|shit|damn/",
|
||||
// this differs from "activityMatchThreshold"
|
||||
//
|
||||
// activityMatchThreshold => # of activities from the window that must match the regex
// totalMatchThreshold => total # of regex matches across all activities from the window
|
||||
"totalMatchThreshold": "> 5",
|
||||
// if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
|
||||
// learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
|
||||
"window": 10,
|
||||
// determines which activities from window to consider
|
||||
//defaults to "all" (submissions and comments)
|
||||
"lookAt": "comments",
|
||||
},
|
||||
]
|
||||
}
|
||||
13
docs/examples/regex/matchThresholdCurrentActivity.json5
Normal file
@@ -0,0 +1,13 @@
|
||||
// goes inside
|
||||
// "rules": []
|
||||
{
|
||||
"name": "swear",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
{
|
||||
"regex": "/fuck|shit|damn/",
|
||||
// triggers if current activity has greater than 5 matches
|
||||
"matchThreshold": "> 5"
|
||||
},
|
||||
]
|
||||
}
|
||||
21
docs/examples/regex/matchTotalHistoryActivity.json5
Normal file
@@ -0,0 +1,21 @@
|
||||
// goes inside
|
||||
// "rules": []
|
||||
{
|
||||
"name": "swear",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
// triggers if there are more than 5 regex matches in the last 10 activities (comments or submissions)
|
||||
{
|
||||
// triggers if there are more than 5 *total matches* across the last 10 activities
|
||||
"regex": "/fuck|shit|damn/",
|
||||
// this differs from "activityMatchThreshold"
|
||||
//
|
||||
// activityMatchThreshold => # of activities from the window that must match the regex
// totalMatchThreshold => total # of regex matches across all activities from the window
|
||||
"totalMatchThreshold": "> 5",
|
||||
// if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
|
||||
// learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
|
||||
"window": 10,
|
||||
},
|
||||
]
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
# Repeat Activity
|
||||
|
||||
The **Repeat Activity** rule will check for patterns of repetition in an Author's Submission/Comment history. Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FRepeatActivityJSONConfig?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the rule's properties.
|
||||
The **Repeat Activity** rule will check for patterns of repetition in an Author's Submission/Comment history. Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FRepeatActivityJSONConfig?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the rule's properties.
|
||||
|
||||
## Tuning
|
||||
|
||||
|
||||
41
docs/examples/subredditReady/README.md
Normal file
@@ -0,0 +1,41 @@
|
||||
Provided here are **complete, ready-to-go configurations** that can be copy-pasted straight into your configuration wiki page to get going with ContextMod immediately.
|
||||
|
||||
These configurations attempt to provide sensible, non-destructive, default behavior for some common scenarios and subreddit types.
|
||||
|
||||
In most cases these will perform decently out-of-the-box, but they are not perfect. You should still monitor the bot to see how it performs and will most likely need to tweak these configurations to get your desired behavior.
|
||||
|
||||
All actions for these configurations are non-destructive in that:
|
||||
|
||||
* All instances where an activity would be modified (remove/ban/approve) will have `dryRun: true` set to prevent the action from actually being performed
|
||||
* These instances will also have a `report` action detailing the action that would have been performed
|
||||
|
||||
**You will have to remove the `report` action and `dryRun` settings yourself.** This is to ensure that you understand the behavior the bot will be performing. If you are unsure, leave them in place until you are certain the bot's behavior is acceptable.
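For reference, the dry-run pattern described above looks like this inside the `actions` list of each check (a trimmed sketch based on the configurations below):

```json5
"actions": [
  // remove this report action after confirming behavior is acceptable
  {
    "kind": "report",
    "content": "Remove=> summary of what would have been removed"
  },
  {
    "kind": "remove",
    // remove the line below after confirming behavior is acceptable
    "dryRun": true
  }
]
```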
|
||||
|
||||
## Submission-based Behavior
|
||||
|
||||
### [Remove submissions from users who have used 'freekarma' subs to bypass karma checks](/docs/examples/subredditReady/freekarma.json5)
|
||||
|
||||
If the user has any activity (comment/submission) in known freekarma subreddits in the past (50 activities or 6 months) then remove the submission.
|
||||
|
||||
### [Remove submissions from users who have crossposted the same submission 4 or more times](/docs/examples/subredditReady/crosspostSpam.json5)
|
||||
|
||||
If the user has crossposted the same submission 4 or more times in a row in the past (50 activities or 6 months) then remove the submission.
|
||||
|
||||
### [Remove submissions from users who have crossposted or used 'freekarma' subs](/docs/examples/subredditReady/freeKarmaOrCrosspostSpam.json5)
|
||||
|
||||
Removes the submission if either of the above two behaviors is detected.
|
||||
|
||||
### [Remove link submissions where the user's history is comprised of 10% or more of the same link](/docs/examples/subredditReady/selfPromo.json5)
|
||||
|
||||
If the link origin (youtube author, twitter author, etc., or the regular domain for non-media links)

* comprises 10% or more of the user's **entire** history in the past (100 activities or 6 months)
* or comprises 10% or more of the user's **submission** history in the past (100 activities or 6 months) and the user has low engagement (<50% of history is comments or >40% of comments are as OP)
|
||||
|
||||
then remove the submission
|
||||
|
||||
## Comment-based behavior
|
||||
|
||||
### [Remove comment if the user has posted the same comment 4 or more times in a row](/docs/examples/subredditReady/commentSpam.json5)
|
||||
|
||||
If the user made the same comment (with some fuzzy matching) 4 or more times in a row in the past (50 activities or 6 months) then remove the comment.
|
||||
42
docs/examples/subredditReady/commentSpam.json5
Normal file
@@ -0,0 +1,42 @@
|
||||
{
|
||||
"polling": ["newComm"],
|
||||
"checks": [
|
||||
{
|
||||
//
|
||||
// Stop users who spam the same comment many times
|
||||
//
|
||||
// Remove a COMMENT if the user has posted the same comment at least 4 times in recent history
|
||||
//
|
||||
"name": "low xp comment spam",
|
||||
"description": "X-posted comment >=4x",
|
||||
"kind": "comment",
|
||||
"condition": "AND",
|
||||
"rules": [
|
||||
{
|
||||
"name": "xPostLow",
|
||||
"kind": "repeatActivity",
|
||||
"gapAllowance": 2,
|
||||
"threshold": ">= 4",
|
||||
"window": {
|
||||
"count": 50,
|
||||
"duration": "6 months"
|
||||
}
|
||||
},
|
||||
],
|
||||
"actions": [
|
||||
// remove this after confirming behavior is acceptable
|
||||
{
|
||||
"kind": "report",
|
||||
"content": "Remove=> Posted same comment {{rules.xpostlow.largestRepeat}}x times"
|
||||
},
|
||||
//
|
||||
//
|
||||
{
|
||||
"kind": "remove",
|
||||
// remove the line below after confirming behavior is acceptable
|
||||
"dryRun": true
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
77
docs/examples/subredditReady/crosspostSpam.json5
Normal file
@@ -0,0 +1,77 @@
|
||||
{
|
||||
"polling": ["unmoderated"],
|
||||
"checks": [
|
||||
{
|
||||
//
|
||||
// Stop users who post low-effort, crossposted spam
|
||||
//
|
||||
// Remove a SUBMISSION if the user has crossposted it at least 4 times in recent history AND
|
||||
// less than 50% of their activity is comments OR more than 40% of those comments are as OP (in their own submissions)
|
||||
//
|
||||
"name": "low xp spam and engagement",
|
||||
"description": "X-posted 4x and low comment engagement",
|
||||
"kind": "submission",
|
||||
"itemIs": [
|
||||
{
|
||||
"removed": false
|
||||
}
|
||||
],
|
||||
"condition": "AND",
|
||||
"rules": [
|
||||
{
|
||||
"name": "xPostLow",
|
||||
"kind": "repeatActivity",
|
||||
"gapAllowance": 2,
|
||||
"threshold": ">= 4",
|
||||
"window": {
|
||||
"count": 50,
|
||||
"duration": "6 months"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "lowOrOpComm",
|
||||
"kind": "history",
|
||||
"criteriaJoin": "OR",
|
||||
"criteria": [
|
||||
{
|
||||
"window": {
|
||||
"count": 100,
|
||||
"duration": "6 months"
|
||||
},
|
||||
"comment": "< 50%"
|
||||
},
|
||||
{
|
||||
"window": {
|
||||
"count": 100,
|
||||
"duration": "6 months"
|
||||
},
|
||||
"comment": "> 40% OP"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"actions": [
|
||||
// remove this after confirming behavior is acceptable
|
||||
{
|
||||
"kind": "report",
|
||||
"content": "Remove=>{{rules.xpostlow.largestRepeat}} X-P => {{rules.loworopcomm.thresholdSummary}}"
|
||||
},
|
||||
//
|
||||
//
|
||||
{
|
||||
"kind": "remove",
|
||||
// remove the line below after confirming behavior is acceptable
|
||||
"dryRun": true
|
||||
},
|
||||
// optionally remove "dryRun" from below if you want to leave a comment on removal
|
||||
// PROTIP: the comment is bland, you should make it better
|
||||
{
|
||||
"kind": "comment",
|
||||
"content": "Your submission has been removed because you cross-posted it {{rules.xpostlow.largestRepeat}} times and you have very low engagement outside of making submissions",
|
||||
"distinguish": true,
|
||||
"dryRun": true
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
138
docs/examples/subredditReady/freeKarmaOrCrosspostSpam.json5
Normal file
@@ -0,0 +1,138 @@
|
||||
{
|
||||
"polling": [
|
||||
"unmoderated"
|
||||
],
|
||||
"checks": [
|
||||
{
|
||||
//
|
||||
// Stop users who post low-effort, crossposted spam
|
||||
//
|
||||
// Remove a SUBMISSION if the user has crossposted it at least 4 times in recent history AND
|
||||
// less than 50% of their activity is comments OR more than 40% of those comments are as OP (in their own submissions)
|
||||
//
|
||||
"name": "remove on low xp spam and engagement",
|
||||
"description": "X-posted 4x and low comment engagement",
|
||||
"kind": "submission",
|
||||
"itemIs": [
|
||||
{
|
||||
"removed": false
|
||||
}
|
||||
],
|
||||
"condition": "AND",
|
||||
"rules": [
|
||||
{
|
||||
"name": "xPostLow",
|
||||
"kind": "repeatActivity",
|
||||
"gapAllowance": 2,
|
||||
"threshold": ">= 4",
|
||||
"window": {
|
||||
"count": 50,
|
||||
"duration": "6 months"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "lowOrOpComm",
|
||||
"kind": "history",
|
||||
"criteriaJoin": "OR",
|
||||
"criteria": [
|
||||
{
|
||||
"window": {
|
||||
"count": 100,
|
||||
"duration": "6 months"
|
||||
},
|
||||
"comment": "< 50%"
|
||||
},
|
||||
{
|
||||
"window": {
|
||||
"count": 100,
|
||||
"duration": "6 months"
|
||||
},
|
||||
"comment": "> 40% OP"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"actions": [
|
||||
// remove this after confirming behavior is acceptable
|
||||
{
|
||||
"kind": "report",
|
||||
"content": "Remove=>{{rules.xpostlow.largestRepeat}} X-P => {{rules.loworopcomm.thresholdSummary}}"
|
||||
},
|
||||
//
|
||||
//
|
||||
{
|
||||
"kind": "remove",
|
||||
// remove the line below after confirming behavior is acceptable
|
||||
"dryRun": true
|
||||
},
|
||||
// optionally remove "dryRun" from below if you want to leave a comment on removal
|
||||
// PROTIP: the comment is bland, you should make it better
|
||||
{
|
||||
"kind": "comment",
|
||||
"content": "Your submission has been removed because you cross-posted it {{rules.xpostlow.largestRepeat}} times and you have very low engagement outside of making submissions",
|
||||
"distinguish": true,
|
||||
"dryRun": true
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
//
|
||||
// Remove submissions from users who have recent activity in freekarma subs within the last 50 activities or 6 months (whichever is less)
|
||||
//
|
||||
"name": "freekarma removal",
|
||||
"description": "Remove submission if user has used freekarma sub recently",
|
||||
"kind": "submission",
|
||||
"itemIs": [
|
||||
{
|
||||
"removed": false
|
||||
}
|
||||
],
|
||||
"condition": "AND",
|
||||
"rules": [
|
||||
{
|
||||
"name": "freekarma",
|
||||
"kind": "recentActivity",
|
||||
"window": {
|
||||
"count": 50,
|
||||
"duration": "6 months"
|
||||
},
|
||||
"useSubmissionAsReference": false,
|
||||
"thresholds": [
|
||||
{
|
||||
"subreddits": [
|
||||
"FreeKarma4U",
|
||||
"FreeKarma4You",
|
||||
"KarmaStore",
|
||||
"promote",
|
||||
"shamelessplug",
|
||||
"upvote"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"actions": [
|
||||
// remove this after confirming behavior is acceptable
|
||||
{
|
||||
"kind": "report",
|
||||
"content": "Remove=> {{rules.newtube.totalCount}} activities in freekarma subs"
|
||||
},
|
||||
//
|
||||
//
|
||||
{
|
||||
"kind": "remove",
|
||||
// remove the line below after confirming behavior is acceptable
|
||||
"dryRun": true
|
||||
},
|
||||
// optionally remove "dryRun" from below if you want to leave a comment on removal
|
||||
// PROTIP: the comment is bland, you should make it better
|
||||
{
|
||||
"kind": "comment",
|
||||
"content": "Your submission has been removed because you have recent activity in 'freekarma' subs",
|
||||
"distinguish": true,
|
||||
"dryRun": true
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
64
docs/examples/subredditReady/freekarma.json5
Normal file
@@ -0,0 +1,64 @@
|
||||
{
|
||||
"polling": [
|
||||
"unmoderated"
|
||||
],
|
||||
"checks": [
|
||||
{
|
||||
//
|
||||
// Remove submissions from users who have recent activity in freekarma subs within the last 50 activities or 6 months (whichever is less)
|
||||
//
|
||||
"name": "freekarma removal",
|
||||
"description": "Remove submission if user has used freekarma sub recently",
|
||||
"kind": "submission",
|
||||
"itemIs": [
|
||||
{
|
||||
"removed": false
|
||||
}
|
||||
],
|
||||
"condition": "AND",
|
||||
"rules": [
|
||||
{
|
||||
"name": "freekarma",
|
||||
"kind": "recentActivity",
|
||||
"window": {
|
||||
"count": 50,
|
||||
"duration": "6 months"
|
||||
},
|
||||
"useSubmissionAsReference": false,
|
||||
"thresholds": [
|
||||
{
|
||||
"subreddits": [
|
||||
"FreeKarma4U",
|
||||
"FreeKarma4You",
|
||||
"KarmaStore",
|
||||
"upvote"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"actions": [
|
||||
// remove this after confirming behavior is acceptable
|
||||
{
|
||||
"kind": "report",
|
||||
"content": "Remove=> {{rules.newtube.totalCount}} activities in freekarma subs"
|
||||
},
|
||||
//
|
||||
//
|
||||
{
|
||||
"kind": "remove",
|
||||
// remove the line below after confirming behavior is acceptable
|
||||
"dryRun": true,
|
||||
},
|
||||
// optionally remove "dryRun" from below if you want to leave a comment on removal
|
||||
// PROTIP: the comment is bland, you should make it better
|
||||
{
|
||||
"kind": "comment",
|
||||
"content": "Your submission has been removed because you have recent activity in 'freekarma' subs",
|
||||
"distinguish": true,
|
||||
"dryRun": true,
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
104
docs/examples/subredditReady/selfPromo.json5
Normal file
@@ -0,0 +1,104 @@
|
||||
{
|
||||
"polling": [
|
||||
"unmoderated"
|
||||
],
|
||||
"checks": [
|
||||
{
|
||||
//
|
||||
// Stop users who make link submissions with a self-promotional agenda (with reddit's suggested 10% rule)
|
||||
// https://www.reddit.com/wiki/selfpromotion#wiki_guidelines_for_self-promotion_on_reddit
|
||||
//
|
||||
// Remove a SUBMISSION if the link comprises more than or equal to 10% of users history (100 activities or 6 months) OR
|
||||
//
|
||||
// if link comprises 10% of submission history (100 activities or 6 months)
|
||||
// AND less than 50% of their activity is comments OR more than 40% of those comments are as OP (in their own submissions)
|
||||
//
|
||||
"name": "Self-promo all AND low engagement",
|
||||
"description": "Self-promo is >10% for all or just sub and low comment engagement",
|
||||
"kind": "submission",
|
||||
"condition": "OR",
|
||||
"rules": [
|
||||
{
|
||||
"name": "attr",
|
||||
"kind": "attribution",
|
||||
"criteria": [
|
||||
{
|
||||
"threshold": ">= 10%",
|
||||
"window": {
|
||||
"count": 100,
|
||||
"duration": "6 months"
|
||||
},
|
||||
"domains": [
|
||||
"AGG:SELF"
|
||||
]
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
"condition": "AND",
|
||||
"rules": [
|
||||
{
|
||||
"name": "attrsub",
|
||||
"kind": "attribution",
|
||||
"criteria": [
|
||||
{
|
||||
"threshold": ">= 10%",
|
||||
"thresholdOn": "submissions",
|
||||
"window": {
|
||||
"count": 100,
|
||||
"duration": "6 months"
|
||||
},
|
||||
"domains": [
|
||||
"AGG:SELF"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "lowOrOpComm",
|
||||
"kind": "history",
|
||||
"criteriaJoin": "OR",
|
||||
"criteria": [
|
||||
{
|
||||
"window": {
|
||||
"count": 100,
|
||||
"duration": "6 months"
|
||||
},
|
||||
"comment": "< 50%"
|
||||
},
|
||||
{
|
||||
"window": {
|
||||
"count": 100,
|
||||
"duration": "6 months"
|
||||
},
|
||||
"comment": "> 40% OP"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"kind": "report",
|
||||
"content": "{{rules.attr.largestPercent}}{{rules.attrsub.largestPercent}} of {{rules.attr.activityTotal}}{{rules.attrsub.activityTotal}} items ({{rules.attr.window}}{{rules.attrsub.window}}){{#rules.loworopcomm.thresholdSummary}} => {{rules.loworopcomm.thresholdSummary}}{{/rules.loworopcomm.thresholdSummary}}"
|
||||
},
|
||||
//
|
||||
//
|
||||
{
|
||||
"kind": "remove",
|
||||
// remove the line below after confirming behavior is acceptable
|
||||
"dryRun": true
|
||||
},
|
||||
// optionally remove "dryRun" from below if you want to leave a comment on removal
|
||||
// PROTIP: the comment is bland, you should make it better
|
||||
{
|
||||
"kind": "comment",
|
||||
"content": "Your submission has been removed it comprises 10% or more of your recent history ({{rules.attr.largestPercent}}{{rules.attrsub.largestPercent}}). This is against [reddit's self promotional guidelines.](https://www.reddit.com/wiki/selfpromotion#wiki_guidelines_for_self-promotion_on_reddit)",
|
||||
"distinguish": true,
|
||||
"dryRun": true
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
# [Toolbox](https://www.reddit.com/r/toolbox/wiki/docs) [User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes)
|
||||
|
||||
Context Bot supports reading and writing [User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) for the [Toolbox](https://www.reddit.com/r/toolbox/wiki/docs) extension.
|
||||
Context Mod supports reading and writing [User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) for the [Toolbox](https://www.reddit.com/r/toolbox/wiki/docs) extension.
|
||||
|
||||
**You must have Toolbox set up for your subreddit and at least one User Note created before you can use User Notes related features on Context Bot.**
|
||||
|
||||
@@ -8,9 +8,9 @@ Context Bot supports reading and writing [User Notes](https://www.reddit.com/r/t
|
||||
|
||||
## Filter
|
||||
|
||||
User Notes are an additional criteria on [AuthorCriteria](https://json-schema.app/view/%23%2Fdefinitions%2FAuthorCriteria?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) that can be used alongside other Author properties for both [filtering rules and in the AuthorRule.](/docs/examples/author/)
|
||||
User Notes are an additional criteria on [AuthorCriteria](https://json-schema.app/view/%23%2Fdefinitions%2FAuthorCriteria?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json) that can be used alongside other Author properties for both [filtering rules and in the AuthorRule.](/docs/examples/author/)
|
||||
|
||||
Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FUserNoteCriteria?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the **UserNoteCriteria** object that can be used in AuthorCriteria.
|
||||
Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FUserNoteCriteria?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json) for a complete reference of the **UserNoteCriteria** object that can be used in AuthorCriteria.
|
||||
|
||||
### Examples
|
||||
|
||||
@@ -18,7 +18,7 @@ Consult the [schema](https://json-schema.app/view/%23%2Fdefinitions%2FUserNoteCr
|
||||
|
||||
## Action
|
||||
|
||||
A User Note can also be added to the Author of a Submission or Comment with the [UserNoteAction.](https://json-schema.app/view/%23%2Fdefinitions%2FUserNoteActionJson?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json)
|
||||
A User Note can also be added to the Author of a Submission or Comment with the [UserNoteAction.](https://json-schema.app/view/%23%2Fdefinitions%2FUserNoteActionJson?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json)
|
||||
|
||||
|
||||
### Examples
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
### Creating Your Configuration
|
||||
|
||||
#### Get the raw contents of the configuration
|
||||
|
||||
* In a new tab open the github page for the configuration you want ([example](/docs/examples/repeatActivity/crosspostSpamming.json5))
|
||||
* Click the **Raw** button...keep this tab open and move on to the next step
|
||||
|
||||
#### Edit your wiki configuration
|
||||
|
||||
* Visit the wiki page of the subreddit you want the bot to moderate
|
||||
* Using default bot settings this will be `https://old.reddit.com/r/YOURSUBERDDIT/wiki/botconfig/contextbot`
|
||||
* If the page does not exist create it, otherwise click **Edit**
|
||||
* Copy-paste the configuration into the wiki text box
|
||||
* In the previous tab you opened (for the configuration) **Select All** (Ctrl+A), then **Copy**
|
||||
* On the wiki page **Paste** into the text box
|
||||
* Save the edited wiki page
|
||||
* Ensure the wiki page visibility is restricted
|
||||
* On the wiki page click **settings** (**Page settings** in new reddit)
|
||||
* Check the box for **Only mods may edit and view** and then **save**
|
||||
140
docs/gettingStartedMod.md
Normal file
@@ -0,0 +1,140 @@
|
||||
This getting started guide is for **reddit moderators** -- that is, someone who wants **an existing ContextMod bot to run on their subreddit.** If you are trying to run a ContextMod
|
||||
instance (software) please refer to the [operator getting started](/docs/gettingStartedOperator.md) guide.
|
||||
|
||||
# Table of Contents
|
||||
|
||||
* [Prior Knowledge](#prior-knowledge)
|
||||
* [Choose A Bot](#choose-a-bot)
|
||||
* [Use The Operator's Bot](#use-the-operators-bot)
|
||||
* [Bring Your Own Bot (BYOB)](#bring-your-own-bot-byob)
|
||||
* [Configuring the Bot](#configuring-the-bot)
|
||||
* [Monitor the Bot](#monitor-the-bot)
|
||||
|
||||
# Prior Knowledge
|
||||
|
||||
Before continuing with this guide you should first make sure you understand how ContextMod works. Please review this documentation:
|
||||
|
||||
* [How It Works](/docs#how-it-works)
|
||||
* [Core Concepts](/docs#concepts)
|
||||
|
||||
# Choose A Bot
|
||||
|
||||
First determine what bot (reddit account) you want to run ContextMod with. (You may have already discussed this with your operator)
|
||||
|
||||
## Use the Operator's Bot
|
||||
|
||||
If the Operator has communicated that **you should add a bot they control as a moderator** to your subreddit this is the option you will use.
|
||||
|
||||
**Pros:**
|
||||
|
||||
* Do not have to create and keep track of another reddit account
|
||||
* Easiest option in terms of setup for both moderators and operator
|
||||
|
||||
**Cons:**
|
||||
|
||||
* Shared api quota among other moderated subreddits (not great for high-volume subreddits)
|
||||
|
||||
___
|
||||
|
||||
Ensure that you are in communication with the **operator** for this bot. The bot **will not automatically accept a moderator invitation,** it must be manually done by the bot operator. This is an intentional barrier to ensure moderators and the operator are familiar with their respective needs and have some form of trust.
|
||||
|
||||
Now invite the bot to moderate your subreddit. The bot should have at least these permissions:
|
||||
|
||||
* Manage Users
|
||||
* Manage Posts and Comments
|
||||
* Manage Flair
|
||||
|
||||
Additionally, the bot must have the **Manage Wiki Pages** permission if you plan to use [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes). If you are not planning on using this feature and do not want the bot to have this permission then you **must** ensure the bot has visibility to the configuration wiki page (detailed below).
|
||||
|
||||
## Bring Your Own Bot (BYOB)
|
||||
|
||||
If the operator has communicated that **they want to use a bot you control** this is the option you will use.
|
||||
|
||||
**Pros:**
|
||||
|
||||
* **Dedicated API quota**
|
||||
* This is basically a requirement if your subreddit has high-volume activity and you plan on running checks on comments
|
||||
* More security guarantees since you control the account
|
||||
* **Note:** authenticating an account does NOT give the operator access to view or change the email/password for the account
|
||||
* Established history in your subreddit
|
||||
|
||||
**Cons:**
|
||||
|
||||
* More setup required for both moderators and operators
|
||||
|
||||
___
|
||||
|
||||
The **operator** will send you an **invite link** that you will use to authenticate your bot with the operator's application. Example link: `https://operatorsUrl.com/auth/invite?invite=4kf9n3o03ncd4nd`
|
||||
|
||||
Review the information shown on the invite link webpage and then follow the directions shown to authorize your bot for the operator.
|
||||
|
||||
**Note:** There is information displayed **after** authentication that you will need to communicate to your operator -- **Refresh** and **Access** token values. Make sure you save these somewhere as the invite link is **one-use only.**
|
||||
|
||||
# Configuring the Bot
|
||||
|
||||
## Setup wiki page
|
||||
|
||||
* Visit the wiki page of the subreddit you want the bot to moderate
|
||||
* The default location the bot checks for a configuration is at `https://old.reddit.com/r/YOURSUBREDDIT/wiki/botconfig/contextbot`
|
||||
* If the page does not exist create it
|
||||
* Ensure the wiki page visibility is restricted
|
||||
* On the wiki page click **settings** (**Page settings** in new reddit)
|
||||
* Check the box for **Only mods may edit and view** and then **save**
|
||||
* Alternatively, if you did not give the bot the **Manage Wiki Pages** permission then add it to the **allow users to edit page** setting
|
||||
|
||||
## Procure a configuration
|
||||
|
||||
Now you need to create the actual configuration that will determine the bot's behavior on your subreddit. This may have already been done for you by your operator or you may be copying a fellow moderator's configuration.
|
||||
|
||||
If you already have a configuration you may skip the below step and go directly to [saving your configuration](#saving-your-configuration)
|
||||
|
||||
### Using an Example Config
|
||||
|
||||
Visit the [Examples](https://github.com/FoxxMD/context-mod/tree/master/docs/examples) folder to find various examples of individual rules or see the [subreddit-ready examples.](/docs/examples/subredditReady)
|
||||
|
||||
After you have found a configuration to use as a starting point:
|
||||
|
||||
* In a new tab open the github page for the configuration you want ([example](/docs/examples/repeatActivity/crosspostSpamming.json5))
|
||||
* Click the **Raw** button, then select all and copy all of the text to your clipboard.
|
||||
|
||||
### Build Your Own Config
|
||||
|
||||
Additionally, you can use [this schema editor](https://json-schema.app/view/%23?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json) to build your configuration. The editor has a ton of handy features:
|
||||
|
||||
* fully annotated configuration data/structure
|
||||
* generated examples in json/yaml
|
||||
* built-in editor that automatically validates your config
|
||||
|
||||
PROTIP: Find an example config to use as a starting point and then build on it using the editor.
|
||||
|
||||
## Saving Your Configuration
|
||||
|
||||
* Open the wiki page you created in the [previous step](#setup-wiki-page) and click **edit**
|
||||
* Copy-paste your configuration into the wiki text box
|
||||
* Save the edited wiki page
|
||||
|
||||
___
|
||||
|
||||
The bot automatically checks for new configurations on your wiki page every 5 minutes. If your operator has made the web interface accessible you may log in there and force the config to update on your subreddit.
|
||||
|
||||
# Monitor the Bot
|
||||
|
||||
Monitoring the behavior of the bot depends on how your operator set up their instance. ContextMod comes with a built-in web interface that is secure and accessible only to moderators of subreddits it is running on. However, the operator must perform some additional setup to make this interface publicly accessible. If you do not have access to this interface please communicate with your operator.
|
||||
|
||||
After logging in to the interface you will find your subreddit in a tab at the top of the web page. Selecting your subreddit will give you access to:
|
||||
|
||||
* Current status of the bot
|
||||
* Current status of your configuration
|
||||
* Statistics pertaining to the number of checks/rules/actions run and cache usage
|
||||
* **A real-time view for bot logs pertaining to your subreddit**
|
||||
|
||||
The logs are the meat and potatoes of the bot and the main source of feedback you have for fine-tuning the bot's behavior. The **verbose** log level will show you:
|
||||
|
||||
* The event being processed
|
||||
* The individual results of triggered rules, per check
|
||||
* The checks that were run and their rules
|
||||
* The actions performed, with markdown content preview, of triggered checks
|
||||
|
||||
This information should enable you to tweak the criteria for your rules in order to get the required behavior from the bot.
|
||||
|
||||
Additionally, you can test your bot on any comment/submission by entering its permalink in the text box at the top of the logs and selecting **Dry Run** -- this will run the bot on an Activity without actually performing any actions, allowing you to preview the results of a run.
|
||||
71
docs/gettingStartedOperator.md
Normal file
@@ -0,0 +1,71 @@
|
||||
This getting started guide is for **Operators** -- that is, someone who wants to run the actual software for a ContextMod bot. If you are a **Moderator** check out the [moderator getting started](/docs/gettingStartedMod.md) guide instead.
|
||||
|
||||
# Table of Contents
|
||||
|
||||
* [Installation](#installation)
|
||||
* [Docker](#docker-recommended)
|
||||
* [Locally](#locally)
|
||||
* [Heroku](#heroku-quick-deployhttpsherokucomabout)
|
||||
* [Bot Authentication](#bot-authentication)
|
||||
* [Instance Configuration](#instance-configuration)
|
||||
* [Run Your Bot and Start Moderating](#run-your-bot-and-start-moderating)
|
||||
|
||||
# Installation
|
||||
|
||||
In order to run a ContextMod instance you must first install it somewhere.
|
||||
|
||||
ContextMod can be run on almost any operating system but it is recommended to use Docker due to ease of deployment.
|
||||
|
||||
## Docker (Recommended)
|
||||
|
||||
PROTIP: Using a container management tool like [Portainer.io CE](https://www.portainer.io/products/community-edition) will help with setup/configuration tremendously.
|
||||
|
||||
### [Dockerhub](https://hub.docker.com/r/foxxmd/context-mod)
|
||||
|
||||
```
|
||||
foxxmd/context-mod:latest
|
||||
```
|
||||
|
||||
Adding **environment variables** to your `docker run` command will pass them through to the app, e.g.:
|
||||
```
|
||||
docker run -d -e "CLIENT_ID=myId" ... foxxmd/context-mod
|
||||
```
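For example, a sketch of a fuller run command that passes the minimum required credentials (placeholder values) and publishes the web interface on its default port of 8085:

```
docker run -d \
  -e "CLIENT_ID=myId" \
  -e "CLIENT_SECRET=mySecret" \
  -e "REFRESH_TOKEN=myRefreshToken" \
  -e "ACCESS_TOKEN=myAccessToken" \
  -p 8085:8085 \
  foxxmd/context-mod
```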
|
||||
|
||||
### Locally
|
||||
|
||||
Requirements:
|
||||
|
||||
* Typescript >=4.3.5
|
||||
* Node >=15
|
||||
|
||||
Clone this repository somewhere and then install from the working directory
|
||||
|
||||
```bash
|
||||
git clone https://github.com/FoxxMD/context-mod.git
|
||||
cd context-mod
|
||||
npm install
|
||||
tsc -p .
|
||||
```
|
||||
|
||||
### [Heroku Quick Deploy](https://heroku.com/about)
|
||||
[](https://dashboard.heroku.com/new?template=https://github.com/FoxxMD/context-mod)
|
||||
|
||||
# Bot Authentication
|
||||
|
||||
Next you need to create a bot and authenticate it with Reddit. Follow the [bot authentication guide](/docs/botAuthentication.md) to complete this step.
|
||||
|
||||
# Instance Configuration
|
||||
|
||||
Finally, you must provide the credentials you received from the **Bot Authentication** step to the ContextMod instance you installed earlier. Refer to the [Operator Configuration](/docs/operatorConfiguration.md) guide to learn how this can be done as there are multiple approaches depending on how you installed the software.
|
||||
|
||||
Additionally, at this step you can also tweak many more settings and behavior concerning how your CM bot will operate.
|
||||
|
||||
# Run Your Bot and Start Moderating
|
||||
|
||||
Congratulations! You should now have a fully authenticated bot running on ContextMod software.
|
||||
|
||||
In order for your Bot to operate on reddit though it **must be a moderator in the subreddit you want it to run in.** This may be your own subreddit or someone else's.
|
||||
|
||||
**Note: ContextMod does not currently handle moderation invites automatically** and may never have this functionality. Due to the fact that many of its behaviors are api-heavy and that subreddits can control their own configuration the api and resource (cpu/memory) usage of a ContextMod instance can be highly variable. It therefore does not make sense to allow any/all subreddits to automatically have access to an instance through automatically accepting moderator invites. So...if you are planning to run a ContextMod instance for subreddits other than those you moderate you should establish solid trust with moderators of that subreddit as well as a solid line of communication in order to ensure their configurations can be tailored to best fit their needs and your resources.
|
||||
|
||||
Once you have logged in as your bot and manually accepted the moderator invite you will need to restart your ContextMod instance in order for these changes to take effect.
|
||||
@@ -6,6 +6,7 @@ activities the Bot runs on.
|
||||
|
||||
* [Minimum Required Configuration](#minimum-required-configuration)
|
||||
* [Defining Configuration](#defining-configuration)
|
||||
* [CLI Usage](#cli-usage)
|
||||
* [Examples](#example-configurations)
|
||||
* [Minimum Config](#minimum-config)
|
||||
* [Using Config Overrides](#using-config-overrides)
|
||||
@@ -13,20 +14,20 @@ activities the Bot runs on.
|
||||
|
||||
# Minimum Required Configuration
|
||||
|
||||
The minimum required configuration variables to run the bot on subreddits are:
|
||||
| property | Server And Web | Server Only | Web/Bot-Auth Only |
|:--------------:|:------------------:|:------------------:|:------------------:|
| `clientId` | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: |
| `clientSecret` | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: |
| `redirectUri` | :heavy_check_mark: | :x: | :heavy_check_mark: |
| `refreshToken` | :heavy_check_mark: | :heavy_check_mark: | :x: |
| `accessToken` | :heavy_check_mark: | :heavy_check_mark: | :x: |
|
||||
|
||||
* clientId
|
||||
* clientSecret
|
||||
* refreshToken
|
||||
* accessToken
|
||||
|
||||
However, only **clientId** and **clientSecret** are required to run the **oauth helper** mode for generate the last two
|
||||
configuration variables.
|
||||
Refer to the **[Bot Authentication guide](/docs/botAuthentication.md)** to retrieve credentials.
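For example, supplying the **Server Only** set from the table above through **ENV** might look like this (placeholder values; variable names are taken from the ENV mappings documented below):

```
CLIENT_ID=f4b4df1c7b2
CLIENT_SECRET=34v5q1c56ub
REFRESH_TOKEN=34_f1w1v4
ACCESS_TOKEN=p75_1c467b2
```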
|
||||
|
||||
# Defining Configuration
|
||||
|
||||
RCB can be configured using **any or all** of the approaches below. **At each level ALL configuration values are
|
||||
optional** but some are required depending on the mode of operation for the application.
|
||||
CM can be configured using **any or all** of the approaches below. Note that **at each level ALL configuration values are
|
||||
optional** but the "required configuration" mentioned above must be available when all levels are combined.
|
||||
|
||||
Any values defined at a **lower-listed** level of configuration will override any values from a higher-listed
|
||||
configuration.
|
||||
@@ -34,177 +35,83 @@ configuration.
|
||||
* **ENV** -- Environment variables loaded from an [`.env`](https://github.com/toddbluhm/env-cmd) file (path may be
|
||||
specified with `--file` cli argument)
|
||||
* **ENV** -- Any already existing environment variables (exported on command line/terminal profile/etc.)
|
||||
* **FILE** -- Values specified in a JSON configuration file using the structure shown below (TODO example json file)
|
||||
* **ARG** -- Values specified as CLI arguments to the program (see [Usage](/README.md#usage)
|
||||
or `node src/index.js run help` for details)
|
||||
* **FILE** -- Values specified in a JSON configuration file using the structure [in the schema](https://json-schema.app/view/%23?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FOperatorConfig.json)
|
||||
* **ARG** -- Values specified as CLI arguments to the program (see [ClI Usage](#cli-usage) below)
|
||||
|
||||
In the below configuration, if the variable is available at a level of configuration other than **FILE** it will be
|
||||
**Note:** When reading the **schema**, if the variable is available at a level of configuration other than **FILE** it will be
|
||||
noted with the same symbol as above. The value shown is the default.
|
||||
|
||||
**NOTE:** To load a JSON configuration (for **FILE**) use the `-c` cli argument EX: `node src/index.js -c /path/to/JSON/config.json`
|
||||
* To load a JSON configuration (for **FILE**) **from the command line** use the `-c` cli argument EX: `node src/index.js -c /path/to/JSON/config.json`
|
||||
* To load a JSON configuration (for **FILE**) **using an environmental variable** use `OPERATOR_CONFIG` EX: `OPERATOR_CONFIG=/path/to/JSON/config.json`
|
||||
|
||||
[**See the Operator Config Schema here**](https://json-schema.app/view/%23?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FOperatorConfig.json)
|
||||
|
||||
## Defining Multiple Bots or CM Instances
|
||||
|
||||
One ContextMod instance can
|
||||
|
||||
* Run multiple bots (multiple reddit accounts -- each as a bot)
|
||||
* Connect to many other, independent, ContextMod instances
|
||||
|
||||
However, the default configuration (using **ENV/ARG**) assumes you intend to run one bot (one reddit account) on one CM instance without these additional features. This keeps the most common mode of operation simple.
|
||||
|
||||
To take advantage of these additional features you **must** use a **FILE** configuration. Learn how this works and how to configure this scenario in the [Architecture Documentation.](/docs/serverClientArchitecture.md)
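As a rough sketch, a **FILE** configuration running two bots on one instance could look like the following (credential values are placeholders; see the full examples further below for the complete structure):

```json5
{
  "bots": [
    {
      "credentials": {
        "clientId": "f4b4df1c7b2",
        "clientSecret": "34v5q1c56ub",
        "refreshToken": "34_f1w1v4",
        "accessToken": "p75_1c467b2"
      }
    },
    {
      // a second reddit account run as its own bot by the same instance
      "credentials": {
        "clientId": "anotherClientId",
        "clientSecret": "anotherClientSecret",
        "refreshToken": "anotherRefreshToken",
        "accessToken": "anotherAccessToken"
      }
    }
  ]
}
```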
|
||||
|
||||
## CLI Usage
|
||||
|
||||
Running CM from the command line is accomplished with the following command:
|
||||
|
||||
```bash
|
||||
|
||||
node src/index.js run
```
|
||||
|
||||
```js
|
||||
const config = {
|
||||
operator: {
|
||||
// Username of the reddit user operating this application, used for displaying OP level info/actions in UI
|
||||
//
|
||||
// ENV => OPERATOR
|
||||
// ARG => --operator <name>
|
||||
name: undefined,
|
||||
// An optional name to display who is operating this application in the UI
|
||||
//
|
||||
// ENV => OPERATOR_DISPLAY
|
||||
// ARG => --operatorDisplay <name>
|
||||
display: undefined,
|
||||
},
|
||||
// Values required to interact with Reddit's API
|
||||
credentials: {
|
||||
// Client ID for your Reddit application
|
||||
//
|
||||
// ENV => CLIENT_ID
|
||||
// ARG => --clientId <id>
|
||||
clientId: undefined,
|
||||
// Client Secret for your Reddit application
|
||||
//
|
||||
// ENV => CLIENT_SECRET
|
||||
// ARG => --clientSecret <secret>
|
||||
clientSecret: undefined,
|
||||
// Redirect URI for your Reddit application
|
||||
//
|
||||
// ENV => REDIRECT_URI
|
||||
// ARG => --redirectUri <uri>
|
||||
redirectUri: undefined,
|
||||
// Access token retrieved from authenticating an account with your Reddit Application
|
||||
//
|
||||
// ENV => ACCESS_TOKEN
|
||||
// ARG => --accessToken <token>
|
||||
accessToken: undefined,
|
||||
// Refresh token retrieved from authenticating an account with your Reddit Application
|
||||
//
|
||||
// ENV => REFRESH_TOKEN
|
||||
// ARG => --refreshToken <token>
|
||||
refreshToken: undefined
|
||||
},
|
||||
logging: {
|
||||
// Minimum level to log at.
|
||||
// Must be one of: error, warn, info, verbose, debug
|
||||
//
|
||||
// ENV => LOG_LEVEL
|
||||
// ARG => --logLevel <level>
|
||||
level: 'verbose',
|
||||
// Absolute path to directory to store rotated logs in.
|
||||
//
|
||||
// Leaving undefined disables rotating logs
|
||||
// Use ENV => true or ARG => --logDir to log to the current directory under /logs folder
|
||||
//
|
||||
// ENV => LOG_DIR
|
||||
// ARG => --logDir [dir]
|
||||
path: undefined,
|
||||
},
|
||||
snoowrap: {
|
||||
// Proxy endpoint to make Snoowrap requests to
|
||||
//
|
||||
// ENV => PROXY
|
||||
// ARG => --proxy <proxyEndpoint>
|
||||
proxy: undefined,
|
||||
// Set Snoowrap to log debug statements. If undefined will debug based on current log level
|
||||
//
|
||||
// ENV => SNOO_DEBUG
|
||||
// ARG => --snooDebug
|
||||
debug: false,
|
||||
},
|
||||
subreddits: {
|
||||
// Names of subreddits for bot to run on
|
||||
//
|
||||
// If undefined the bot will run on all subreddits it is a moderator of
|
||||
//
|
||||
// ENV => SUBREDDITS (comma-separated)
|
||||
// ARG => --subreddits <list...>
|
||||
names: undefined,
|
||||
// If true set all subreddits in dry run mode, overriding configurations
|
||||
//
|
||||
// ENV => DRYRUN
|
||||
// ARG => --dryRun
|
||||
dryRun: false,
|
||||
// The default relative url to contextbot wiki page EX https://reddit.com/r/subreddit/wiki/<path>
|
||||
//
|
||||
// ENV => WIKI_CONFIG
|
||||
// ARG => --wikiConfig <path>
|
||||
wikiConfig: 'botconfig/contextbot',
|
||||
// Interval, in seconds, to perform application heartbeat
|
||||
//
|
||||
// ENV => HEARTBEAT
|
||||
// ARG => --heartbeat <sec>
|
||||
heartbeatInterval: 300,
|
||||
},
|
||||
polling: {
|
||||
// If set to true all subreddits polling unmoderated/modqueue with default polling settings will share a request to "r/mod"
|
||||
// otherwise each subreddit will poll its own mod view
|
||||
//
|
||||
// ENV => SHARE_MOD
|
||||
// ARG => --shareMod
|
||||
sharedMod: false,
|
||||
// Default interval, in seconds, to poll activity sources at
|
||||
interval: 30,
|
||||
},
|
||||
web: {
|
||||
// Whether the web server interface should be started
|
||||
// In most cases this does not need to be specified as the application will automatically detect if it is possible to start it --
|
||||
// use this to specify 'cli' if you encounter errors with port/address or are paranoid
|
||||
//
|
||||
// ENV => WEB
|
||||
// ARG => 'node src/index.js run [interface]' -- interface can be 'web' or 'cli'
|
||||
enabled: true,
|
||||
// Set the port for the web interface
|
||||
//
|
||||
// ENV => PORT
|
||||
// ARG => --port <number>
|
||||
port: 8085,
|
||||
session: {
|
||||
// The cache provider for sessions
|
||||
// can be 'memory', 'redis', or a custom config
|
||||
provider: 'memory',
|
||||
// The secret value used to encrypt session data
|
||||
// If provider is persistent (redis) specifying a value here will ensure sessions are valid between application restarts
|
||||
//
|
||||
// If undefined a random string is generated
|
||||
secret: undefined,
|
||||
},
|
||||
// The default log level to filter to in the web interface
|
||||
// If not specified will be same as application log level
|
||||
logLevel: undefined,
|
||||
// Maximum number of log statements to keep in memory for each subreddit
|
||||
maxLogs: 200,
|
||||
},
|
||||
caching: {
|
||||
// The default maximum age of cached data for an Author's history
|
||||
//
|
||||
// ENV => AUTHOR_TTL
|
||||
// ARG => --authorTTL <sec>
|
||||
authorTTL: 60,
|
||||
// The default maximum age of cached usernotes for a subreddit
|
||||
userNotesTTL: 300,
|
||||
// The default maximum age of cached content, retrieved from an external URL or subreddit wiki, used for comments/ban/footer
|
||||
wikiTTL: 300,
|
||||
// The cache provider used for caching reddit API responses and some internal results
|
||||
// can be 'memory', 'redis', or a custom config
|
||||
provider: 'memory'
|
||||
},
|
||||
api: {
|
||||
// The number of API requests remaining at which "slow mode" should be enabled
|
||||
//
|
||||
// ENV => SOFT_LIMIT
|
||||
// ARG => --softLimit <limit>
|
||||
softLimit: 250,
|
||||
// The number of API requests remaining at which all subreddit event polling should be paused
|
||||
//
|
||||
// ENV => HARD_LIMIT
|
||||
// ARG => --hardLimit <limit>
|
||||
hardLimit: 50,
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Run `node src/index.js run help` to get a list of available command line options (denoted by **ARG** above):
|
||||
|
||||
<details>
|
||||
|
||||
```
|
||||
Usage: index [options] [command]
|
||||
|
||||
Options:
|
||||
-h, --help display help for command
|
||||
|
||||
Commands:
|
||||
run [options] [interface] Monitor new activities from configured subreddits.
|
||||
check [options] <activityIdentifier> [type] Run check(s) on a specific activity
|
||||
unmoderated [options] <subreddits...> Run checks on all unmoderated activity in the modqueue
|
||||
help [command] display help for command
|
||||
|
||||
|
||||
Options:
|
||||
-c, --operatorConfig <path> An absolute path to a JSON file to load all parameters from (default: process.env.OPERATOR_CONFIG)
|
||||
-i, --clientId <id> Client ID for your Reddit application (default: process.env.CLIENT_ID)
|
||||
-e, --clientSecret <secret> Client Secret for your Reddit application (default: process.env.CLIENT_SECRET)
|
||||
-a, --accessToken <token> Access token retrieved from authenticating an account with your Reddit Application (default: process.env.ACCESS_TOKEN)
|
||||
-r, --refreshToken <token> Refresh token retrieved from authenticating an account with your Reddit Application (default: process.env.REFRESH_TOKEN)
|
||||
-u, --redirectUri <uri> Redirect URI for your Reddit application (default: process.env.REDIRECT_URI)
|
||||
-t, --sessionSecret <secret> Secret use to encrypt session id/data (default: process.env.SESSION_SECRET || a random string)
|
||||
-s, --subreddits <list...> List of subreddits to run on. Bot will run on all subs it has access to if not defined (default: process.env.SUBREDDITS)
|
||||
-d, --logDir [dir] Absolute path to directory to store rotated logs in. Leaving undefined disables rotating logs (default: process.env.LOG_DIR)
|
||||
-l, --logLevel <level> Minimum level to log at (default: process.env.LOG_LEVEL || verbose)
|
||||
-w, --wikiConfig <path> Relative url to contextbot wiki page EX https://reddit.com/r/subreddit/wiki/<path> (default: process.env.WIKI_CONFIG || 'botconfig/contextbot')
|
||||
--snooDebug Set Snoowrap to debug. If undefined will be on if logLevel='debug' (default: process.env.SNOO_DEBUG)
|
||||
--authorTTL <s> Set the TTL (seconds) for the Author Activities shared cache (default: process.env.AUTHOR_TTL || 60)
|
||||
--heartbeat <s> Interval, in seconds, between heartbeat checks. (default: process.env.HEARTBEAT || 300)
|
||||
--softLimit <limit> When API limit remaining (600/10min) is lower than this subreddits will have SLOW MODE enabled (default: process.env.SOFT_LIMIT || 250)
|
||||
--hardLimit <limit> When API limit remaining (600/10min) is lower than this all subreddit polling will be paused until api limit reset (default: process.env.SOFT_LIMIT || 250)
|
||||
--dryRun Set all subreddits in dry run mode, overriding configurations (default: process.env.DRYRUN || false)
|
||||
--proxy <proxyEndpoint> Proxy Snoowrap requests through this endpoint (default: process.env.PROXY)
|
||||
--operator <name...> Username(s) of the reddit user(s) operating this application, used for displaying OP level info/actions in UI (default: process.env.OPERATOR)
|
||||
--operatorDisplay <name> An optional name to display who is operating this application in the UI (default: process.env.OPERATOR_DISPLAY || Anonymous)
|
||||
-p, --port <port> Port for web server to listen on (default: process.env.PORT || 8085)
|
||||
-q, --shareMod If enabled then all subreddits using the default settings to poll "unmoderated" or "modqueue" will retrieve results from a shared request to /r/mod (default: process.env.SHARE_MOD || false)
|
||||
-h, --help display help for command
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
# Example Configurations
|
||||
|
||||
## Minimum Config
|
||||
@@ -214,14 +121,18 @@ Below are examples of the minimum required config to run the application using a
|
||||
Using **FILE**
|
||||
<details>
|
||||
|
||||
```json
|
||||
```json5
|
||||
{
|
||||
"credentials": {
|
||||
"clientId": "f4b4df1c7b2",
|
||||
"clientSecret": "34v5q1c56ub",
|
||||
"refreshToken": "34_f1w1v4",
|
||||
"accessToken": "p75_1c467b2"
|
||||
}
|
||||
"bots": [
|
||||
{
|
||||
"credentials": {
|
||||
"clientId": "f4b4df1c7b2",
|
||||
"clientSecret": "34v5q1c56ub",
|
||||
"refreshToken": "34_f1w1v4",
|
||||
"accessToken": "p75_1c467b2"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
@@ -252,17 +163,15 @@ node src/index.js run --clientId=f4b4df1c7b2 --clientSecret=34v5q1c56ub --refres
|
||||
|
||||
## Using Config Overrides
|
||||
|
||||
Using all three configs together:
|
||||
An example of using multiple configuration levels together IE all are provided to the application:
|
||||
|
||||
**FILE**
|
||||
<details>
|
||||
|
||||
```json
|
||||
{
|
||||
"credentials": {
|
||||
"clientId": "f4b4df1c7b2",
|
||||
"refreshToken": "34_f1w1v4",
|
||||
"accessToken": "p75_1c467b2"
|
||||
"logging": {
|
||||
"level": "debug"
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -275,9 +184,10 @@ Using all three configs together:
|
||||
|
||||
```
|
||||
CLIENT_SECRET=34v5q1c56ub
|
||||
REFRESH_TOKEN=34_f1w1v4
|
||||
ACCESS_TOKEN=p75_1c467b2
|
||||
SUBREDDITS=sub1,sub2,sub3
|
||||
PORT=9008
|
||||
LOG_LEVEL=DEBUG
|
||||
```
|
||||
|
||||
</details>
|
||||
@@ -287,12 +197,12 @@ LOG_LEVEL=DEBUG
|
||||
<details>
|
||||
|
||||
```
|
||||
node src/index.js run --subreddits=sub1
|
||||
node src/index.js run --subreddits=sub1 --clientId=34v5q1c56ub
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
Produces these variables at runtime for the application:
|
||||
When all three are used together they produce these variables at runtime for the application:
|
||||
|
||||
```
|
||||
clientId: f4b4df1c7b2
|
||||
@@ -304,9 +214,55 @@ port: 9008
|
||||
log level: debug
|
||||
```
|
||||
|
||||
## Configuring Client for Many Instances
|
||||
|
||||
See the [Architecture Docs](/docs/serverClientArchitecture.md) for more information.
|
||||
|
||||
<details>
|
||||
|
||||
```json5
|
||||
{
|
||||
"bots": [
|
||||
{
|
||||
"credentials": {
|
||||
"clientId": "f4b4df1c7b2",
|
||||
"clientSecret": "34v5q1c56ub",
|
||||
"refreshToken": "34_f1w1v4",
|
||||
"accessToken": "p75_1c467b2"
|
||||
}
|
||||
}
|
||||
],
|
||||
"web": {
|
||||
"credentials": {
|
||||
"clientId": "f4b4df1c7b2",
|
||||
"clientSecret": "34v5q1c56ub",
|
||||
"redirectUri": "http://localhost:8085/callback"
|
||||
},
|
||||
"clients": [
|
||||
// server application running on this same CM instance
|
||||
{
|
||||
"host": "localhost:8095",
|
||||
"secret": "localSecret"
|
||||
},
|
||||
// a server application running somewhere else
|
||||
{
|
||||
// api endpoint and port
|
||||
"host": "mySecondContextMod.com:8095",
|
||||
"secret": "anotherSecret"
|
||||
}
|
||||
]
|
||||
},
|
||||
"api": {
|
||||
"secret": "localSecret",
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
# Cache Configuration
|
||||
|
||||
RCB implements two caching backend **providers**. By default all providers use `memory`:
|
||||
CM implements two caching backend **providers**. By default all providers use `memory`:
|
||||
|
||||
* `memory` -- in-memory (non-persistent) backend
|
||||
* `redis` -- [Redis](https://redis.io/) backend
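
For example, the provider can be switched to Redis from the top-level `caching` property of the operator configuration. A minimal sketch, assuming `caching.provider` accepts connection details alongside `store` (the `host` and `port` values here are illustrative assumptions):

```json5
{
  "caching": {
    // TTL (seconds) for the shared Author Activities cache
    "authorTTL": 60,
    "provider": {
      // "memory" (default, non-persistent) or "redis"
      "store": "redis",
      // connection details below are illustrative assumptions
      "host": "localhost",
      "port": 6379
    }
  }
}
```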
BIN  docs/screenshots/editor.jpg (new file, 125 KiB)
BIN  docs/screenshots/oauth-invite.jpg (new file, 148 KiB)
BIN  docs/screenshots/oauth.jpg (new file, 226 KiB)
BIN  docs/screenshots/subredditStatus.jpg (new file, 479 KiB)
71   docs/serverClientArchitecture.md (new file)
@@ -0,0 +1,71 @@
|
||||
|
||||
# Overview
|
||||
|
||||
ContextMod's high-level functionality is separated into two **independently run** applications.
|
||||
|
||||
Each application consists of an [Express](https://expressjs.com/) web server that executes the core logic for that application; the two applications communicate with each other via HTTP API calls.
|
||||
|
||||
Applications:
|
||||
|
||||
* **Server** -- Responsible for **running the bots** and providing an API to retrieve information on and interact with them EX start/stop bot, reload config, retrieve operational status, etc.
|
||||
* **Client** -- Responsible for serving the **web interface** and handling the bot oauth authentication flow between operators and moderators.
|
||||
|
||||
Both applications operate independently and can be run individually. Which application(s) run is determined by environment variables, the operator config, or CLI arguments.
|
||||
|
||||
# Authentication
|
||||
|
||||
Communication between the applications is secured using [Json Web Tokens](https://github.com/mikenicholson/passport-jwt) signed/encoded by a **shared secret** (HMAC algorithm). The secret is defined in the operator configuration.
|
||||
|
||||
# Configuration
|
||||
|
||||
## Default Mode
|
||||
|
||||
**ContextMod is designed to operate in a "monolith" mode by default.**
|
||||
|
||||
This is done by assuming that when configuration is provided by **environmental variables or CLI arguments** the user's intention is to run the client/server together with only one bot, as if ContextMod were a monolith application. When using these configuration types the same values are passed to both the server and client to ensure interoperability and transparent usage for the operator. Some examples of this in the **operator configuration**:
|
||||
|
||||
* The **shared secret** for both client/server cannot be defined using env/cli -- at runtime a random string is generated and set as the value of `secret` on both the `api` and `web` properties.
|
||||
* The `bots` array cannot be defined using env/cli -- a single entry is generated by the configuration parser using the combined values provided from env/cli
|
||||
* The `PORT` env/cli argument only applies to the `client` web server to guarantee the default port for the `server` web server is used (so the `client` can connect to the `server`)
|
||||
|
||||
**The end result of this default behavior is that an operator who does not care about running multiple CM instances does not need to know or understand anything about the client/server architecture.**
|
||||
|
||||
## Server
|
||||
|
||||
To run a ContextMod instance as **server only (headless):**
|
||||
|
||||
* Config file -- define top-level `"mode":"server"`
|
||||
* ENV -- `MODE=server`
|
||||
* CLI - `node src/index.js run server`
|
||||
|
||||
The relevant sections of the **operator configuration** for the **Server** are:
|
||||
|
||||
* [`operator.name`](https://json-schema.app/view/%23/%23%2Fproperties%2Foperator?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FOperatorConfig.json) -- Define the reddit users who will have full access to this server regardless of moderator status
|
||||
* `api`
|
||||
|
||||
### [`api`](https://json-schema.app/view/%23/%23%2Fproperties%2Fapi?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FOperatorConfig.json)
|
||||
|
||||
* `port` - The port the Server will listen on for incoming api requests. Cannot be the same as the Client (when running on the same host)
|
||||
* `secret` - The **shared secret** that will be used to verify incoming api requests coming from an authenticated Client.
|
||||
* `friendly` - An optional string to identify this **Server** on the client. It is recommended to provide this; otherwise it will default to `host:port`
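
Putting the above together, a minimal sketch of a server-only operator config file. The credential/secret values are the same illustrative placeholders used in the earlier examples, the operator username is a placeholder, and port `8095` mirrors the "Configuring Client for Many Instances" example:

```json5
{
  // run this ContextMod instance headless (Server only)
  "mode": "server",
  "operator": {
    // reddit users with full access to this server regardless of moderator status
    "name": ["yourRedditUsername"]
  },
  "bots": [
    {
      "credentials": {
        "clientId": "f4b4df1c7b2",
        "clientSecret": "34v5q1c56ub",
        "refreshToken": "34_f1w1v4",
        "accessToken": "p75_1c467b2"
      }
    }
  ],
  "api": {
    // port the Server listens on for incoming api requests
    "port": 8095,
    // shared secret an authenticated Client must present
    "secret": "localSecret",
    // optional name shown on the Client instead of host:port
    "friendly": "myHeadlessServer"
  }
}
```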
|
||||
|
||||
## Client
|
||||
|
||||
To run a ContextMod instance as **client only:**
|
||||
|
||||
* Config file -- define top-level `"mode":"client"`
|
||||
* ENV -- `MODE=client`
|
||||
* CLI - `node src/index.js run client`
|
||||
|
||||
### [`web`](https://json-schema.app/view/%23/%23%2Fproperties%2Fweb?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FOperatorConfig.json)
|
||||
|
||||
In the **operator configuration** the top-level `web` property defines the configuration for the **Client** application.
|
||||
|
||||
* `web.credentials` -- Defines the reddit oauth credentials used to authenticate users for the web interface
|
||||
* Must contain a `redirectUri` property to work
|
||||
* Credentials are parsed from ENV/CLI when not specified (IE will be the same as the default bot)
|
||||
* `web.operators` -- Parsed from `operator.name` if not specified IE will use same users as defined for the bot operators
|
||||
* `port` -- the port the web interface will be served from, defaults to `8085`
|
||||
* `clients` -- An array of `BotConnection` objects that specify what **Server** instances the web interface should connect to. Each object should have:
|
||||
* `host` -- The URL specifying where the server api is listening ie `localhost:8085`
|
||||
* `secret` -- The **shared secret** used to sign api calls. **This should be the same as `api.secret` on the server being connected to.**
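
A minimal sketch of a client-only operator config that ties these together. Credential values are the same illustrative placeholders as above; note that `clients[].secret` matches `api.secret` on the Server being connected to:

```json5
{
  // run this ContextMod instance as the web interface only
  "mode": "client",
  "web": {
    "credentials": {
      // reddit oauth app used to authenticate users of the web interface
      "clientId": "f4b4df1c7b2",
      "clientSecret": "34v5q1c56ub",
      // required for the oauth flow to work
      "redirectUri": "http://localhost:8085/callback"
    },
    // port the web interface is served from (defaults to 8085)
    "port": 8085,
    "clients": [
      {
        // host:port where a Server api is listening
        "host": "localhost:8095",
        // must be the same as api.secret on that Server
        "secret": "localSecret"
      }
    ]
  }
}
```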
|
||||
1908 package-lock.json (generated) -- diff suppressed because it is too large
31   package.json
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "redditcontextbot",
|
||||
"version": "1.0.0",
|
||||
"version": "0.5.1",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no tests installed\" && exit 1",
|
||||
"build": "tsc",
|
||||
"start": "node server.js",
|
||||
"start": "node src/index.js run",
|
||||
"guard": "ts-auto-guard src/JsonConfig.ts",
|
||||
"schema": "npm run -s schema-app & npm run -s schema-ruleset & npm run -s schema-rule & npm run -s schema-action & npm run -s schema-config",
|
||||
"schema-app": "typescript-json-schema tsconfig.json JSONConfig --out src/Schema/App.json --required --tsNodeRegister --refs",
|
||||
@@ -26,14 +26,18 @@
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@awaitjs/express": "^0.8.0",
|
||||
"@stdlib/regexp-regexp": "^0.0.6",
|
||||
"ajv": "^7.2.4",
|
||||
"async": "^3.2.0",
|
||||
"autolinker": "^3.14.3",
|
||||
"body-parser": "^1.19.0",
|
||||
"cache-manager": "^3.4.4",
|
||||
"cache-manager-redis-store": "^2.0.0",
|
||||
"commander": "^8.0.0",
|
||||
"cookie-parser": "^1.3.5",
|
||||
"dayjs": "^1.10.5",
|
||||
"deepmerge": "^4.2.2",
|
||||
"delimiter-stream": "^3.0.1",
|
||||
"ejs": "^3.1.6",
|
||||
"env-cmd": "^10.1.0",
|
||||
"es6-error": "^4.1.1",
|
||||
@@ -43,22 +47,36 @@
|
||||
"express-socket.io-session": "^1.3.5",
|
||||
"fast-deep-equal": "^3.1.3",
|
||||
"fuse.js": "^6.4.6",
|
||||
"got": "^11.8.2",
|
||||
"he": "^1.2.0",
|
||||
"http-proxy": "^1.18.1",
|
||||
"js-yaml": "^4.1.0",
|
||||
"json5": "^2.2.0",
|
||||
"jsonwebtoken": "^8.5.1",
|
||||
"lodash": "^4.17.21",
|
||||
"lru-cache": "^6.0.0",
|
||||
"monaco-editor": "^0.27.0",
|
||||
"mustache": "^4.2.0",
|
||||
"node-fetch": "^2.6.1",
|
||||
"normalize-url": "^6.1.0",
|
||||
"object-hash": "^2.2.0",
|
||||
"p-event": "^4.2.0",
|
||||
"passport": "^0.4.1",
|
||||
"passport-custom": "^1.1.1",
|
||||
"passport-jwt": "^4.0.0",
|
||||
"pretty-print-json": "^1.0.3",
|
||||
"safe-stable-stringify": "^1.1.1",
|
||||
"snoostorm": "^1.5.2",
|
||||
"snoowrap": "^1.23.0",
|
||||
"socket.io": "^4.1.3",
|
||||
"tcp-port-used": "^1.0.2",
|
||||
"triple-beam": "^1.3.0",
|
||||
"typescript": "^4.3.4",
|
||||
"webhook-discord": "^3.7.7",
|
||||
"winston": "FoxxMD/winston#fbab8de969ecee578981c77846156c7f43b5f01e",
|
||||
"winston-daily-rotate-file": "^4.5.5",
|
||||
"winston-duplex": "^0.1.1",
|
||||
"winston-transport": "^4.4.0",
|
||||
"zlib": "^1.0.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -66,20 +84,25 @@
|
||||
"@types/async": "^3.2.7",
|
||||
"@types/cache-manager": "^3.4.2",
|
||||
"@types/cache-manager-redis-store": "^2.0.0",
|
||||
"@types/cookie-parser": "^1.4.2",
|
||||
"@types/express": "^4.17.13",
|
||||
"@types/express-session": "^1.17.4",
|
||||
"@types/express-socket.io-session": "^1.3.6",
|
||||
"@types/he": "^1.1.1",
|
||||
"@types/http-proxy": "^1.17.7",
|
||||
"@types/js-yaml": "^4.0.1",
|
||||
"@types/jsonwebtoken": "^8.5.4",
|
||||
"@types/lodash": "^4.14.171",
|
||||
"@types/lru-cache": "^5.1.1",
|
||||
"@types/memory-cache": "^0.2.1",
|
||||
"@types/minimist": "^1.2.1",
|
||||
"@types/mustache": "^4.1.1",
|
||||
"@types/node": "^15.6.1",
|
||||
"@types/node-fetch": "^2.5.10",
|
||||
"@types/object-hash": "^2.1.0",
|
||||
"@types/pako": "^1.0.1",
|
||||
"@types/passport": "^1.0.7",
|
||||
"@types/passport-jwt": "^3.0.6",
|
||||
"@types/tcp-port-used": "^1.0.0",
|
||||
"@types/triple-beam": "^1.3.2",
|
||||
"ts-auto-guard": "*",
|
||||
"ts-json-schema-generator": "^0.93.0",
|
||||
"typescript-json-schema": "^0.50.1"
|
||||
|
||||
@@ -8,26 +8,31 @@ import {Logger} from "winston";
|
||||
import {UserNoteAction, UserNoteActionJson} from "./UserNoteAction";
|
||||
import ApproveAction, {ApproveActionConfig} from "./ApproveAction";
|
||||
import BanAction, {BanActionJson} from "./BanAction";
|
||||
import {MessageAction, MessageActionJson} from "./MessageAction";
|
||||
import {SubredditResources} from "../Subreddit/SubredditResources";
|
||||
import Snoowrap from "snoowrap";
|
||||
|
||||
export function actionFactory
|
||||
(config: ActionJson, logger: Logger, subredditName: string): Action {
|
||||
(config: ActionJson, logger: Logger, subredditName: string, resources: SubredditResources, client: Snoowrap): Action {
|
||||
switch (config.kind) {
|
||||
case 'comment':
|
||||
return new CommentAction({...config as CommentActionJson, logger, subredditName});
|
||||
return new CommentAction({...config as CommentActionJson, logger, subredditName, resources, client});
|
||||
case 'lock':
|
||||
return new LockAction({...config, logger, subredditName});
|
||||
return new LockAction({...config, logger, subredditName, resources, client});
|
||||
case 'remove':
|
||||
return new RemoveAction({...config, logger, subredditName});
|
||||
return new RemoveAction({...config, logger, subredditName, resources, client});
|
||||
case 'report':
|
||||
return new ReportAction({...config as ReportActionJson, logger, subredditName});
|
||||
return new ReportAction({...config as ReportActionJson, logger, subredditName, resources, client});
|
||||
case 'flair':
|
||||
return new FlairAction({...config as FlairActionJson, logger, subredditName});
|
||||
return new FlairAction({...config as FlairActionJson, logger, subredditName, resources, client});
|
||||
case 'approve':
|
||||
return new ApproveAction({...config as ApproveActionConfig, logger, subredditName});
|
||||
return new ApproveAction({...config as ApproveActionConfig, logger, subredditName, resources, client});
|
||||
case 'usernote':
|
||||
return new UserNoteAction({...config as UserNoteActionJson, logger, subredditName});
|
||||
return new UserNoteAction({...config as UserNoteActionJson, logger, subredditName, resources, client});
|
||||
case 'ban':
|
||||
return new BanAction({...config as BanActionJson, logger, subredditName});
|
||||
return new BanAction({...config as BanActionJson, logger, subredditName, resources, client});
|
||||
case 'message':
|
||||
return new MessageAction({...config as MessageActionJson, logger, subredditName, resources, client});
|
||||
default:
|
||||
throw new Error('rule "kind" was not recognized.');
|
||||
}
|
||||
|
||||
@@ -2,23 +2,33 @@ import {ActionJson, ActionConfig} from "./index";
|
||||
import Action from "./index";
|
||||
import Snoowrap, {Comment, Submission} from "snoowrap";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {ActionProcessResult} from "../Common/interfaces";
|
||||
|
||||
export class ApproveAction extends Action {
|
||||
getKind() {
|
||||
return 'Approve';
|
||||
}
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
//snoowrap typing issue, thinks comments can't be locked
|
||||
// @ts-ignore
|
||||
if (item.approved) {
|
||||
this.logger.warn('Item is already approved');
|
||||
return {
|
||||
dryRun,
|
||||
success: false,
|
||||
result: 'Item is already approved'
|
||||
}
|
||||
}
|
||||
if (!dryRun) {
|
||||
// @ts-ignore
|
||||
await item.approve();
|
||||
}
|
||||
return {
|
||||
dryRun,
|
||||
success: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import Action from "./index";
|
||||
import Snoowrap, {Comment, Submission} from "snoowrap";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {renderContent} from "../Utils/SnoowrapUtils";
|
||||
import {Footer} from "../Common/interfaces";
|
||||
import {ActionProcessResult, Footer} from "../Common/interfaces";
|
||||
|
||||
export class BanAction extends Action {
|
||||
|
||||
@@ -33,7 +33,7 @@ export class BanAction extends Action {
|
||||
return 'Ban';
|
||||
}
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
const content = this.message === undefined ? undefined : await this.resources.getContent(this.message, item.subreddit);
|
||||
const renderedBody = content === undefined ? undefined : await renderContent(content, item, ruleResults, this.resources.userNotes);
|
||||
@@ -58,6 +58,11 @@ export class BanAction extends Action {
|
||||
duration: this.duration
|
||||
});
|
||||
}
|
||||
return {
|
||||
dryRun,
|
||||
success: true,
|
||||
result: `Banned ${item.author.name} ${durText}${this.reason !== undefined ? ` (${this.reason})` : ''}`
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,8 +2,9 @@ import Action, {ActionJson, ActionOptions} from "./index";
|
||||
import {Comment} from "snoowrap";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import {renderContent} from "../Utils/SnoowrapUtils";
|
||||
import {Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
|
||||
import {ActionProcessResult, Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {truncateStringToLength} from "../util";
|
||||
|
||||
export class CommentAction extends Action {
|
||||
content: string;
|
||||
@@ -32,7 +33,7 @@ export class CommentAction extends Action {
|
||||
return 'Comment';
|
||||
}
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
const content = await this.resources.getContent(this.content, item.subreddit);
|
||||
const body = await renderContent(content, item, ruleResults, this.resources.userNotes);
|
||||
@@ -44,7 +45,11 @@ export class CommentAction extends Action {
|
||||
|
||||
if(item.archived) {
|
||||
this.logger.warn('Cannot comment because Item is archived');
|
||||
return;
|
||||
return {
|
||||
dryRun,
|
||||
success: false,
|
||||
result: 'Cannot comment because Item is archived'
|
||||
};
|
||||
}
|
||||
let reply: Comment;
|
||||
if(!dryRun) {
|
||||
@@ -62,6 +67,19 @@ export class CommentAction extends Action {
|
||||
// @ts-ignore
|
||||
await reply.distinguish({sticky: this.sticky});
|
||||
}
|
||||
let modifiers = [];
|
||||
if(this.distinguish) {
|
||||
modifiers.push('Distinguished');
|
||||
}
|
||||
if(this.sticky) {
|
||||
modifiers.push('Stickied');
|
||||
}
|
||||
const modifierStr = modifiers.length === 0 ? '' : `[${modifiers.join(' | ')}]`;
|
||||
return {
|
||||
dryRun,
|
||||
success: true,
|
||||
result: `${modifierStr}${this.lock ? ' - Locked Author\'s Activity - ' : ''}${truncateStringToLength(100)(body)}`
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,24 +2,34 @@ import {ActionJson, ActionConfig} from "./index";
|
||||
import Action from "./index";
|
||||
import Snoowrap, {Comment, Submission} from "snoowrap";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {ActionProcessResult} from "../Common/interfaces";
|
||||
|
||||
export class LockAction extends Action {
|
||||
getKind() {
|
||||
return 'Lock';
|
||||
}
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
//snoowrap typing issue, thinks comments can't be locked
|
||||
// @ts-ignore
|
||||
if (item.locked) {
|
||||
this.logger.warn('Item is already locked');
|
||||
return {
|
||||
dryRun,
|
||||
success: false,
|
||||
result: 'Item is already locked'
|
||||
};
|
||||
}
|
||||
if (!dryRun) {
|
||||
//snoowrap typing issue, thinks comments can't be locked
|
||||
// @ts-ignore
|
||||
await item.lock();
|
||||
}
|
||||
return {
|
||||
dryRun,
|
||||
success: true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
145
src/Action/MessageAction.ts
Normal file
145
src/Action/MessageAction.ts
Normal file
@@ -0,0 +1,145 @@
|
||||
import Action, {ActionJson, ActionOptions} from "./index";
|
||||
import {Comment, ComposeMessageParams} from "snoowrap";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import {renderContent} from "../Utils/SnoowrapUtils";
|
||||
import {ActionProcessResult, Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {
|
||||
asSubmission,
|
||||
boolToString,
|
||||
isSubmission,
|
||||
parseRedditEntity,
|
||||
REDDIT_ENTITY_REGEX_URL,
|
||||
truncateStringToLength
|
||||
} from "../util";
|
||||
import SimpleError from "../Utils/SimpleError";
|
||||
|
||||
export class MessageAction extends Action {
|
||||
content: string;
|
||||
lock: boolean = false;
|
||||
sticky: boolean = false;
|
||||
distinguish: boolean = false;
|
||||
footer?: false | string;
|
||||
|
||||
title?: string;
|
||||
to?: string;
|
||||
asSubreddit: boolean;
|
||||
|
||||
constructor(options: MessageActionOptions) {
|
||||
super(options);
|
||||
const {
|
||||
content,
|
||||
asSubreddit,
|
||||
title,
|
||||
footer,
|
||||
to,
|
||||
} = options;
|
||||
this.to = to;
|
||||
this.footer = footer;
|
||||
this.content = content;
|
||||
this.asSubreddit = asSubreddit;
|
||||
this.title = title;
|
||||
}
|
||||
|
||||
getKind() {
|
||||
return 'Message';
|
||||
}
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
const content = await this.resources.getContent(this.content);
|
||||
const body = await renderContent(content, item, ruleResults, this.resources.userNotes);
|
||||
|
||||
const footer = await this.resources.generateFooter(item, this.footer);
|
||||
|
||||
const renderedContent = `${body}${footer}`;
|
||||
|
||||
let recipient = item.author.name;
|
||||
if(this.to !== undefined) {
|
||||
// parse to value
|
||||
try {
|
||||
const entityData = parseRedditEntity(this.to);
|
||||
if(entityData.type === 'user') {
|
||||
recipient = entityData.name;
|
||||
} else {
|
||||
recipient = `/r/${entityData.name}`;
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.error(`'to' field for message was not in a valid format. See ${REDDIT_ENTITY_REGEX_URL} for valid examples`);
|
||||
this.logger.error(err);
|
||||
err.logged = true;
|
||||
throw err;
|
||||
}
|
||||
if(recipient.includes('/r/') && this.asSubreddit) {
|
||||
throw new SimpleError(`Cannot send a message as a subreddit to another subreddit. Requested recipient: ${recipient}`);
|
||||
}
|
||||
}
|
||||
|
||||
const msgOpts: ComposeMessageParams = {
|
||||
to: recipient,
|
||||
text: renderedContent,
|
||||
// @ts-ignore
|
||||
fromSubreddit: this.asSubreddit ? await item.subreddit.fetch() : undefined,
|
||||
subject: this.title || `Concerning your ${isSubmission(item) ? 'Submission' : 'Comment'}`,
|
||||
};
|
||||
|
||||
const msgPreview = `\r\n
|
||||
TO: ${recipient}\r\n
|
||||
Subject: ${msgOpts.subject}\r\n
|
||||
Sent As Modmail: ${boolToString(this.asSubreddit)}\r\n\r\n
|
||||
${renderedContent}`;
|
||||
|
||||
this.logger.verbose(`Message Preview => \r\n ${msgPreview}`);
|
||||
|
||||
if (!dryRun) {
|
||||
await this.client.composeMessage(msgOpts);
|
||||
}
|
||||
return {
|
||||
dryRun,
|
||||
success: true,
|
||||
result: truncateStringToLength(200)(msgPreview)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export interface MessageActionConfig extends RequiredRichContent, Footer {
|
||||
/**
|
||||
* Should this message be sent from modmail (as the subreddit) or as the bot user?
|
||||
* */
|
||||
asSubreddit: boolean
|
||||
|
||||
/**
|
||||
* Entity to send message to.
|
||||
*
|
||||
* If not present the Message will be sent to the Author of the Activity being checked.
|
||||
*
|
||||
* Valid formats:
|
||||
*
|
||||
* * `aUserName` -- send to /u/aUserName
|
||||
* * `u/aUserName` -- send to /u/aUserName
|
||||
* * `r/aSubreddit` -- sent to modmail of /r/aSubreddit
|
||||
*
|
||||
* **Note:** Reddit does not support sending a message AS a subreddit TO another subreddit
|
||||
*
|
||||
* @pattern ^\s*(\/[ru]\/|[ru]\/)*(\w+)*\s*$
|
||||
* @examples ["aUserName","u/aUserName","r/aSubreddit"]
|
||||
* */
|
||||
to?: string
|
||||
|
||||
/**
|
||||
* The title of the message
|
||||
*
|
||||
* If not specified will be defaulted to `Concerning your [Submission/Comment]`
|
||||
* */
|
||||
title?: string
|
||||
}
|
||||
|
||||
export interface MessageActionOptions extends MessageActionConfig, ActionOptions {
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a private message to the Author of the Activity.
|
||||
* */
|
||||
export interface MessageActionJson extends MessageActionConfig, ActionJson {
|
||||
kind: 'message'
|
||||
}
|
||||
@@ -3,24 +3,33 @@ import Action from "./index";
|
||||
import Snoowrap, {Comment, Submission} from "snoowrap";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {activityIsRemoved} from "../Utils/SnoowrapUtils";
|
||||
import {ActionProcessResult} from "../Common/interfaces";
|
||||
|
||||
export class RemoveAction extends Action {
|
||||
getKind() {
|
||||
return 'Remove';
|
||||
}
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
// issue with snoowrap typings, doesn't think prop exists on Submission
|
||||
// @ts-ignore
|
||||
if (activityIsRemoved(item)) {
|
||||
this.logger.warn('Item is already removed');
|
||||
return;
|
||||
return {
|
||||
dryRun,
|
||||
success: false,
|
||||
result: 'Item is already removed',
|
||||
}
|
||||
}
|
||||
if (!dryRun) {
|
||||
// @ts-ignore
|
||||
await item.remove();
|
||||
}
|
||||
|
||||
return {
|
||||
dryRun,
|
||||
success: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ import Snoowrap, {Comment, Submission} from "snoowrap";
|
||||
import {truncateStringToLength} from "../util";
|
||||
import {renderContent} from "../Utils/SnoowrapUtils";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {RichContent} from "../Common/interfaces";
|
||||
import {ActionProcessResult, RichContent} from "../Common/interfaces";
|
||||
|
||||
// https://www.reddit.com/dev/api/oauth#POST_api_report
|
||||
// denotes 100 characters maximum
|
||||
@@ -23,7 +23,7 @@ export class ReportAction extends Action {
|
||||
return 'Report';
|
||||
}
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
const content = await this.resources.getContent(this.content, item.subreddit);
|
||||
const renderedContent = await renderContent(content, item, ruleResults, this.resources.userNotes);
|
||||
@@ -33,6 +33,12 @@ export class ReportAction extends Action {
|
||||
// @ts-ignore
|
||||
await item.report({reason: truncatedContent});
|
||||
}
|
||||
|
||||
return {
|
||||
dryRun,
|
||||
success: true,
|
||||
result: truncatedContent
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@ import {SubmissionActionConfig} from "./index";
|
||||
import Action, {ActionJson, ActionOptions} from "../index";
|
||||
import Snoowrap, {Comment, Submission} from "snoowrap";
|
||||
import {RuleResult} from "../../Rule";
|
||||
import {ActionProcessResult} from "../../Common/interfaces";
|
||||
|
||||
export class FlairAction extends Action {
|
||||
text: string;
|
||||
@@ -20,7 +21,17 @@ export class FlairAction extends Action {
|
||||
return 'Flair';
|
||||
}
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[]): Promise<void> {
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
let flairParts = [];
|
||||
if(this.text !== '') {
|
||||
flairParts.push(`Text: ${this.text}`);
|
||||
}
|
||||
if(this.css !== '') {
|
||||
flairParts.push(`CSS: ${this.css}`);
|
||||
}
|
||||
const flairSummary = flairParts.length === 0 ? 'No flair (unflaired)' : flairParts.join(' | ');
|
||||
this.logger.verbose(flairSummary);
|
||||
if (item instanceof Submission) {
|
||||
if(!this.dryRun) {
|
||||
// @ts-ignore
|
||||
@@ -28,6 +39,16 @@ export class FlairAction extends Action {
|
||||
}
|
||||
} else {
|
||||
this.logger.warn('Cannot flair Comment');
|
||||
return {
|
||||
dryRun,
|
||||
success: false,
|
||||
result: 'Cannot flair Comment',
|
||||
}
|
||||
}
|
||||
return {
|
||||
dryRun,
|
||||
success: true,
|
||||
result: flairSummary
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +0,0 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.SubmissionAction = void 0;
|
||||
const index_1 = __importDefault(require("../index"));
|
||||
class SubmissionAction extends index_1.default {
|
||||
}
|
||||
exports.SubmissionAction = SubmissionAction;
|
||||
//# sourceMappingURL=index.js.map
|
||||
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"index.js","sourceRoot":"","sources":["index.ts"],"names":[],"mappings":";;;;;;AAAA,qDAA8C;AAE9C,MAAsB,gBAAiB,SAAQ,eAAM;CAEpD;AAFD,4CAEC"}
|
||||
@@ -5,6 +5,7 @@ import {renderContent} from "../Utils/SnoowrapUtils";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {UserNote, UserNoteJson} from "../Subreddit/UserNotes";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import {ActionProcessResult} from "../Common/interfaces";
|
||||
|
||||
|
||||
export class UserNoteAction extends Action {
|
||||
@@ -24,7 +25,7 @@ export class UserNoteAction extends Action {
|
||||
return 'User Note';
|
||||
}
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
const content = await this.resources.getContent(this.content, item.subreddit);
|
||||
const renderedContent = await renderContent(content, item, ruleResults, this.resources.userNotes);
|
||||
@@ -35,7 +36,11 @@ export class UserNoteAction extends Action {
|
||||
const existingNote = notes.find((x) => x.link.includes(item.id));
|
||||
if (existingNote) {
|
||||
this.logger.info(`Will not add note because one already exists for this Activity (${existingNote.time.local().format()}) and allowDuplicate=false`);
|
||||
return;
|
||||
return {
|
||||
dryRun,
|
||||
success: false,
|
||||
result: `Will not add note because one already exists for this Activity (${existingNote.time.local().format()}) and allowDuplicate=false`
|
||||
};
|
||||
}
|
||||
}
|
||||
if (!dryRun) {
|
||||
@@ -43,6 +48,11 @@ export class UserNoteAction extends Action {
|
||||
} else if (!await this.resources.userNotes.warningExists(this.type)) {
|
||||
this.logger.warn(`UserNote type '${this.type}' does not exist. If you meant to use this please add it through Toolbox first.`);
|
||||
}
|
||||
return {
|
||||
success: true,
|
||||
dryRun,
|
||||
result: `(${this.type}) ${renderedContent}`
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,22 +1,28 @@
|
||||
import {Comment, Submission} from "snoowrap";
|
||||
import Snoowrap, {Comment, Submission} from "snoowrap";
|
||||
import {Logger} from "winston";
|
||||
import {RuleResult} from "../Rule";
|
||||
import ResourceManager, {SubredditResources} from "../Subreddit/SubredditResources";
|
||||
import {ChecksActivityState, TypedActivityStates} from "../Common/interfaces";
|
||||
import {SubredditResources} from "../Subreddit/SubredditResources";
|
||||
import {ActionProcessResult, ActionResult, ChecksActivityState, TypedActivityStates} from "../Common/interfaces";
|
||||
import Author, {AuthorOptions} from "../Author/Author";
|
||||
import {isItem} from "../Utils/SnoowrapUtils";
|
||||
import {mergeArr} from "../util";
|
||||
import LoggedError from "../Utils/LoggedError";
|
||||
|
||||
export abstract class Action {
|
||||
name?: string;
|
||||
logger: Logger;
|
||||
resources: SubredditResources;
|
||||
client: Snoowrap
|
||||
authorIs: AuthorOptions;
|
||||
itemIs: TypedActivityStates;
|
||||
dryRun: boolean;
|
||||
enabled: boolean;
|
||||
|
||||
constructor(options: ActionOptions) {
|
||||
const {
|
||||
enable = true,
|
||||
name = this.getKind(),
|
||||
resources,
|
||||
client,
|
||||
logger,
|
||||
subredditName,
|
||||
dryRun = false,
|
||||
@@ -29,8 +35,10 @@ export abstract class Action {
|
||||
|
||||
this.name = name;
|
||||
this.dryRun = dryRun;
|
||||
this.resources = ResourceManager.get(subredditName) as SubredditResources;
|
||||
this.logger = logger.child({labels: [`Action ${this.getActionUniqueName()}`]});
|
||||
this.enabled = enable;
|
||||
this.resources = resources;
|
||||
this.client = client;
|
||||
this.logger = logger.child({labels: [`Action ${this.getActionUniqueName()}`]}, mergeArr);
|
||||
|
||||
this.authorIs = {
|
||||
exclude: exclude.map(x => new Author(x)),
|
||||
@@ -46,52 +54,68 @@ export abstract class Action {
|
||||
return this.name === this.getKind() ? this.getKind() : `${this.getKind()} - ${this.name}`;
|
||||
}
|
||||
|
||||
async handle(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
|
||||
async handle(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
let actionRun = false;
|
||||
const [itemPass, crit] = isItem(item, this.itemIs, this.logger);
|
||||
if (!itemPass) {
|
||||
this.logger.verbose(`Activity did not pass 'itemIs' test, Action not run`);
|
||||
return;
|
||||
}
|
||||
const authorRun = async () => {
|
||||
|
||||
let actRes: ActionResult = {
|
||||
kind: this.getKind(),
|
||||
name: this.getActionUniqueName(),
|
||||
run: false,
|
||||
dryRun,
|
||||
success: false,
|
||||
};
|
||||
try {
|
||||
const itemPass = await this.resources.testItemCriteria(item, this.itemIs);
|
||||
if (!itemPass) {
|
||||
this.logger.verbose(`Activity did not pass 'itemIs' test, Action not run`);
|
||||
actRes.runReason = `Activity did not pass 'itemIs' test, Action not run`;
|
||||
return actRes;
|
||||
}
|
||||
if (this.authorIs.include !== undefined && this.authorIs.include.length > 0) {
|
||||
for (const auth of this.authorIs.include) {
|
||||
if (await this.resources.testAuthorCriteria(item, auth)) {
|
||||
await this.process(item, ruleResults, runtimeDryrun);
|
||||
return true;
|
||||
actRes.run = true;
|
||||
const results = await this.process(item, ruleResults, runtimeDryrun);
|
||||
return {...actRes, ...results};
|
||||
}
|
||||
}
|
||||
this.logger.verbose('Inclusive author criteria not matched, Action not run');
|
||||
return false;
|
||||
}
|
||||
if (!actionRun && this.authorIs.exclude !== undefined && this.authorIs.exclude.length > 0) {
|
||||
actRes.runReason = 'Inclusive author criteria not matched';
|
||||
return actRes;
|
||||
} else if (this.authorIs.exclude !== undefined && this.authorIs.exclude.length > 0) {
|
||||
for (const auth of this.authorIs.exclude) {
|
||||
if (await this.resources.testAuthorCriteria(item, auth, false)) {
|
||||
await this.process(item, ruleResults, runtimeDryrun);
|
||||
return true;
|
||||
actRes.run = true;
|
||||
const results = await this.process(item, ruleResults, runtimeDryrun);
|
||||
return {...actRes, ...results};
|
||||
}
|
||||
}
|
||||
this.logger.verbose('Exclusive author criteria not matched, Action not run');
|
||||
return false;
|
||||
actRes.runReason = 'Exclusive author criteria not matched';
|
||||
return actRes;
|
||||
}
|
||||
return null;
|
||||
|
||||
actRes.run = true;
|
||||
const results = await this.process(item, ruleResults, runtimeDryrun);
|
||||
return {...actRes, ...results};
|
||||
} catch (err) {
|
||||
if(!(err instanceof LoggedError)) {
|
||||
this.logger.error(`Encountered error while running`, err);
|
||||
}
|
||||
actRes.success = false;
|
||||
actRes.result = err.message;
|
||||
return actRes;
|
||||
}
|
||||
const authorRunResults = await authorRun();
|
||||
if (null === authorRunResults) {
|
||||
await this.process(item, ruleResults, runtimeDryrun);
|
||||
} else if (!authorRunResults) {
|
||||
return;
|
||||
}
|
||||
this.logger.verbose(`${dryRun ? 'DRYRUN - ' : ''}Done`);
|
||||
}
|
||||
|
||||
abstract process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryun?: boolean): Promise<void>;
|
||||
abstract process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryun?: boolean): Promise<ActionProcessResult>;
|
||||
}
|
||||
|
||||
export interface ActionOptions extends ActionConfig {
|
||||
logger: Logger;
|
||||
subredditName: string;
|
||||
resources: SubredditResources
|
||||
client: Snoowrap
|
||||
}
|
||||
|
||||
export interface ActionConfig extends ChecksActivityState {
|
||||
@@ -124,13 +148,21 @@ export interface ActionConfig extends ChecksActivityState {
|
||||
*
|
||||
* */
|
||||
itemIs?: TypedActivityStates
|
||||
|
||||
/**
|
||||
* If set to `false` the Action will not be run
|
||||
*
|
||||
* @default true
|
||||
* @examples [true]
|
||||
* */
|
||||
enable?: boolean
|
||||
}
|
||||
|
||||
export interface ActionJson extends ActionConfig {
|
||||
/**
|
||||
* The type of action that will be performed
|
||||
*/
|
||||
kind: 'comment' | 'lock' | 'remove' | 'report' | 'approve' | 'ban' | 'flair' | 'usernote'
|
||||
kind: 'comment' | 'lock' | 'remove' | 'report' | 'approve' | 'ban' | 'flair' | 'usernote' | 'message'
|
||||
}
|
||||
|
||||
export const isActionJson = (obj: object): obj is ActionJson => {
|
||||
|
||||
478
src/App.ts
478
src/App.ts
@@ -1,462 +1,94 @@
|
||||
import Snoowrap, {Subreddit} from "snoowrap";
|
||||
import {Manager} from "./Subreddit/Manager";
|
||||
import winston, {Logger} from "winston";
|
||||
import {
|
||||
argParseInt,
|
||||
createRetryHandler, formatNumber,
|
||||
labelledFormat, logLevels,
|
||||
parseBool, parseDuration,
|
||||
parseFromJsonOrYamlToObject,
|
||||
parseSubredditName,
|
||||
sleep
|
||||
} from "./util";
|
||||
import pEvent from "p-event";
|
||||
import EventEmitter from "events";
|
||||
import CacheManager from './Subreddit/SubredditResources';
|
||||
import dayjs, {Dayjs} from "dayjs";
|
||||
import LoggedError from "./Utils/LoggedError";
|
||||
import {ProxiedSnoowrap, RequestTrackingSnoowrap} from "./Utils/SnoowrapClients";
|
||||
import {ModQueueStream, UnmoderatedStream} from "./Subreddit/Streams";
|
||||
import {getLogger} from "./Utils/loggerFactory";
|
||||
import {DurationString, OperatorConfig, PAUSED, RUNNING, STOPPED, SYSTEM, USER} from "./Common/interfaces";
|
||||
import { Duration } from "dayjs/plugin/duration";
|
||||
|
||||
const {transports} = winston;
|
||||
|
||||
const snooLogWrapper = (logger: Logger) => {
|
||||
return {
|
||||
warn: (...args: any[]) => logger.warn(args.slice(0, 2).join(' '), [args.slice(2)]),
|
||||
debug: (...args: any[]) => logger.debug(args.slice(0, 2).join(' '), [args.slice(2)]),
|
||||
info: (...args: any[]) => logger.info(args.slice(0, 2).join(' '), [args.slice(2)]),
|
||||
trace: (...args: any[]) => logger.debug(args.slice(0, 2).join(' '), [args.slice(2)]),
|
||||
}
|
||||
}
|
||||
import {Invokee, OperatorConfig} from "./Common/interfaces";
|
||||
import Bot from "./Bot";
|
||||
import LoggedError from "./Utils/LoggedError";
|
||||
|
||||
export class App {
|
||||
|
||||
client: Snoowrap;
|
||||
subreddits: string[];
|
||||
subManagers: Manager[] = [];
|
||||
bots: Bot[]
|
||||
logger: Logger;
|
||||
wikiLocation: string;
|
||||
dryRun?: true | undefined;
|
||||
heartbeatInterval: number;
|
||||
nextHeartbeat?: Dayjs;
|
||||
heartBeating: boolean = false;
|
||||
//apiLimitWarning: number;
|
||||
softLimit: number | string = 250;
|
||||
hardLimit: number | string = 50;
|
||||
nannyMode?: 'soft' | 'hard';
|
||||
nextExpiration!: Dayjs;
|
||||
botName?: string;
|
||||
startedAt: Dayjs = dayjs();
|
||||
sharedModqueue: boolean = false;
|
||||
|
||||
apiSample: number[] = [];
|
||||
interval: any;
|
||||
apiRollingAvg: number = 0;
|
||||
apiEstDepletion?: Duration;
|
||||
depletedInSecs: number = 0;
|
||||
error: any;
|
||||
|
||||
constructor(config: OperatorConfig) {
|
||||
const {
|
||||
subreddits: {
|
||||
names = [],
|
||||
wikiConfig,
|
||||
dryRun,
|
||||
heartbeatInterval,
|
||||
operator: {
|
||||
name,
|
||||
},
|
||||
credentials: {
|
||||
clientId,
|
||||
clientSecret,
|
||||
refreshToken,
|
||||
accessToken,
|
||||
},
|
||||
snoowrap: {
|
||||
proxy,
|
||||
debug,
|
||||
},
|
||||
polling: {
|
||||
sharedMod,
|
||||
},
|
||||
caching: {
|
||||
authorTTL,
|
||||
provider: {
|
||||
store
|
||||
}
|
||||
},
|
||||
api: {
|
||||
softLimit,
|
||||
hardLimit,
|
||||
}
|
||||
notifications,
|
||||
bots = [],
|
||||
} = config;
|
||||
|
||||
CacheManager.setDefaultsFromConfig(config);
|
||||
|
||||
this.dryRun = parseBool(dryRun) === true ? true : undefined;
|
||||
this.heartbeatInterval = heartbeatInterval;
|
||||
//this.apiLimitWarning = argParseInt(apiLimitWarning);
|
||||
this.softLimit = softLimit;
|
||||
this.hardLimit = hardLimit;
|
||||
this.wikiLocation = wikiConfig;
|
||||
this.sharedModqueue = sharedMod;
|
||||
|
||||
this.logger = getLogger(config.logging);
|
||||
|
||||
if (this.dryRun) {
|
||||
this.logger.info('Running in DRYRUN mode');
|
||||
}
|
||||
this.logger.info(`Operators: ${name.length === 0 ? 'None Specified' : name.join(', ')}`)
|
||||
|
||||
this.subreddits = names.map(parseSubredditName);
|
||||
this.bots = bots.map(x => new Bot(x, this.logger));
|
||||
|
||||
const creds = {
|
||||
userAgent: `web:contextBot:dev`,
|
||||
clientId,
|
||||
clientSecret,
|
||||
refreshToken,
|
||||
accessToken,
|
||||
};
|
||||
|
||||
const missingCreds = [];
|
||||
for(const [k,v] of Object.entries(creds)) {
|
||||
if(v === undefined || v === '' || v === null) {
|
||||
missingCreds.push(k);
|
||||
}
|
||||
}
|
||||
if(missingCreds.length > 0) {
|
||||
this.logger.error('There are credentials missing that would prevent initializing the Reddit API Client and subsequently the rest of the application');
|
||||
this.logger.error(`Missing credentials: ${missingCreds.join(', ')}`)
|
||||
this.logger.info(`If this is a first-time setup use the 'web' command for a web-based guide to configuring your application`);
|
||||
this.logger.info(`Or check the USAGE section of the readme for the correct naming of these arguments/environment variables`);
|
||||
throw new LoggedError(`Missing credentials: ${missingCreds.join(', ')}`);
|
||||
}
|
||||
|
||||
this.client = proxy === undefined ? new Snoowrap(creds) : new ProxiedSnoowrap({...creds, proxy});
|
||||
this.client.config({
|
||||
warnings: true,
|
||||
maxRetryAttempts: 5,
|
||||
debug,
|
||||
logger: snooLogWrapper(this.logger.child({labels: ['Snoowrap']})),
|
||||
continueAfterRatelimitError: true,
|
||||
process.on('uncaughtException', (e) => {
|
||||
this.error = e;
|
||||
});
|
||||
|
||||
const retryHandler = createRetryHandler({maxRequestRetry: 8, maxOtherRetry: 1}, this.logger);
|
||||
|
||||
const modStreamErrorListener = (name: string) => async (err: any) => {
|
||||
this.logger.error('Polling error occurred', err);
|
||||
const shouldRetry = await retryHandler(err);
|
||||
if(shouldRetry) {
|
||||
defaultUnmoderatedStream.startInterval();
|
||||
} else {
|
||||
for(const m of this.subManagers) {
|
||||
if(m.modStreamCallbacks.size > 0) {
|
||||
m.notificationManager.handle('runStateChanged', `${name.toUpperCase()} Polling Stopped`, 'Encountered too many errors from Reddit while polling. Will try to restart on next heartbeat.');
|
||||
}
|
||||
process.on('unhandledRejection', (e) => {
|
||||
this.error = e;
|
||||
});
|
||||
process.on('exit', async (code) => {
|
||||
if(code === 0) {
|
||||
await this.onTerminate();
|
||||
} else if(this.error !== undefined) {
|
||||
let errMsg;
|
||||
if(typeof this.error === 'object' && this.error.message !== undefined) {
|
||||
errMsg = this.error.message;
|
||||
} else if(typeof this.error === 'string') {
|
||||
errMsg = this.error;
|
||||
}
|
||||
this.logger.error(`Mod stream ${name.toUpperCase()} encountered too many errors while polling. Will try to restart on next heartbeat.`);
|
||||
await this.onTerminate(`Application exited due to an unexpected error${errMsg !== undefined ? `: ${errMsg}` : ''}`);
|
||||
} else {
|
||||
await this.onTerminate(`Application exited with unclean exit signal (${code})`);
|
||||
}
|
||||
}
|
||||
|
||||
const defaultUnmoderatedStream = new UnmoderatedStream(this.client, {subreddit: 'mod'});
|
||||
// @ts-ignore
|
||||
defaultUnmoderatedStream.on('error', modStreamErrorListener('unmoderated'));
|
||||
const defaultModqueueStream = new ModQueueStream(this.client, {subreddit: 'mod'});
|
||||
// @ts-ignore
|
||||
defaultModqueueStream.on('error', modStreamErrorListener('modqueue'));
|
||||
CacheManager.modStreams.set('unmoderated', defaultUnmoderatedStream);
|
||||
CacheManager.modStreams.set('modqueue', defaultModqueueStream);
|
||||
|
||||
const onTerm = () => {
|
||||
for(const m of this.subManagers) {
|
||||
m.notificationManager.handle('runStateChanged', 'Application Shutdown', 'The application was shutdown');
|
||||
}
|
||||
}
|
||||
|
||||
process.on('SIGTERM', () => {
|
||||
onTerm();
|
||||
});
|
||||
}
|
||||
|
||||
async testClient() {
|
||||
try {
|
||||
// @ts-ignore
|
||||
await this.client.getMe();
|
||||
this.logger.info('Test API call successful');
|
||||
} catch (err) {
|
||||
this.logger.error('An error occurred while trying to initialize the Reddit API Client which would prevent the entire application from running.');
|
||||
if(err.name === 'StatusCodeError') {
|
||||
const authHeader = err.response.headers['www-authenticate'];
|
||||
if (authHeader !== undefined && authHeader.includes('insufficient_scope')) {
|
||||
this.logger.error('Reddit responded with a 403 insufficient_scope. Please ensure you have chosen the correct scopes when authorizing your account.');
|
||||
} else if(err.statusCode === 401) {
|
||||
this.logger.error('It is likely a credential is missing or incorrect. Check clientId, clientSecret, refreshToken, and accessToken');
|
||||
}
|
||||
this.logger.error(`Error Message: ${err.message}`);
|
||||
} else {
|
||||
this.logger.error(err);
|
||||
async onTerminate(reason = 'The application was shutdown') {
|
||||
for(const b of this.bots) {
|
||||
for(const m of b.subManagers) {
|
||||
await m.notificationManager.handle('runStateChanged', 'Application Shutdown', reason);
|
||||
}
|
||||
err.logged = true;
|
||||
throw err;
|
||||
//await b.notificationManager.handle('runStateChanged', 'Application Shutdown', reason);
|
||||
}
|
||||
}
|
||||
|
||||
async buildManagers(subreddits: string[] = []) {
|
||||
let availSubs = [];
|
||||
const name = await this.client.getMe().name;
|
||||
this.logger.info(`Reddit API Limit Remaining: ${this.client.ratelimitRemaining}`);
|
||||
this.logger.info(`Authenticated Account: /u/${name}`);
|
||||
this.botName = name;
|
||||
for (const sub of await this.client.getModeratedSubreddits()) {
|
||||
// TODO don't know a way to check permissions yet
|
||||
availSubs.push(sub);
|
||||
}
|
||||
this.logger.info(`/u/${name} is a moderator of these subreddits: ${availSubs.map(x => x.display_name_prefixed).join(', ')}`);
|
||||
|
||||
let subsToRun: Subreddit[] = [];
|
||||
const subsToUse = subreddits.length > 0 ? subreddits.map(parseSubredditName) : this.subreddits;
|
||||
if (subsToUse.length > 0) {
|
||||
this.logger.info(`Operator-defined subreddit constraints detected (CLI argument or environmental variable), will try to run on: ${subsToUse.join(', ')}`);
|
||||
for (const sub of subsToUse) {
|
||||
const asub = availSubs.find(x => x.display_name.toLowerCase() === sub.toLowerCase())
|
||||
if (asub === undefined) {
|
||||
this.logger.warn(`Will not run on ${sub} because is not modded by, or does not have appropriate permissions to mod with, for this client.`);
|
||||
} else {
|
||||
// @ts-ignore
|
||||
const fetchedSub = await asub.fetch();
|
||||
subsToRun.push(fetchedSub);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// otherwise assume all moddable subs from client should be run on
|
||||
this.logger.info('No user-defined subreddit constraints detected, will try to run on all');
|
||||
subsToRun = availSubs;
|
||||
}
|
||||
|
||||
let subSchedule: Manager[] = [];
|
||||
// get configs for subs we want to run on and build/validate them
|
||||
for (const sub of subsToRun) {
|
||||
const manager = new Manager(sub, this.client, this.logger, {dryRun: this.dryRun, sharedModqueue: this.sharedModqueue});
|
||||
try {
|
||||
await manager.parseConfiguration('system', true, {suppressNotification: true});
|
||||
} catch (err) {
|
||||
if (!(err instanceof LoggedError)) {
|
||||
this.logger.error(`Config was not valid:`, {subreddit: sub.display_name_prefixed});
|
||||
this.logger.error(err, {subreddit: sub.display_name_prefixed});
|
||||
}
|
||||
}
|
||||
subSchedule.push(manager);
|
||||
}
|
||||
this.subManagers = subSchedule;
|
||||
}
|
||||
|
||||
async heartbeat() {
|
||||
try {
|
||||
this.heartBeating = true;
|
||||
while (true) {
|
||||
this.nextHeartbeat = dayjs().add(this.heartbeatInterval, 'second');
|
||||
await sleep(this.heartbeatInterval * 1000);
|
||||
const heartbeat = `HEARTBEAT -- API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ~${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion === undefined ? 'N/A' : this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`
|
||||
this.logger.info(heartbeat);
|
||||
for (const s of this.subManagers) {
|
||||
if(s.botState.state === STOPPED && s.botState.causedBy === USER) {
|
||||
this.logger.debug('Skipping config check/restart on heartbeat due to previously being stopped by user', {subreddit: s.displayLabel});
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const newConfig = await s.parseConfiguration();
|
||||
if(newConfig || (s.queueState.state !== RUNNING && s.queueState.causedBy === SYSTEM))
|
||||
{
|
||||
await s.startQueue('system', {reason: newConfig ? 'Config updated on heartbeat triggered reload' : 'Heartbeat detected non-running queue'});
|
||||
async initBots(causedBy: Invokee = 'system') {
|
||||
for (const b of this.bots) {
|
||||
if (b.error === undefined) {
|
||||
try {
|
||||
await b.testClient();
|
||||
await b.buildManagers();
|
||||
b.runManagers(causedBy).catch((err) => {
|
||||
this.logger.error(`Unexpected error occurred while running Bot ${b.botName}. Bot must be re-built to restart`);
|
||||
if (!err.logged || !(err instanceof LoggedError)) {
|
||||
this.logger.error(err);
|
||||
}
|
||||
if(newConfig || (s.eventsState.state !== RUNNING && s.eventsState.causedBy === SYSTEM))
|
||||
{
|
||||
await s.startEvents('system', {reason: newConfig ? 'Config updated on heartbeat triggered reload' : 'Heartbeat detected non-running events'});
|
||||
}
|
||||
if(s.botState.state !== RUNNING && s.eventsState.state === RUNNING && s.queueState.state === RUNNING) {
|
||||
s.botState = {
|
||||
state: RUNNING,
|
||||
causedBy: 'system',
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.info('Stopping event polling to prevent activity processing queue from backing up. Will be restarted when config update succeeds.')
|
||||
await s.stopEvents('system', {reason: 'Invalid config will cause events to pile up in queue. Will be restarted when config update succeeds (next heartbeat).'});
|
||||
if(!(err instanceof LoggedError)) {
|
||||
this.logger.error(err, {subreddit: s.displayLabel});
|
||||
}
|
||||
if(this.nextHeartbeat !== undefined) {
|
||||
this.logger.info(`Will retry parsing config on next heartbeat (in ${dayjs.duration(this.nextHeartbeat.diff(dayjs())).humanize()})`, {subreddit: s.displayLabel});
|
||||
}
|
||||
}
|
||||
}
|
||||
await this.runModStreams(true);
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.error('Error occurred during heartbeat', err);
|
||||
throw err;
|
||||
} finally {
|
||||
this.nextHeartbeat = undefined;
|
||||
this.heartBeating = false;
|
||||
}
|
||||
}
|
||||
|
||||
async runModStreams(notify = false) {
|
||||
for(const [k,v] of CacheManager.modStreams) {
|
||||
if(!v.running && v.listeners('item').length > 0) {
|
||||
v.startInterval();
|
||||
this.logger.info(`Starting default ${k.toUpperCase()} mod stream`);
|
||||
if(notify) {
|
||||
for(const m of this.subManagers) {
|
||||
if(m.modStreamCallbacks.size > 0) {
|
||||
m.notificationManager.handle('runStateChanged', `${k.toUpperCase()} Polling Started`, 'Polling was successfully restarted on heartbeat.');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async runManagers() {
|
||||
if(this.subManagers.every(x => !x.validConfigLoaded)) {
|
||||
this.logger.warn('All managers have invalid configs!');
|
||||
}
|
||||
for (const manager of this.subManagers) {
|
||||
if (manager.validConfigLoaded && manager.botState.state !== RUNNING) {
|
||||
await manager.start('system', {reason: 'Caused by application startup'});
|
||||
}
|
||||
}
|
||||
|
||||
await this.runModStreams();
|
||||
|
||||
if (this.heartbeatInterval !== 0 && !this.heartBeating) {
|
||||
this.heartbeat();
|
||||
}
|
||||
this.runApiNanny();
|
||||
|
||||
const emitter = new EventEmitter();
|
||||
await pEvent(emitter, 'end');
|
||||
}
|
||||
|
||||
async runApiNanny() {
|
||||
while(true) {
|
||||
await sleep(10000);
|
||||
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
|
||||
const nowish = dayjs().add(10, 'second');
|
||||
if(nowish.isAfter(this.nextExpiration)) {
|
||||
// it's possible no api calls are being made because of a hard limit
|
||||
// need to make an api call to update this
|
||||
// @ts-ignore
|
||||
await this.client.getMe();
|
||||
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
|
||||
}
|
||||
const rollingSample = this.apiSample.slice(0, 7)
|
||||
rollingSample.unshift(this.client.ratelimitRemaining);
|
||||
this.apiSample = rollingSample;
|
||||
const diff = this.apiSample.reduceRight((acc: number[], curr, index) => {
|
||||
if(this.apiSample[index + 1] !== undefined) {
|
||||
const d = Math.abs(curr - this.apiSample[index + 1]);
|
||||
if(d === 0) {
|
||||
return [...acc, 0];
|
||||
}
|
||||
return [...acc, d/10];
|
||||
}
|
||||
return acc;
|
||||
}, []);
|
||||
this.apiRollingAvg = diff.reduce((acc, curr) => acc + curr,0) / diff.length; // api requests per second
|
||||
this.depletedInSecs = this.client.ratelimitRemaining / this.apiRollingAvg; // number of seconds until current remaining limit is 0
|
||||
this.apiEstDepletion = dayjs.duration({seconds: this.depletedInSecs});
|
||||
this.logger.debug(`API Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`);
|
||||
|
||||
|
||||
let hardLimitHit = false;
|
||||
if(typeof this.hardLimit === 'string') {
|
||||
const hardDur = parseDuration(this.hardLimit);
|
||||
hardLimitHit = hardDur.asSeconds() > this.apiEstDepletion.asSeconds();
|
||||
} else {
|
||||
hardLimitHit = this.hardLimit > this.client.ratelimitRemaining;
|
||||
}
|
||||
|
||||
if(hardLimitHit) {
|
||||
if(this.nannyMode === 'hard') {
|
||||
continue;
|
||||
}
|
||||
this.logger.info(`Detected HARD LIMIT of ${this.hardLimit} remaining`, {leaf: 'Api Nanny'});
|
||||
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${this.apiRollingAvg}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
|
||||
this.logger.info(`All subreddit event polling has been paused`, {leaf: 'Api Nanny'});
|
||||
|
||||
for(const m of this.subManagers) {
|
||||
m.pauseEvents('system');
|
||||
m.notificationManager.handle('runStateChanged', 'Hard Limit Triggered', `Hard Limit of ${this.hardLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit event polling has been paused.`, 'system', 'warn');
|
||||
}
|
||||
|
||||
this.nannyMode = 'hard';
|
||||
continue;
|
||||
}
|
||||
|
||||
let softLimitHit = false;
|
||||
if(typeof this.softLimit === 'string') {
|
||||
const softDur = parseDuration(this.softLimit);
|
||||
softLimitHit = softDur.asSeconds() > this.apiEstDepletion.asSeconds();
|
||||
} else {
|
||||
softLimitHit = this.softLimit > this.client.ratelimitRemaining;
|
||||
}
|
||||
|
||||
if(softLimitHit) {
|
||||
if(this.nannyMode === 'soft') {
|
||||
continue;
|
||||
}
|
||||
this.logger.info(`Detected SOFT LIMIT of ${this.softLimit} remaining`, {leaf: 'Api Nanny'});
|
||||
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
|
||||
this.logger.info('Trying to detect heavy usage subreddits...', {leaf: 'Api Nanny'});
|
||||
let threshold = 0.5;
|
||||
let offenders = this.subManagers.filter(x => {
|
||||
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
|
||||
return combinedPerSec > threshold;
|
||||
});
|
||||
if(offenders.length === 0) {
|
||||
threshold = 0.25;
|
||||
// reduce threshold
|
||||
offenders = this.subManagers.filter(x => {
|
||||
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
|
||||
return combinedPerSec > threshold;
|
||||
});
|
||||
}
|
||||
|
||||
if(offenders.length > 0) {
|
||||
this.logger.info(`Slowing subreddits using > ${threshold}req/s:`, {leaf: 'Api Nanny'});
|
||||
for(const m of offenders) {
|
||||
m.delayBy = 1.5;
|
||||
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
|
||||
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
|
||||
}
} else {
this.logger.info(`Couldn't detect specific offenders, slowing all...`, {leaf: 'Api Nanny'});
for(const m of this.subManagers) {
m.delayBy = 1.5;
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
}
}
|
||||
this.nannyMode = 'soft';
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
if(this.nannyMode !== undefined) {
|
||||
this.logger.info('Turning off due to better conditions...', {leaf: 'Api Nanny'});
|
||||
for(const m of this.subManagers) {
|
||||
if(m.delayBy !== undefined) {
|
||||
m.delayBy = undefined;
|
||||
m.notificationManager.handle('runStateChanged', 'Normal Processing Resumed', 'Slow Mode has been turned off due to better API conditions', 'system');
|
||||
}
|
||||
if(m.queueState.state === PAUSED && m.queueState.causedBy === SYSTEM) {
|
||||
m.startQueue('system', {reason: 'API Nanny has been turned off due to better API conditions'});
|
||||
}
|
||||
if(m.eventsState.state === PAUSED && m.eventsState.causedBy === SYSTEM) {
|
||||
await m.startEvents('system', {reason: 'API Nanny has been turned off due to better API conditions'});
|
||||
}
|
||||
}
|
||||
this.nannyMode = undefined;
|
||||
}
}
}
|
||||
async destroy(causedBy: Invokee) {
|
||||
this.logger.info('Stopping all bots...');
|
||||
for(const b of this.bots) {
|
||||
await b.destroy(causedBy);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -65,7 +65,7 @@ export interface AuthorCriteria {
*
* See https://regexr.com/609n8 for an example
*
* @pattern ^\s*(?<opStr>>|>=|<|<=)\s*(?<time>\d+)\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\s*$
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\s*$
* */
age?: DurationComparor
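
For reference, a minimal sketch (not part of the diff) of what the @pattern annotations above accept; it copies the new, non-named-group pattern from this hunk and tests a few hypothetical config values:

// --- illustrative sketch, not part of the diff ---
const agePattern = /^\s*(>|>=|<|<=)\s*(\d+)\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\s*$/;
for (const candidate of ['> 30 days', '>= 1 year', '< 2 weeks', 'about a month']) {
    // the first three pass validation, the last one fails
    console.log(candidate, agePattern.test(candidate));
}
// --- end sketch ---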
|
||||
|
||||
|
||||
src/Bot/index.ts (new file, 576 lines)
@@ -0,0 +1,576 @@
|
||||
import Snoowrap, {Subreddit} from "snoowrap";
|
||||
import {Logger} from "winston";
|
||||
import dayjs, {Dayjs} from "dayjs";
|
||||
import {Duration} from "dayjs/plugin/duration";
|
||||
import EventEmitter from "events";
|
||||
import {BotInstanceConfig, Invokee, PAUSED, RUNNING, STOPPED, SYSTEM, USER} from "../Common/interfaces";
|
||||
import {
|
||||
createRetryHandler,
|
||||
formatNumber,
|
||||
mergeArr,
|
||||
parseBool,
|
||||
parseDuration,
|
||||
parseSubredditName,
|
||||
sleep,
|
||||
snooLogWrapper
|
||||
} from "../util";
|
||||
import {Manager} from "../Subreddit/Manager";
|
||||
import {ProxiedSnoowrap} from "../Utils/SnoowrapClients";
|
||||
import {ModQueueStream, UnmoderatedStream} from "../Subreddit/Streams";
|
||||
import {BotResourcesManager} from "../Subreddit/SubredditResources";
|
||||
import LoggedError from "../Utils/LoggedError";
|
||||
import pEvent from "p-event";
|
||||
|
||||
|
||||
class Bot {
|
||||
|
||||
client!: Snoowrap;
|
||||
logger!: Logger;
|
||||
wikiLocation: string;
|
||||
dryRun?: true | undefined;
|
||||
running: boolean = false;
|
||||
subreddits: string[];
|
||||
excludeSubreddits: string[];
|
||||
subManagers: Manager[] = [];
|
||||
heartbeatInterval: number;
|
||||
nextHeartbeat: Dayjs = dayjs();
|
||||
heartBeating: boolean = false;
|
||||
|
||||
softLimit: number | string = 250;
|
||||
hardLimit: number | string = 50;
|
||||
nannyMode?: 'soft' | 'hard';
|
||||
nannyRunning: boolean = false;
|
||||
nextNannyCheck: Dayjs = dayjs().add(10, 'second');
|
||||
nannyRetryHandler: Function;
|
||||
nextExpiration: Dayjs = dayjs();
|
||||
botName?: string;
|
||||
botLink?: string;
|
||||
botAccount?: string;
|
||||
maxWorkers: number;
|
||||
startedAt: Dayjs = dayjs();
|
||||
sharedModqueue: boolean = false;
|
||||
|
||||
apiSample: number[] = [];
|
||||
apiRollingAvg: number = 0;
|
||||
apiEstDepletion?: Duration;
|
||||
depletedInSecs: number = 0;
|
||||
|
||||
error: any;
|
||||
emitter: EventEmitter = new EventEmitter();
|
||||
|
||||
cacheManager: BotResourcesManager;
|
||||
|
||||
getBotName = () => {
|
||||
return this.botName;
|
||||
}
|
||||
|
||||
getUserAgent = () => {
|
||||
return `web:contextMod:${this.botName}`
|
||||
}
|
||||
|
||||
constructor(config: BotInstanceConfig, logger: Logger) {
|
||||
const {
|
||||
notifications,
|
||||
name,
|
||||
subreddits: {
|
||||
names = [],
|
||||
exclude = [],
|
||||
wikiConfig,
|
||||
dryRun,
|
||||
heartbeatInterval,
|
||||
},
|
||||
credentials: {
|
||||
clientId,
|
||||
clientSecret,
|
||||
refreshToken,
|
||||
accessToken,
|
||||
},
|
||||
snoowrap: {
|
||||
proxy,
|
||||
debug,
|
||||
},
|
||||
polling: {
|
||||
sharedMod,
|
||||
},
|
||||
queue: {
|
||||
maxWorkers,
|
||||
},
|
||||
caching: {
|
||||
authorTTL,
|
||||
provider: {
|
||||
store
|
||||
}
|
||||
},
|
||||
nanny: {
|
||||
softLimit,
|
||||
hardLimit,
|
||||
}
|
||||
} = config;
|
||||
|
||||
this.cacheManager = new BotResourcesManager(config);
|
||||
|
||||
this.dryRun = parseBool(dryRun) === true ? true : undefined;
|
||||
this.softLimit = softLimit;
|
||||
this.hardLimit = hardLimit;
|
||||
this.wikiLocation = wikiConfig;
|
||||
this.heartbeatInterval = heartbeatInterval;
|
||||
this.sharedModqueue = sharedMod;
|
||||
if(name !== undefined) {
|
||||
this.botName = name;
|
||||
}
|
||||
|
||||
const getBotName = this.getBotName;
|
||||
const getUserName = this.getUserAgent;
|
||||
|
||||
this.logger = logger.child({
|
||||
get bot() {
|
||||
return getBotName();
|
||||
}
|
||||
}, mergeArr);
|
||||
|
||||
let mw = maxWorkers;
|
||||
if(maxWorkers < 1) {
|
||||
this.logger.warn(`Max queue workers must be greater than or equal to 1 (Specified: ${maxWorkers})`);
|
||||
mw = 1;
|
||||
}
|
||||
this.maxWorkers = mw;
|
||||
|
||||
if (this.dryRun) {
|
||||
this.logger.info('Running in DRYRUN mode');
|
||||
}
|
||||
|
||||
this.subreddits = names.map(parseSubredditName);
|
||||
this.excludeSubreddits = exclude.map(parseSubredditName);
|
||||
|
||||
let creds: any = {
|
||||
get userAgent() { return getUserName() },
|
||||
clientId,
|
||||
clientSecret,
|
||||
refreshToken,
|
||||
accessToken,
|
||||
};
|
||||
|
||||
const missingCreds = [];
|
||||
for(const [k,v] of Object.entries(creds)) {
|
||||
if(v === undefined || v === '' || v === null) {
|
||||
missingCreds.push(k);
|
||||
}
|
||||
}
|
||||
if(missingCreds.length > 0) {
|
||||
this.logger.error('There are credentials missing that would prevent initializing the Reddit API Client and subsequently the rest of the application');
|
||||
this.logger.error(`Missing credentials: ${missingCreds.join(', ')}`)
|
||||
this.logger.info(`If this is a first-time setup use the 'web' command for a web-based guide to configuring your application`);
|
||||
this.logger.info(`Or check the USAGE section of the readme for the correct naming of these arguments/environment variables`);
|
||||
this.error = `Missing credentials: ${missingCreds.join(', ')}`;
|
||||
//throw new LoggedError(`Missing credentials: ${missingCreds.join(', ')}`);
|
||||
}
|
||||
|
||||
try {
|
||||
this.client = proxy === undefined ? new Snoowrap(creds) : new ProxiedSnoowrap({...creds, proxy});
|
||||
this.client.config({
|
||||
warnings: true,
|
||||
maxRetryAttempts: 5,
|
||||
debug,
|
||||
logger: snooLogWrapper(this.logger.child({labels: ['Snoowrap']}, mergeArr)),
|
||||
continueAfterRatelimitError: true,
|
||||
});
|
||||
} catch (err) {
|
||||
if(this.error === undefined) {
|
||||
this.error = err.message;
|
||||
this.logger.error(err);
|
||||
}
|
||||
}
|
||||
|
||||
const retryHandler = createRetryHandler({maxRequestRetry: 8, maxOtherRetry: 1}, this.logger);
|
||||
this.nannyRetryHandler = createRetryHandler({maxRequestRetry: 5, maxOtherRetry: 1}, this.logger);
|
||||
|
||||
const modStreamErrorListener = (name: string) => async (err: any) => {
|
||||
this.logger.error('Polling error occurred', err);
|
||||
const shouldRetry = await retryHandler(err);
|
||||
if(shouldRetry) {
|
||||
defaultUnmoderatedStream.startInterval();
|
||||
} else {
|
||||
for(const m of this.subManagers) {
|
||||
if(m.modStreamCallbacks.size > 0) {
|
||||
m.notificationManager.handle('runStateChanged', `${name.toUpperCase()} Polling Stopped`, 'Encountered too many errors from Reddit while polling. Will try to restart on next heartbeat.');
|
||||
}
|
||||
}
|
||||
this.logger.error(`Mod stream ${name.toUpperCase()} encountered too many errors while polling. Will try to restart on next heartbeat.`);
|
||||
}
|
||||
}
|
||||
|
||||
const defaultUnmoderatedStream = new UnmoderatedStream(this.client, {subreddit: 'mod'});
|
||||
// @ts-ignore
|
||||
defaultUnmoderatedStream.on('error', modStreamErrorListener('unmoderated'));
|
||||
const defaultModqueueStream = new ModQueueStream(this.client, {subreddit: 'mod'});
|
||||
// @ts-ignore
|
||||
defaultModqueueStream.on('error', modStreamErrorListener('modqueue'));
|
||||
this.cacheManager.modStreams.set('unmoderated', defaultUnmoderatedStream);
|
||||
this.cacheManager.modStreams.set('modqueue', defaultModqueueStream);
|
||||
|
||||
process.on('uncaughtException', (e) => {
|
||||
this.error = e;
|
||||
});
|
||||
process.on('unhandledRejection', (e) => {
|
||||
this.error = e;
|
||||
});
|
||||
process.on('exit', async (code) => {
|
||||
if(code === 0) {
|
||||
await this.onTerminate();
|
||||
} else if(this.error !== undefined) {
|
||||
let errMsg;
|
||||
if(typeof this.error === 'object' && this.error.message !== undefined) {
|
||||
errMsg = this.error.message;
|
||||
} else if(typeof this.error === 'string') {
|
||||
errMsg = this.error;
|
||||
}
|
||||
await this.onTerminate(`Application exited due to an unexpected error${errMsg !== undefined ? `: ${errMsg}` : ''}`);
|
||||
} else {
|
||||
await this.onTerminate(`Application exited with unclean exit signal (${code})`);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async onTerminate(reason = 'The application was shutdown') {
|
||||
for(const m of this.subManagers) {
|
||||
await m.notificationManager.handle('runStateChanged', 'Application Shutdown', reason);
|
||||
}
|
||||
}
|
||||
|
||||
async testClient() {
|
||||
try {
|
||||
// @ts-ignore
|
||||
await this.client.getMe();
|
||||
this.logger.info('Test API call successful');
|
||||
} catch (err) {
|
||||
this.logger.error('An error occurred while trying to initialize the Reddit API Client which would prevent the entire application from running.');
|
||||
if(err.name === 'StatusCodeError') {
|
||||
const authHeader = err.response.headers['www-authenticate'];
|
||||
if (authHeader !== undefined && authHeader.includes('insufficient_scope')) {
|
||||
this.logger.error('Reddit responded with a 403 insufficient_scope. Please ensure you have chosen the correct scopes when authorizing your account.');
|
||||
} else if(err.statusCode === 401) {
|
||||
this.logger.error('It is likely a credential is missing or incorrect. Check clientId, clientSecret, refreshToken, and accessToken');
|
||||
}
|
||||
this.logger.error(`Error Message: ${err.message}`);
|
||||
} else {
|
||||
this.logger.error(err);
|
||||
}
|
||||
this.error = `Error occurred while testing Reddit API client: ${err.message}`;
|
||||
err.logged = true;
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
async buildManagers(subreddits: string[] = []) {
|
||||
let availSubs = [];
|
||||
// @ts-ignore
|
||||
const user = await this.client.getMe().fetch();
|
||||
this.botLink = `https://reddit.com/user/${user.name}`;
|
||||
this.botAccount = `u/${user.name}`;
|
||||
this.logger.info(`Reddit API Limit Remaining: ${this.client.ratelimitRemaining}`);
|
||||
this.logger.info(`Authenticated Account: u/${user.name}`);
|
||||
|
||||
const botNameFromConfig = this.botName !== undefined;
|
||||
if(this.botName === undefined) {
|
||||
this.botName = `u/${user.name}`;
|
||||
}
|
||||
this.logger.info(`Bot Name${botNameFromConfig ? ' (from config)' : ''}: ${this.botName}`);
|
||||
|
||||
for (const sub of await this.client.getModeratedSubreddits()) {
|
||||
// TODO don't know a way to check permissions yet
|
||||
availSubs.push(sub);
|
||||
}
|
||||
this.logger.info(`u/${user.name} is a moderator of these subreddits: ${availSubs.map(x => x.display_name_prefixed).join(', ')}`);
|
||||
|
||||
let subsToRun: Subreddit[] = [];
|
||||
const subsToUse = subreddits.length > 0 ? subreddits.map(parseSubredditName) : this.subreddits;
|
||||
if (subsToUse.length > 0) {
|
||||
this.logger.info(`Operator-defined subreddit constraints detected (CLI argument or environmental variable), will try to run on: ${subsToUse.join(', ')}`);
|
||||
for (const sub of subsToUse) {
|
||||
const asub = availSubs.find(x => x.display_name.toLowerCase() === sub.toLowerCase())
|
||||
if (asub === undefined) {
|
||||
this.logger.warn(`Will not run on ${sub} because it is not moderated by this bot account, or the account does not have the required mod permissions.`);
|
||||
} else {
|
||||
// @ts-ignore
|
||||
const fetchedSub = await asub.fetch();
|
||||
subsToRun.push(fetchedSub);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if(this.excludeSubreddits.length > 0) {
|
||||
this.logger.info(`Will run on all moderated subreddits but user-defined excluded: ${this.excludeSubreddits.join(', ')}`);
|
||||
const normalExcludes = this.excludeSubreddits.map(x => x.toLowerCase());
|
||||
subsToRun = availSubs.filter(x => !normalExcludes.includes(x.display_name.toLowerCase()));
|
||||
} else {
|
||||
this.logger.info('No user-defined subreddit constraints detected, will run on all moderated subreddits');
|
||||
subsToRun = availSubs;
|
||||
}
|
||||
}
|
||||
|
||||
let subSchedule: Manager[] = [];
|
||||
// get configs for subs we want to run on and build/validate them
|
||||
for (const sub of subsToRun) {
|
||||
const manager = new Manager(sub, this.client, this.logger, this.cacheManager, {dryRun: this.dryRun, sharedModqueue: this.sharedModqueue, wikiLocation: this.wikiLocation, botName: this.botName, maxWorkers: this.maxWorkers});
|
||||
try {
|
||||
await manager.parseConfiguration('system', true, {suppressNotification: true});
|
||||
} catch (err) {
|
||||
if (!(err instanceof LoggedError)) {
|
||||
this.logger.error(`Config was not valid:`, {subreddit: sub.display_name_prefixed});
|
||||
this.logger.error(err, {subreddit: sub.display_name_prefixed});
|
||||
}
|
||||
}
|
||||
subSchedule.push(manager);
|
||||
}
|
||||
this.subManagers = subSchedule;
|
||||
}
|
||||
|
||||
async destroy(causedBy: Invokee) {
|
||||
this.logger.info('Stopping heartbeat and nanny processes, may take up to 5 seconds...');
|
||||
const processWait = pEvent(this.emitter, 'healthStopped');
|
||||
this.running = false;
|
||||
await processWait;
|
||||
for (const manager of this.subManagers) {
|
||||
await manager.stop(causedBy, {reason: 'App rebuild'});
|
||||
}
|
||||
this.logger.info('Bot is stopped.');
|
||||
}
|
||||
|
||||
async runModStreams(notify = false) {
|
||||
for(const [k,v] of this.cacheManager.modStreams) {
|
||||
if(!v.running && v.listeners('item').length > 0) {
|
||||
v.startInterval();
|
||||
this.logger.info(`Starting default ${k.toUpperCase()} mod stream`);
|
||||
if(notify) {
|
||||
for(const m of this.subManagers) {
|
||||
if(m.modStreamCallbacks.size > 0) {
|
||||
await m.notificationManager.handle('runStateChanged', `${k.toUpperCase()} Polling Started`, 'Polling was successfully restarted on heartbeat.');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async runManagers(causedBy: Invokee = 'system') {
|
||||
if(this.subManagers.every(x => !x.validConfigLoaded)) {
|
||||
this.logger.warn('All managers have invalid configs!');
|
||||
this.error = 'All managers have invalid configs';
|
||||
}
|
||||
for (const manager of this.subManagers) {
|
||||
if (manager.validConfigLoaded && manager.botState.state !== RUNNING) {
|
||||
await manager.start(causedBy, {reason: 'Caused by application startup'});
|
||||
}
|
||||
}
|
||||
|
||||
await this.runModStreams();
|
||||
|
||||
this.running = true;
|
||||
this.nextNannyCheck = dayjs().add(10, 'second');
|
||||
this.nextHeartbeat = dayjs().add(this.heartbeatInterval, 'second');
|
||||
await this.healthLoop();
|
||||
}
|
||||
|
||||
async healthLoop() {
|
||||
while (this.running) {
|
||||
await sleep(5000);
|
||||
if (!this.running) {
|
||||
break;
|
||||
}
|
||||
if (dayjs().isSameOrAfter(this.nextNannyCheck)) {
|
||||
try {
|
||||
await this.runApiNanny();
|
||||
this.nextNannyCheck = dayjs().add(10, 'second');
|
||||
} catch (err) {
|
||||
this.logger.info('Delaying next nanny check for 2 minutes due to emitted error');
|
||||
this.nextNannyCheck = dayjs().add(120, 'second');
|
||||
}
|
||||
}
|
||||
if(dayjs().isSameOrAfter(this.nextHeartbeat)) {
|
||||
try {
|
||||
await this.heartbeat();
|
||||
} catch (err) {
|
||||
this.logger.error(`Error occurred during heartbeat check: ${err.message}`);
|
||||
}
|
||||
this.nextHeartbeat = dayjs().add(this.heartbeatInterval, 'second');
|
||||
}
|
||||
}
|
||||
this.emitter.emit('healthStopped');
|
||||
}
|
||||
|
||||
async heartbeat() {
|
||||
const heartbeat = `HEARTBEAT -- API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ~${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion === undefined ? 'N/A' : this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`
|
||||
this.logger.info(heartbeat);
|
||||
for (const s of this.subManagers) {
|
||||
if(s.botState.state === STOPPED && s.botState.causedBy === USER) {
|
||||
this.logger.debug('Skipping config check/restart on heartbeat due to previously being stopped by user', {subreddit: s.displayLabel});
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const newConfig = await s.parseConfiguration();
|
||||
if(newConfig || (s.queueState.state !== RUNNING && s.queueState.causedBy === SYSTEM))
|
||||
{
|
||||
await s.startQueue('system', {reason: newConfig ? 'Config updated on heartbeat triggered reload' : 'Heartbeat detected non-running queue'});
|
||||
}
|
||||
if(newConfig || (s.eventsState.state !== RUNNING && s.eventsState.causedBy === SYSTEM))
|
||||
{
|
||||
await s.startEvents('system', {reason: newConfig ? 'Config updated on heartbeat triggered reload' : 'Heartbeat detected non-running events'});
|
||||
}
|
||||
if(s.botState.state !== RUNNING && s.eventsState.state === RUNNING && s.queueState.state === RUNNING) {
|
||||
s.botState = {
|
||||
state: RUNNING,
|
||||
causedBy: 'system',
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.info('Stopping event polling to prevent activity processing queue from backing up. Will be restarted when config update succeeds.')
|
||||
await s.stopEvents('system', {reason: 'Invalid config will cause events to pile up in queue. Will be restarted when config update succeeds (next heartbeat).'});
|
||||
if(!(err instanceof LoggedError)) {
|
||||
this.logger.error(err, {subreddit: s.displayLabel});
|
||||
}
|
||||
if(this.nextHeartbeat !== undefined) {
|
||||
this.logger.info(`Will retry parsing config on next heartbeat (in ${dayjs.duration(this.nextHeartbeat.diff(dayjs())).humanize()})`, {subreddit: s.displayLabel});
|
||||
}
|
||||
}
|
||||
}
|
||||
await this.runModStreams(true);
|
||||
}
|
||||
|
||||
async runApiNanny() {
|
||||
try {
|
||||
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
|
||||
const nowish = dayjs().add(10, 'second');
|
||||
if (nowish.isAfter(this.nextExpiration)) {
|
||||
// it's possible no api calls are being made because of a hard limit
|
||||
// need to make an api call to update this
|
||||
let shouldRetry = true;
|
||||
while (shouldRetry) {
|
||||
try {
|
||||
// @ts-ignore
|
||||
await this.client.getMe();
|
||||
shouldRetry = false;
|
||||
} catch (err) {
|
||||
shouldRetry = await this.nannyRetryHandler(err);
|
||||
if (!shouldRetry) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
|
||||
}
|
||||
const rollingSample = this.apiSample.slice(0, 7)
|
||||
rollingSample.unshift(this.client.ratelimitRemaining);
|
||||
this.apiSample = rollingSample;
|
||||
const diff = this.apiSample.reduceRight((acc: number[], curr, index) => {
|
||||
if (this.apiSample[index + 1] !== undefined) {
|
||||
const d = Math.abs(curr - this.apiSample[index + 1]);
|
||||
if (d === 0) {
|
||||
return [...acc, 0];
|
||||
}
|
||||
return [...acc, d / 10];
|
||||
}
|
||||
return acc;
|
||||
}, []);
|
||||
this.apiRollingAvg = diff.reduce((acc, curr) => acc + curr, 0) / diff.length; // api requests per second
|
||||
this.depletedInSecs = this.client.ratelimitRemaining / this.apiRollingAvg; // number of seconds until current remaining limit is 0
|
||||
this.apiEstDepletion = dayjs.duration({seconds: this.depletedInSecs});
|
||||
this.logger.debug(`API Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`);
|
||||
|
||||
|
||||
let hardLimitHit = false;
|
||||
if (typeof this.hardLimit === 'string') {
|
||||
const hardDur = parseDuration(this.hardLimit);
|
||||
hardLimitHit = hardDur.asSeconds() > this.apiEstDepletion.asSeconds();
|
||||
} else {
|
||||
hardLimitHit = this.hardLimit > this.client.ratelimitRemaining;
|
||||
}
|
||||
|
||||
if (hardLimitHit) {
|
||||
if (this.nannyMode === 'hard') {
|
||||
return;
|
||||
}
|
||||
this.logger.info(`Detected HARD LIMIT of ${this.hardLimit} remaining`, {leaf: 'Api Nanny'});
|
||||
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${this.apiRollingAvg}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
|
||||
this.logger.info(`All subreddit event polling has been paused`, {leaf: 'Api Nanny'});
|
||||
|
||||
for (const m of this.subManagers) {
|
||||
m.pauseEvents('system');
|
||||
m.notificationManager.handle('runStateChanged', 'Hard Limit Triggered', `Hard Limit of ${this.hardLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit event polling has been paused.`, 'system', 'warn');
|
||||
}
|
||||
|
||||
this.nannyMode = 'hard';
|
||||
return;
|
||||
}
|
||||
|
||||
let softLimitHit = false;
|
||||
if (typeof this.softLimit === 'string') {
|
||||
const softDur = parseDuration(this.softLimit);
|
||||
softLimitHit = softDur.asSeconds() > this.apiEstDepletion.asSeconds();
|
||||
} else {
|
||||
softLimitHit = this.softLimit > this.client.ratelimitRemaining;
|
||||
}
|
||||
|
||||
if (softLimitHit) {
|
||||
if (this.nannyMode === 'soft') {
|
||||
return;
|
||||
}
|
||||
this.logger.info(`Detected SOFT LIMIT of ${this.softLimit} remaining`, {leaf: 'Api Nanny'});
|
||||
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
|
||||
this.logger.info('Trying to detect heavy usage subreddits...', {leaf: 'Api Nanny'});
|
||||
let threshold = 0.5;
|
||||
let offenders = this.subManagers.filter(x => {
|
||||
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
|
||||
return combinedPerSec > threshold;
|
||||
});
|
||||
if (offenders.length === 0) {
|
||||
threshold = 0.25;
|
||||
// reduce threshold
|
||||
offenders = this.subManagers.filter(x => {
|
||||
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
|
||||
return combinedPerSec > threshold;
|
||||
});
|
||||
}
|
||||
|
||||
if (offenders.length > 0) {
|
||||
this.logger.info(`Slowing subreddits using > ${threshold}req/s:`, {leaf: 'Api Nanny'});
|
||||
for (const m of offenders) {
|
||||
m.delayBy = 1.5;
|
||||
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
|
||||
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
|
||||
}
|
||||
} else {
|
||||
this.logger.info(`Couldn't detect specific offenders, slowing all...`, {leaf: 'Api Nanny'});
|
||||
for (const m of this.subManagers) {
|
||||
m.delayBy = 1.5;
|
||||
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
|
||||
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
|
||||
}
|
||||
}
|
||||
this.nannyMode = 'soft';
|
||||
return
|
||||
}
|
||||
|
||||
if (this.nannyMode !== undefined) {
|
||||
this.logger.info('Turning off due to better conditions...', {leaf: 'Api Nanny'});
|
||||
for (const m of this.subManagers) {
|
||||
if (m.delayBy !== undefined) {
|
||||
m.delayBy = undefined;
|
||||
m.notificationManager.handle('runStateChanged', 'Normal Processing Resumed', 'Slow Mode has been turned off due to better API conditions', 'system');
|
||||
}
|
||||
if (m.queueState.state === PAUSED && m.queueState.causedBy === SYSTEM) {
|
||||
m.startQueue('system', {reason: 'API Nanny has been turned off due to better API conditions'});
|
||||
}
|
||||
if (m.eventsState.state === PAUSED && m.eventsState.causedBy === SYSTEM) {
|
||||
await m.startEvents('system', {reason: 'API Nanny has been turned off due to better API conditions'});
|
||||
}
|
||||
}
|
||||
this.nannyMode = undefined;
|
||||
}
|
||||
|
||||
} catch (err) {
|
||||
this.logger.error(`Error occurred during nanny loop: ${err.message}`);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default Bot;
|
||||
@@ -1,12 +1,21 @@
|
||||
import {Check, CheckOptions} from "./index";
|
||||
import {CommentState} from "../Common/interfaces";
|
||||
import {Check, CheckOptions, userResultCacheDefault, UserResultCacheOptions} from "./index";
|
||||
import {CommentState, UserResultCache} from "../Common/interfaces";
|
||||
import {Submission, Comment} from "snoowrap/dist/objects";
|
||||
import {RuleResult} from "../Rule";
|
||||
|
||||
export interface CommentCheckOptions extends CheckOptions {
|
||||
cacheUserResult?: UserResultCacheOptions;
|
||||
}
|
||||
|
||||
export class CommentCheck extends Check {
|
||||
itemIs: CommentState[];
|
||||
|
||||
constructor(options: CheckOptions) {
|
||||
constructor(options: CommentCheckOptions) {
|
||||
super(options);
|
||||
const {itemIs = []} = options;
|
||||
const {
|
||||
itemIs = [],
|
||||
} = options;
|
||||
|
||||
this.itemIs = itemIs;
|
||||
this.logSummary();
|
||||
}
|
||||
@@ -14,4 +23,34 @@ export class CommentCheck extends Check {
|
||||
logSummary() {
|
||||
super.logSummary('comment');
|
||||
}
|
||||
|
||||
async getCacheResult(item: Submission | Comment): Promise<UserResultCache | undefined> {
|
||||
if (this.cacheUserResult.enable) {
|
||||
return await this.resources.getCommentCheckCacheResult(item as Comment, {
|
||||
name: this.name,
|
||||
authorIs: this.authorIs,
|
||||
itemIs: this.itemIs
|
||||
})
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
async setCacheResult(item: Submission | Comment, result: UserResultCache): Promise<void> {
|
||||
if (this.cacheUserResult.enable) {
|
||||
const {result: outcome, ruleResults} = result;
|
||||
|
||||
const res: UserResultCache = {
|
||||
result: outcome,
|
||||
// don't need to cache rule results if check was not triggered
|
||||
// since we only use rule results for actions
|
||||
ruleResults: outcome ? ruleResults : []
|
||||
};
|
||||
|
||||
await this.resources.setCommentCheckCacheResult(item as Comment, {
|
||||
name: this.name,
|
||||
authorIs: this.authorIs,
|
||||
itemIs: this.itemIs
|
||||
}, res, this.cacheUserResult.ttl)
|
||||
}
|
||||
}
|
||||
}
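
To make the cached shape concrete, a small sketch (not part of the diff) of the two payloads setCacheResult ends up storing; the rule result content is hypothetical:

// --- illustrative sketch, not part of the diff ---
// Check triggered: rule results are kept because actions may reference them later
const cachedTriggered = {result: true, ruleResults: [/* hypothetical RuleResult objects */]};
// Check not triggered: rule results are dropped, per the comment above, since no actions will run
const cachedNotTriggered = {result: false, ruleResults: []};
// --- end sketch ---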
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
|
||||
import {Check, CheckOptions} from "./index";
|
||||
import {SubmissionState} from "../Common/interfaces";
|
||||
import {SubmissionState, UserResultCache} from "../Common/interfaces";
|
||||
import {Submission, Comment} from "snoowrap/dist/objects";
|
||||
import {RuleResult} from "../Rule";
|
||||
|
||||
export class SubmissionCheck extends Check {
|
||||
itemIs: SubmissionState[];
|
||||
|
||||
@@ -2,10 +2,11 @@ import {RuleSet, IRuleSet, RuleSetJson, RuleSetObjectJson} from "../Rule/RuleSet
|
||||
import {IRule, isRuleSetResult, Rule, RuleJSONConfig, RuleResult, RuleSetResult} from "../Rule";
|
||||
import Action, {ActionConfig, ActionJson} from "../Action";
|
||||
import {Logger} from "winston";
|
||||
import {Comment, Submission} from "snoowrap";
|
||||
import Snoowrap, {Comment, Submission} from "snoowrap";
|
||||
import {actionFactory} from "../Action/ActionFactory";
|
||||
import {ruleFactory} from "../Rule/RuleFactory";
|
||||
import {
|
||||
boolToString,
|
||||
createAjvFactory,
|
||||
FAIL,
|
||||
mergeArr,
|
||||
@@ -15,27 +16,28 @@ import {
|
||||
truncateStringToLength
|
||||
} from "../util";
|
||||
import {
|
||||
ActionResult,
|
||||
ChecksActivityState,
|
||||
CommentState,
|
||||
JoinCondition,
|
||||
JoinOperands,
|
||||
SubmissionState,
|
||||
TypedActivityStates
|
||||
TypedActivityStates, UserResultCache
|
||||
} from "../Common/interfaces";
|
||||
import * as RuleSchema from '../Schema/Rule.json';
|
||||
import * as RuleSetSchema from '../Schema/RuleSet.json';
|
||||
import * as ActionSchema from '../Schema/Action.json';
|
||||
import {ActionObjectJson, RuleJson, RuleObjectJson, ActionJson as ActionTypeJson} from "../Common/types";
|
||||
import {isItem} from "../Utils/SnoowrapUtils";
|
||||
import ResourceManager, {SubredditResources} from "../Subreddit/SubredditResources";
|
||||
import {SubredditResources} from "../Subreddit/SubredditResources";
|
||||
import {Author, AuthorCriteria, AuthorOptions} from "../Author/Author";
|
||||
|
||||
const checkLogName = truncateStringToLength(25);
|
||||
|
||||
export class Check implements ICheck {
|
||||
export abstract class Check implements ICheck {
|
||||
actions: Action[] = [];
|
||||
description?: string;
|
||||
name: string;
|
||||
enabled: boolean;
|
||||
condition: JoinOperands;
|
||||
rules: Array<RuleSet | Rule> = [];
|
||||
logger: Logger;
|
||||
@@ -44,19 +46,25 @@ export class Check implements ICheck {
|
||||
include: AuthorCriteria[],
|
||||
exclude: AuthorCriteria[]
|
||||
};
|
||||
cacheUserResult: Required<UserResultCacheOptions>;
|
||||
dryRun?: boolean;
|
||||
notifyOnTrigger: boolean;
|
||||
resources: SubredditResources;
|
||||
client: Snoowrap;
|
||||
|
||||
constructor(options: CheckOptions) {
|
||||
const {
|
||||
enable = true,
|
||||
name,
|
||||
resources,
|
||||
description,
|
||||
client,
|
||||
condition = 'AND',
|
||||
rules = [],
|
||||
actions = [],
|
||||
notifyOnTrigger = false,
|
||||
subredditName,
|
||||
cacheUserResult = {},
|
||||
itemIs = [],
|
||||
authorIs: {
|
||||
include = [],
|
||||
@@ -65,11 +73,14 @@ export class Check implements ICheck {
|
||||
dryRun,
|
||||
} = options;
|
||||
|
||||
this.enabled = enable;
|
||||
|
||||
this.logger = options.logger.child({labels: [`CHK ${checkLogName(name)}`]}, mergeArr);
|
||||
|
||||
const ajv = createAjvFactory(this.logger);
|
||||
|
||||
this.resources = ResourceManager.get(subredditName) as SubredditResources;
|
||||
this.resources = resources;
|
||||
this.client = client;
|
||||
|
||||
this.name = name;
|
||||
this.description = description;
|
||||
@@ -80,6 +91,10 @@ export class Check implements ICheck {
|
||||
exclude: exclude.map(x => new Author(x)),
|
||||
include: include.map(x => new Author(x)),
|
||||
}
|
||||
this.cacheUserResult = {
|
||||
...userResultCacheDefault,
|
||||
...cacheUserResult
|
||||
}
|
||||
this.dryRun = dryRun;
|
||||
for (const r of rules) {
|
||||
if (r instanceof Rule || r instanceof RuleSet) {
|
||||
@@ -90,12 +105,12 @@ export class Check implements ICheck {
|
||||
let ruleErrors: any = [];
|
||||
if (valid) {
|
||||
const ruleConfig = r as RuleSetObjectJson;
|
||||
this.rules.push(new RuleSet({...ruleConfig, logger: this.logger, subredditName}));
|
||||
this.rules.push(new RuleSet({...ruleConfig, logger: this.logger, subredditName, resources: this.resources, client: this.client}));
|
||||
} else {
|
||||
setErrors = ajv.errors;
|
||||
valid = ajv.validate(RuleSchema, r);
|
||||
if (valid) {
|
||||
this.rules.push(ruleFactory(r as RuleJSONConfig, this.logger, subredditName));
|
||||
this.rules.push(ruleFactory(r as RuleJSONConfig, this.logger, subredditName, this.resources, this.client));
|
||||
} else {
|
||||
ruleErrors = ajv.errors;
|
||||
const leastErrorType = setErrors.length < ruleErrors.length ? 'RuleSet' : 'Rule';
|
||||
@@ -119,7 +134,7 @@ export class Check implements ICheck {
|
||||
this.actions.push(actionFactory({
|
||||
...aj,
|
||||
dryRun: this.dryRun || aj.dryRun
|
||||
}, this.logger, subredditName));
|
||||
}, this.logger, subredditName, this.resources, this.client));
|
||||
// @ts-ignore
|
||||
a.logger = this.logger;
|
||||
} else {
|
||||
@@ -142,7 +157,7 @@ export class Check implements ICheck {
|
||||
}
|
||||
runStats.push(`${this.actions.length} Actions`);
|
||||
// not sure if this should be info or verbose
|
||||
this.logger.info(`${type.toUpperCase()} (${this.condition})${this.notifyOnTrigger ? ' ||Notify on Trigger|| ' : ''} => ${runStats.join(' | ')}${this.description !== undefined ? ` => ${this.description}` : ''}`);
|
||||
this.logger.info(`=${this.enabled ? 'Enabled' : 'Disabled'}= ${type.toUpperCase()} (${this.condition})${this.notifyOnTrigger ? ' ||Notify on Trigger|| ' : ''} => ${runStats.join(' | ')}${this.description !== undefined ? ` => ${this.description}` : ''}`);
|
||||
if (this.rules.length === 0 && this.itemIs.length === 0 && this.authorIs.exclude.length === 0 && this.authorIs.include.length === 0) {
|
||||
this.logger.warn('No rules, item tests, or author test found -- this check will ALWAYS PASS!');
|
||||
}
|
||||
@@ -162,11 +177,26 @@ export class Check implements ICheck {
|
||||
}
|
||||
}
|
||||
|
||||
async runRules(item: Submission | Comment, existingResults: RuleResult[] = []): Promise<[boolean, RuleResult[]]> {
|
||||
async getCacheResult(item: Submission | Comment) : Promise<UserResultCache | undefined> {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
async setCacheResult(item: Submission | Comment, result: UserResultCache): Promise<void> {
|
||||
}
|
||||
|
||||
async runRules(item: Submission | Comment, existingResults: RuleResult[] = []): Promise<[boolean, RuleResult[], boolean?]> {
|
||||
try {
|
||||
let allRuleResults: RuleResult[] = [];
|
||||
let allResults: (RuleResult | RuleSetResult)[] = [];
|
||||
const [itemPass, crit] = isItem(item, this.itemIs, this.logger);
|
||||
|
||||
// check cache results
|
||||
const cacheResult = await this.getCacheResult(item);
|
||||
if(cacheResult !== undefined) {
|
||||
this.logger.verbose(`Skipping rules run because result was found in cache, Check Triggered Result: ${cacheResult.result}`);
|
||||
return [cacheResult.result, cacheResult.ruleResults, true];
|
||||
}
|
||||
|
||||
const itemPass = await this.resources.testItemCriteria(item, this.itemIs);
|
||||
if (!itemPass) {
|
||||
this.logger.verbose(`${FAIL} => Item did not pass 'itemIs' test`);
|
||||
return [false, allRuleResults];
|
||||
@@ -245,19 +275,27 @@ export class Check implements ICheck {
|
||||
}
|
||||
}
|
||||
|
||||
async runActions(item: Submission | Comment, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<Action[]> {
|
||||
async runActions(item: Submission | Comment, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionResult[]> {
|
||||
const dr = runtimeDryrun || this.dryRun;
|
||||
this.logger.debug(`${dr ? 'DRYRUN - ' : ''}Running Actions`);
|
||||
const runActions: Action[] = [];
|
||||
const runActions: ActionResult[] = [];
|
||||
for (const a of this.actions) {
|
||||
try {
|
||||
await a.handle(item, ruleResults, runtimeDryrun);
|
||||
runActions.push(a);
|
||||
} catch (err) {
|
||||
this.logger.error(`Action ${a.getActionUniqueName()} encountered an error while running`, err);
|
||||
if(!a.enabled) {
|
||||
runActions.push({
|
||||
kind: a.getKind(),
|
||||
name: a.getActionUniqueName(),
|
||||
run: false,
|
||||
success: false,
|
||||
runReason: 'Not enabled',
|
||||
dryRun: (a.dryRun || dr) || false,
|
||||
});
|
||||
this.logger.info(`Action ${a.getActionUniqueName()} not run because it is not enabled.`);
|
||||
continue;
|
||||
}
|
||||
const res = await a.handle(item, ruleResults, runtimeDryrun);
|
||||
runActions.push(res);
|
||||
}
|
||||
this.logger.info(`${dr ? 'DRYRUN - ' : ''}Ran Actions: ${runActions.map(x => x.getActionUniqueName()).join(' | ')}`);
|
||||
this.logger.info(`${dr ? 'DRYRUN - ' : ''}Ran Actions: ${runActions.map(x => x.name).join(' | ')}`);
|
||||
return runActions;
|
||||
}
|
||||
}
|
||||
@@ -296,6 +334,14 @@ export interface ICheck extends JoinCondition, ChecksActivityState {
|
||||
* If present then these Author criteria are checked before running the Check. If criteria fails then the Check will fail.
|
||||
* */
|
||||
authorIs?: AuthorOptions
|
||||
|
||||
/**
|
||||
* Should this check be run by the bot?
|
||||
*
|
||||
* @default true
|
||||
* @examples [true]
|
||||
* */
|
||||
enable?: boolean,
|
||||
}
|
||||
|
||||
export interface CheckOptions extends ICheck {
|
||||
@@ -304,6 +350,9 @@ export interface CheckOptions extends ICheck {
|
||||
logger: Logger
|
||||
subredditName: string
|
||||
notifyOnTrigger?: boolean
|
||||
resources: SubredditResources
|
||||
client: Snoowrap
|
||||
cacheUserResult?: UserResultCacheOptions;
|
||||
}
|
||||
|
||||
export interface CheckJson extends ICheck {
|
||||
@@ -338,6 +387,8 @@ export interface CheckJson extends ICheck {
|
||||
* @default false
|
||||
* */
|
||||
notifyOnTrigger?: boolean,
|
||||
|
||||
cacheUserResult?: UserResultCacheOptions;
|
||||
}
|
||||
|
||||
export interface SubmissionCheckJson extends CheckJson {
|
||||
@@ -345,6 +396,41 @@ export interface SubmissionCheckJson extends CheckJson {
|
||||
itemIs?: SubmissionState[]
|
||||
}
|
||||
|
||||
/**
* Cache the result of this check based on the comment author and the submission id
*
* This is useful in this type of scenario:
*
* 1. This check is configured to run on comments for specific submissions with high volume activity
* 2. The rules being run are not dependent on the content of the comment
* 3. The rule results are not likely to change while cache is valid
* */
export interface UserResultCacheOptions {
/**
* @default false
* */
enable?: boolean,
/**
* The amount of time, in seconds, to cache this result
*
* @default 60
* @examples [60]
* */
ttl?: number,
/**
* In the event the cache returns a triggered result should the actions for the check also be run?
*
* @default true
* */
runActions?: boolean
}

export const userResultCacheDefault: Required<UserResultCacheOptions> = {
enable: false,
ttl: 60,
runActions: true,
}
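
A hypothetical comment check illustrating how these options and defaults might be used together (the name and values here are examples only, not from this commit):

// --- illustrative sketch, not part of the diff ---
const exampleCommentCheckJson = {
    name: 'high-volume-thread-check',   // hypothetical
    kind: 'comment',
    cacheUserResult: {
        enable: true,     // default is false
        ttl: 120,         // cache each author's result for 2 minutes instead of the 60 second default
        runActions: false // on a cached triggered result, skip re-running actions
    },
    rules: [],
    actions: []
};
// --- end sketch ---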
|
||||
|
||||
export interface CommentCheckJson extends CheckJson {
|
||||
kind: 'comment'
|
||||
itemIs?: CommentState[]
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
export const cacheOptDefaults = {ttl: 60, max: 500, checkPeriod: 600};
|
||||
export const cacheTTLDefaults = {authorTTL: 60, userNotesTTL: 300, wikiTTL: 300};
|
||||
export const cacheTTLDefaults = {authorTTL: 60, userNotesTTL: 300, wikiTTL: 300, submissionTTL: 60, commentTTL: 60, filterCriteriaTTL: 60, subredditTTL: 600};
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,5 +1,5 @@
|
||||
import {RecentActivityRuleJSONConfig} from "../Rule/RecentActivityRule";
|
||||
import {RepeatActivityJSONConfig} from "../Rule/SubmissionRule/RepeatActivityRule";
|
||||
import {RepeatActivityJSONConfig} from "../Rule/RepeatActivityRule";
|
||||
import {AuthorRuleJSONConfig} from "../Rule/AuthorRule";
|
||||
import {AttributionJSONConfig} from "../Rule/AttributionRule";
|
||||
import {FlairActionJson} from "../Action/SubmissionAction/FlairAction";
|
||||
@@ -12,9 +12,10 @@ import {UserNoteActionJson} from "../Action/UserNoteAction";
|
||||
import {ApproveActionJson} from "../Action/ApproveAction";
|
||||
import {BanActionJson} from "../Action/BanAction";
|
||||
import {RegexRuleJSONConfig} from "../Rule/RegexRule";
|
||||
import {MessageActionJson} from "../Action/MessageAction";
|
||||
|
||||
export type RuleJson = RecentActivityRuleJSONConfig | RepeatActivityJSONConfig | AuthorRuleJSONConfig | AttributionJSONConfig | HistoryJSONConfig | RegexRuleJSONConfig | string;
|
||||
export type RuleObjectJson = Exclude<RuleJson, string>
|
||||
|
||||
export type ActionJson = CommentActionJson | FlairActionJson | ReportActionJson | LockActionJson | RemoveActionJson | ApproveActionJson | BanActionJson | UserNoteActionJson | string;
|
||||
export type ActionJson = CommentActionJson | FlairActionJson | ReportActionJson | LockActionJson | RemoveActionJson | ApproveActionJson | BanActionJson | UserNoteActionJson | MessageActionJson | string;
|
||||
export type ActionObjectJson = Exclude<ActionJson, string>;
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
import {Logger} from "winston";
|
||||
import {
|
||||
buildCacheOptionsFromProvider,
|
||||
buildCacheOptionsFromProvider, buildCachePrefix,
|
||||
createAjvFactory,
|
||||
mergeArr,
|
||||
normalizeName,
|
||||
overwriteMerge,
|
||||
parseBool, randomId,
|
||||
readJson,
|
||||
readConfigFile,
|
||||
removeUndefinedKeys
|
||||
} from "./util";
|
||||
import {CommentCheck} from "./Check/CommentCheck";
|
||||
@@ -25,7 +25,13 @@ import {
|
||||
OperatorConfig,
|
||||
PollingOptions,
|
||||
PollingOptionsStrong,
|
||||
PollOn, StrongCache, CacheProvider, CacheOptions
|
||||
PollOn,
|
||||
StrongCache,
|
||||
CacheProvider,
|
||||
CacheOptions,
|
||||
BotInstanceJsonConfig,
|
||||
BotInstanceConfig,
|
||||
RequiredWebRedditCredentials
|
||||
} from "./Common/interfaces";
|
||||
import {isRuleSetJSON, RuleSetJson, RuleSetObjectJson} from "./Rule/RuleSet";
|
||||
import deepEqual from "fast-deep-equal";
|
||||
@@ -37,6 +43,7 @@ import {operatorConfig} from "./Utils/CommandConfig";
|
||||
import merge from 'deepmerge';
|
||||
import * as process from "process";
|
||||
import {cacheOptDefaults, cacheTTLDefaults} from "./Common/defaults";
|
||||
import objectHash from "object-hash";
|
||||
|
||||
export interface ConfigBuilderOptions {
|
||||
logger: Logger,
|
||||
@@ -246,148 +253,181 @@ export const insertNamedActions = (actions: Array<ActionJson>, namedActions: Map
|
||||
return strongActions;
|
||||
}
|
||||
|
||||
export const parseOpConfigFromArgs = (args: any): OperatorJsonConfig => {
|
||||
export const parseDefaultBotInstanceFromArgs = (args: any): BotInstanceJsonConfig => {
|
||||
const {
|
||||
subreddits,
|
||||
clientId,
|
||||
clientSecret,
|
||||
accessToken,
|
||||
refreshToken,
|
||||
redirectUri,
|
||||
wikiConfig,
|
||||
dryRun,
|
||||
heartbeat,
|
||||
softLimit,
|
||||
heartbeat,
|
||||
hardLimit,
|
||||
authorTTL,
|
||||
operator,
|
||||
operatorDisplay,
|
||||
snooProxy,
|
||||
snooDebug,
|
||||
sharedMod,
|
||||
logLevel,
|
||||
logDir,
|
||||
port,
|
||||
sessionSecret,
|
||||
caching,
|
||||
web
|
||||
} = args || {};
|
||||
|
||||
const data = {
|
||||
operator: {
|
||||
name: operator,
|
||||
display: operatorDisplay
|
||||
},
|
||||
credentials: {
|
||||
clientId,
|
||||
clientSecret,
|
||||
accessToken,
|
||||
refreshToken,
|
||||
redirectUri,
|
||||
},
|
||||
snoowrap: {
|
||||
proxy: snooProxy,
|
||||
debug: snooDebug,
|
||||
},
|
||||
subreddits: {
|
||||
names: subreddits,
|
||||
wikiConfig,
|
||||
dryRun,
|
||||
heartbeatInterval: heartbeat,
|
||||
dryRun
|
||||
},
|
||||
polling: {
|
||||
sharedMod,
|
||||
},
|
||||
nanny: {
|
||||
softLimit,
|
||||
hardLimit
|
||||
}
|
||||
}
|
||||
return removeUndefinedKeys(data) as BotInstanceJsonConfig;
|
||||
}
|
||||
|
||||
export const parseOpConfigFromArgs = (args: any): OperatorJsonConfig => {
|
||||
const {
|
||||
clientId,
|
||||
clientSecret,
|
||||
redirectUri,
|
||||
operator,
|
||||
operatorDisplay,
|
||||
logLevel,
|
||||
logDir,
|
||||
port,
|
||||
sessionSecret,
|
||||
web,
|
||||
mode,
|
||||
caching,
|
||||
authorTTL,
|
||||
} = args || {};
|
||||
|
||||
const data = {
|
||||
mode,
|
||||
operator: {
|
||||
name: operator,
|
||||
display: operatorDisplay
|
||||
},
|
||||
logging: {
|
||||
level: logLevel,
|
||||
path: logDir === true ? `${process.cwd()}/logs` : undefined,
|
||||
},
|
||||
snoowrap: {
|
||||
proxy: snooProxy,
|
||||
debug: snooDebug,
|
||||
caching: {
|
||||
provider: caching,
|
||||
authorTTL
|
||||
},
|
||||
web: {
|
||||
enabled: web,
|
||||
port,
|
||||
session: {
|
||||
secret: sessionSecret
|
||||
},
|
||||
credentials: {
|
||||
clientId,
|
||||
clientSecret,
|
||||
redirectUri,
|
||||
}
|
||||
},
|
||||
polling: {
|
||||
sharedMod,
|
||||
},
|
||||
caching: {
|
||||
provider: caching,
|
||||
authorTTL
|
||||
},
|
||||
api: {
|
||||
softLimit,
|
||||
hardLimit
|
||||
}
|
||||
}
|
||||
|
||||
return removeUndefinedKeys(data) as OperatorJsonConfig;
|
||||
}
|
||||
|
||||
export const parseOpConfigFromEnv = (): OperatorJsonConfig => {
|
||||
let subsVal = process.env.SUBREDDITS;
|
||||
let subs;
|
||||
if (subsVal !== undefined) {
|
||||
subsVal = subsVal.trim();
|
||||
if (subsVal.includes(',')) {
|
||||
// try to parse using comma
|
||||
subs = subsVal.split(',').map(x => x.trim()).filter(x => x !== '');
|
||||
} else {
|
||||
// otherwise try spaces
|
||||
subs = subsVal.split(' ')
|
||||
// remove any extraneous spaces
|
||||
.filter(x => x !== ' ' && x !== '');
|
||||
}
|
||||
if (subs.length === 0) {
|
||||
subs = undefined;
|
||||
}
|
||||
const parseListFromEnv = (val: string | undefined) => {
|
||||
let listVals: undefined | string[];
|
||||
if (val === undefined) {
|
||||
return listVals;
|
||||
}
|
||||
const trimmedVal = val.trim();
|
||||
if (trimmedVal.includes(',')) {
|
||||
// try to parse using comma
|
||||
listVals = trimmedVal.split(',').map(x => x.trim()).filter(x => x !== '');
|
||||
} else {
|
||||
// otherwise try spaces
|
||||
listVals = trimmedVal.split(' ')
|
||||
// remove any extraneous spaces
|
||||
.filter(x => x !== ' ' && x !== '');
|
||||
}
|
||||
if (listVals.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
return listVals;
|
||||
}
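
Assuming the helper were exported for testing, a quick sketch of the return values parseListFromEnv produces for typical SUBREDDITS-style values (the subreddit names are just examples):

// --- illustrative sketch, not part of the diff ---
parseListFromEnv('mealtimevideos, bikela'); // => ['mealtimevideos', 'bikela'] (comma-separated)
parseListFromEnv('mealtimevideos bikela');  // => ['mealtimevideos', 'bikela'] (space-separated)
parseListFromEnv('   ');                    // => undefined (empty after trimming and filtering)
parseListFromEnv(undefined);                // => undefined
// --- end sketch ---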
|
||||
|
||||
export const parseDefaultBotInstanceFromEnv = (): BotInstanceJsonConfig => {
|
||||
const data = {
|
||||
operator: {
|
||||
name: process.env.OPERATOR,
|
||||
display: process.env.OPERATOR_DISPLAY
|
||||
},
|
||||
credentials: {
|
||||
clientId: process.env.CLIENT_ID,
|
||||
clientSecret: process.env.CLIENT_SECRET,
|
||||
accessToken: process.env.ACCESS_TOKEN,
|
||||
refreshToken: process.env.REFRESH_TOKEN,
|
||||
redirectUri: process.env.REDIRECT_URI,
|
||||
},
|
||||
subreddits: {
|
||||
names: subs,
|
||||
names: parseListFromEnv(process.env.SUBREDDITS),
|
||||
wikiConfig: process.env.WIKI_CONFIG,
|
||||
heartbeatInterval: process.env.HEARTBEAT !== undefined ? parseInt(process.env.HEARTBEAT) : undefined,
|
||||
dryRun: parseBool(process.env.DRYRUN, undefined),
|
||||
heartbeatInterval: process.env.HEARTBEAT !== undefined ? parseInt(process.env.HEARTBEAT) : undefined,
|
||||
},
|
||||
snoowrap: {
|
||||
proxy: process.env.PROXY,
|
||||
debug: parseBool(process.env.SNOO_DEBUG, undefined),
|
||||
},
|
||||
polling: {
|
||||
sharedMod: parseBool(process.env.SHARE_MOD),
|
||||
},
|
||||
nanny: {
|
||||
softLimit: process.env.SOFT_LIMIT !== undefined ? parseInt(process.env.SOFT_LIMIT) : undefined,
|
||||
hardLimit: process.env.HARD_LIMIT !== undefined ? parseInt(process.env.HARD_LIMIT) : undefined
|
||||
},
|
||||
};
|
||||
return removeUndefinedKeys(data) as BotInstanceJsonConfig;
|
||||
}
|
||||
|
||||
export const parseOpConfigFromEnv = (): OperatorJsonConfig => {
|
||||
const data = {
|
||||
mode: process.env.MODE !== undefined ? process.env.MODE as ('all' | 'server' | 'client') : undefined,
|
||||
operator: {
|
||||
name: parseListFromEnv(process.env.OPERATOR),
|
||||
display: process.env.OPERATOR_DISPLAY
|
||||
},
|
||||
logging: {
|
||||
// @ts-ignore
|
||||
level: process.env.LOG_LEVEL,
|
||||
path: process.env.LOG_DIR === 'true' ? `${process.cwd()}/logs` : undefined,
|
||||
},
|
||||
snoowrap: {
|
||||
proxy: process.env.PROXY,
|
||||
debug: parseBool(process.env.SNOO_DEBUG, undefined),
|
||||
caching: {
|
||||
provider: {
|
||||
// @ts-ignore
|
||||
store: process.env.CACHING as (CacheProvider | undefined)
|
||||
},
|
||||
authorTTL: process.env.AUTHOR_TTL !== undefined ? parseInt(process.env.AUTHOR_TTL) : undefined
|
||||
},
|
||||
web: {
|
||||
enabled: process.env.WEB !== undefined ? parseBool(process.env.WEB) : undefined,
|
||||
port: process.env.PORT !== undefined ? parseInt(process.env.PORT) : undefined,
|
||||
session: {
|
||||
provider: process.env.SESSION_PROVIDER,
|
||||
secret: process.env.SESSION_SECRET
|
||||
}
|
||||
},
|
||||
polling: {
|
||||
sharedMod: parseBool(process.env.SHARE_MOD),
|
||||
},
|
||||
caching: {
|
||||
provider: {
|
||||
// @ts-ignore
|
||||
store: process.env.CACHING
|
||||
},
|
||||
authorTTL: process.env.AUTHOR_TTL !== undefined ? parseInt(process.env.AUTHOR_TTL) : undefined
|
||||
},
|
||||
api: {
|
||||
softLimit: process.env.SOFT_LIMIT !== undefined ? parseInt(process.env.SOFT_LIMIT) : undefined,
|
||||
hardLimit: process.env.HARD_LIMIT !== undefined ? parseInt(process.env.HARD_LIMIT) : undefined
|
||||
credentials: {
|
||||
clientId: process.env.CLIENT_ID,
|
||||
clientSecret: process.env.CLIENT_SECRET,
|
||||
redirectUri: process.env.REDIRECT_URI,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -436,7 +476,7 @@ export const parseOperatorConfigFromSources = async (args: any): Promise<Operato
|
||||
if (operatorConfig !== undefined) {
|
||||
let rawConfig;
|
||||
try {
|
||||
rawConfig = await readJson(operatorConfig, {log: initLogger});
|
||||
rawConfig = await readConfigFile(operatorConfig, {log: initLogger}) as object;
|
||||
} catch (err) {
|
||||
initLogger.error('Cannot continue app startup because operator config file was not parseable.');
|
||||
err.logged = true;
|
||||
@@ -449,149 +489,276 @@ export const parseOperatorConfigFromSources = async (args: any): Promise<Operato
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
const configFromArgs = parseOpConfigFromArgs(args);
|
||||
const configFromEnv = parseOpConfigFromEnv();
|
||||
const opConfigFromArgs = parseOpConfigFromArgs(args);
|
||||
const opConfigFromEnv = parseOpConfigFromEnv();
|
||||
|
||||
const mergedConfig = merge.all([configFromEnv, configFromFile, configFromArgs], {
|
||||
const defaultBotInstanceFromArgs = parseDefaultBotInstanceFromArgs(args);
|
||||
const defaultBotInstanceFromEnv = parseDefaultBotInstanceFromEnv();
|
||||
const {bots: botInstancesFromFile = [], ...restConfigFile} = configFromFile;
|
||||
|
||||
const mergedConfig = merge.all([opConfigFromEnv, restConfigFile, opConfigFromArgs], {
|
||||
arrayMerge: overwriteMerge,
|
||||
});
|
||||
|
||||
return removeUndefinedKeys(mergedConfig) as OperatorJsonConfig;
|
||||
const defaultBotInstance = merge.all([defaultBotInstanceFromEnv, defaultBotInstanceFromArgs], {
|
||||
arrayMerge: overwriteMerge,
|
||||
}) as BotInstanceJsonConfig;
|
||||
|
||||
if (configFromFile.caching !== undefined) {
|
||||
defaultBotInstance.caching = configFromFile.caching;
|
||||
}
|
||||
|
||||
let botInstances = [];
|
||||
if (botInstancesFromFile.length === 0) {
|
||||
botInstances = [defaultBotInstance];
|
||||
} else {
|
||||
botInstances = botInstancesFromFile.map(x => merge.all([defaultBotInstance, x], {arrayMerge: overwriteMerge}));
|
||||
}
|
||||
|
||||
return removeUndefinedKeys({...mergedConfig, bots: botInstances}) as OperatorJsonConfig;
|
||||
}
|
||||
|
||||
export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): OperatorConfig => {
|
||||
const {
|
||||
mode = 'all',
|
||||
operator: {
|
||||
name,
|
||||
display = 'Anonymous'
|
||||
} = {},
|
||||
credentials: {
|
||||
clientId: ci,
|
||||
clientSecret: cs,
|
||||
...restCred
|
||||
} = {},
|
||||
subreddits: {
|
||||
names = [],
|
||||
wikiConfig = 'botconfig/contextbot',
|
||||
heartbeatInterval = 300,
|
||||
dryRun
|
||||
name = [],
|
||||
display = 'Anonymous',
|
||||
} = {},
|
||||
logging: {
|
||||
level = 'verbose',
|
||||
path,
|
||||
} = {},
|
||||
snoowrap = {},
|
||||
caching: opCache,
|
||||
web: {
|
||||
enabled = true,
|
||||
port = 8085,
|
||||
maxLogs = 200,
|
||||
caching: webCaching = {},
|
||||
session: {
|
||||
secret = randomId(),
|
||||
provider: sessionProvider = { store: 'memory' },
|
||||
} = {}
|
||||
maxAge: sessionMaxAge = 86400,
|
||||
} = {},
|
||||
invites: {
|
||||
maxAge: inviteMaxAge = 0,
|
||||
} = {},
|
||||
clients,
|
||||
credentials: webCredentials,
|
||||
operators,
|
||||
} = {},
|
||||
polling: {
|
||||
sharedMod = false,
|
||||
limit = 100,
|
||||
interval = 30,
|
||||
} = {},
|
||||
caching = 'memory',
|
||||
api: {
|
||||
softLimit = 250,
|
||||
hardLimit = 50
|
||||
port: apiPort = 8095,
|
||||
secret: apiSecret = randomId(),
|
||||
friendly,
|
||||
} = {},
|
||||
bots = [],
|
||||
} = data;
|
||||
|
||||
let cache = {
|
||||
...cacheTTLDefaults,
|
||||
provider: {
|
||||
let cache: StrongCache;
|
||||
let defaultProvider: CacheOptions;
|
||||
let opActionedEventsMax: number | undefined;
|
||||
let opActionedEventsDefault: number = 25;
|
||||
|
||||
if (opCache === undefined) {
|
||||
defaultProvider = {
|
||||
store: 'memory',
|
||||
...cacheOptDefaults
|
||||
}
|
||||
};
|
||||
|
||||
if (typeof caching === 'string') {
|
||||
cache = {
|
||||
provider: {
|
||||
store: caching as CacheProvider,
|
||||
...cacheOptDefaults
|
||||
},
|
||||
...cacheTTLDefaults
|
||||
};
|
||||
} else if (typeof caching === 'object') {
|
||||
const {provider, ...restConfig} = caching;
|
||||
cache = {
|
||||
...cacheTTLDefaults,
|
||||
provider: defaultProvider,
|
||||
actionedEventsDefault: opActionedEventsDefault,
|
||||
};
|
||||
|
||||
} else {
|
||||
const {provider, actionedEventsMax, actionedEventsDefault = opActionedEventsDefault, ...restConfig} = opCache;
|
||||
|
||||
if (actionedEventsMax !== undefined && actionedEventsMax !== null) {
|
||||
opActionedEventsMax = actionedEventsMax;
|
||||
opActionedEventsDefault = Math.min(actionedEventsDefault, actionedEventsMax);
|
||||
}
|
||||
|
||||
if (typeof provider === 'string') {
|
||||
cache = {
|
||||
...cacheTTLDefaults,
|
||||
...restConfig,
|
||||
provider: {
|
||||
store: provider as CacheProvider,
|
||||
...cacheOptDefaults
|
||||
}
|
||||
}
|
||||
defaultProvider = {
|
||||
store: provider as CacheProvider,
|
||||
...cacheOptDefaults
|
||||
};
|
||||
} else {
|
||||
const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
|
||||
cache = {
|
||||
...cacheTTLDefaults,
|
||||
...restConfig,
|
||||
provider: {
|
||||
store,
|
||||
...cacheOptDefaults,
|
||||
...rest,
|
||||
},
|
||||
}
|
||||
defaultProvider = {
|
||||
store,
|
||||
...cacheOptDefaults,
|
||||
...rest,
|
||||
};
|
||||
}
|
||||
cache = {
|
||||
...cacheTTLDefaults,
|
||||
...restConfig,
|
||||
actionedEventsMax: opActionedEventsMax,
|
||||
actionedEventsDefault: opActionedEventsDefault,
|
||||
provider: defaultProvider,
|
||||
}
|
||||
}
|
||||
|
||||
const config: OperatorConfig = {
|
||||
operator: {
|
||||
name,
|
||||
display
|
||||
},
|
||||
credentials: {
|
||||
let hydratedBots: BotInstanceConfig[] = bots.map(x => {
|
||||
const {
|
||||
name: botName,
|
||||
polling: {
|
||||
sharedMod = false,
|
||||
limit = 100,
|
||||
interval = 30,
|
||||
} = {},
|
||||
queue: {
|
||||
maxWorkers = 1,
|
||||
} = {},
|
||||
caching,
|
||||
nanny: {
|
||||
softLimit = 250,
|
||||
hardLimit = 50
|
||||
} = {},
|
||||
snoowrap = {},
|
||||
credentials: {
|
||||
clientId: ci,
|
||||
clientSecret: cs,
|
||||
...restCred
|
||||
} = {},
|
||||
subreddits: {
|
||||
names = [],
|
||||
exclude = [],
|
||||
wikiConfig = 'botconfig/contextbot',
|
||||
dryRun,
|
||||
heartbeatInterval = 300,
|
||||
} = {},
|
||||
} = x;
|
||||
|
||||
|
||||
let botCache: StrongCache;
|
||||
let botActionedEventsDefault: number;
|
||||
|
||||
if (caching === undefined) {
|
||||
|
||||
botCache = {
|
||||
...cacheTTLDefaults,
|
||||
actionedEventsDefault: opActionedEventsDefault,
|
||||
actionedEventsMax: opActionedEventsMax,
|
||||
provider: {
|
||||
store: 'memory',
|
||||
...cacheOptDefaults
|
||||
}
|
||||
};
|
||||
} else {
|
||||
const {
|
||||
provider,
|
||||
actionedEventsMax = opActionedEventsMax,
|
||||
actionedEventsDefault = opActionedEventsDefault,
|
||||
...restConfig
|
||||
} = caching;
|
||||
|
||||
botActionedEventsDefault = actionedEventsDefault;
|
||||
if(actionedEventsMax !== undefined) {
|
||||
botActionedEventsDefault = Math.min(actionedEventsDefault, actionedEventsMax);
|
||||
}
|
||||
|
||||
if (typeof provider === 'string') {
|
||||
botCache = {
|
||||
...cacheTTLDefaults,
|
||||
...restConfig,
|
||||
actionedEventsDefault: botActionedEventsDefault,
|
||||
provider: {
|
||||
store: provider as CacheProvider,
|
||||
...cacheOptDefaults
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
|
||||
botCache = {
|
||||
...cacheTTLDefaults,
|
||||
...restConfig,
|
||||
actionedEventsDefault: botActionedEventsDefault,
|
||||
actionedEventsMax,
|
||||
provider: {
|
||||
store,
|
||||
...cacheOptDefaults,
|
||||
...rest,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const botCreds = {
|
||||
clientId: (ci as string),
|
||||
clientSecret: (cs as string),
|
||||
...restCred,
|
||||
};
|
||||
if (botCache.provider.prefix === undefined || botCache.provider.prefix === defaultProvider.prefix) {
|
||||
// need to provide unique prefix to bot
|
||||
botCache.provider.prefix = buildCachePrefix([botCache.provider.prefix, 'bot', (botName || objectHash.sha1(botCreds))]);
|
||||
}
|
||||
|
||||
return {
|
||||
name: botName,
|
||||
snoowrap,
|
||||
subreddits: {
|
||||
names,
|
||||
exclude,
|
||||
wikiConfig,
|
||||
heartbeatInterval,
|
||||
dryRun,
|
||||
},
|
||||
credentials: botCreds,
|
||||
caching: botCache,
|
||||
polling: {
|
||||
sharedMod,
|
||||
limit,
|
||||
interval,
|
||||
},
|
||||
queue: {
|
||||
maxWorkers,
|
||||
},
|
||||
nanny: {
|
||||
softLimit,
|
||||
hardLimit
|
||||
}
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
const defaultOperators = typeof name === 'string' ? [name] : name;
|
||||
|
||||
const config: OperatorConfig = {
|
||||
mode,
|
||||
operator: {
|
||||
name: defaultOperators,
|
||||
display,
|
||||
},
|
||||
logging: {
|
||||
level,
|
||||
path
|
||||
},
|
||||
snoowrap,
|
||||
subreddits: {
|
||||
names,
|
||||
wikiConfig,
|
||||
heartbeatInterval,
|
||||
dryRun,
|
||||
},
|
||||
caching: cache,
|
||||
web: {
|
||||
enabled,
|
||||
port,
|
||||
caching: {
|
||||
...defaultProvider,
|
||||
...webCaching
|
||||
},
|
||||
invites: {
|
||||
maxAge: inviteMaxAge,
|
||||
},
|
||||
session: {
|
||||
secret,
|
||||
provider: typeof sessionProvider === 'string' ? {
|
||||
...buildCacheOptionsFromProvider({
|
||||
ttl: 86400000,
|
||||
store: sessionProvider,
|
||||
})
|
||||
} : {
|
||||
...buildCacheOptionsFromProvider(sessionProvider),
|
||||
ttl: 86400000,
|
||||
},
|
||||
maxAge: sessionMaxAge,
|
||||
},
|
||||
maxLogs,
|
||||
},
|
||||
// @ts-ignore
|
||||
caching: cache,
|
||||
polling: {
|
||||
sharedMod,
|
||||
limit,
|
||||
interval,
|
||||
clients: clients === undefined ? [{host: 'localhost:8095', secret: apiSecret}] : clients,
|
||||
credentials: webCredentials as RequiredWebRedditCredentials,
|
||||
operators: operators || defaultOperators,
|
||||
},
|
||||
api: {
|
||||
softLimit,
|
||||
hardLimit
|
||||
}
|
||||
port: apiPort,
|
||||
secret: apiSecret,
|
||||
friendly
|
||||
},
|
||||
bots: hydratedBots,
|
||||
};
|
||||
|
||||
return config;
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
//# sourceMappingURL=JsonConfig.js.map
|
||||
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"JsonConfig.js","sourceRoot":"","sources":["JsonConfig.ts"],"names":[],"mappings":""}
|
||||
@@ -3,22 +3,24 @@ import {NotificationContent} from "../Common/interfaces";
|
||||
|
||||
class DiscordNotifier {
|
||||
name: string
|
||||
botName: string
|
||||
type: string = 'Discord';
|
||||
url: string;
|
||||
|
||||
constructor(name: string, url: string) {
|
||||
constructor(name: string, botName: string, url: string) {
|
||||
this.name = name;
|
||||
this.url = url;
|
||||
this.botName = botName;
|
||||
}
|
||||
|
||||
handle(val: NotificationContent) {
|
||||
async handle(val: NotificationContent) {
|
||||
const h = new webhook.Webhook(this.url);
|
||||
|
||||
const hook = new webhook.MessageBuilder();
|
||||
|
||||
const {logLevel, title, footer, body = ''} = val;
|
||||
|
||||
hook.setName('RCB')
|
||||
hook.setName(this.botName === 'ContextMod' ? 'ContextMod' : `(ContextMod) ${this.botName}`)
|
||||
.setTitle(title)
|
||||
.setDescription(body)
|
||||
|
||||
@@ -39,7 +41,7 @@ class DiscordNotifier {
|
||||
break;
|
||||
}
|
||||
|
||||
h.send(hook);
|
||||
await h.send(hook);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -17,7 +17,7 @@ class NotificationManager {
|
||||
subreddit: Subreddit;
|
||||
name: string;
|
||||
|
||||
constructor(logger: Logger, subreddit: Subreddit, displayName: string, config?: NotificationConfig) {
|
||||
constructor(logger: Logger, subreddit: Subreddit, displayName: string, botName: string, config?: NotificationConfig) {
|
||||
this.logger = logger.child({leaf: 'Notifications'}, mergeArr);
|
||||
this.subreddit = subreddit;
|
||||
this.name = displayName;
|
||||
@@ -27,7 +27,7 @@ class NotificationManager {
|
||||
for (const p of providers) {
|
||||
switch (p.type) {
|
||||
case 'discord':
|
||||
this.notifiers.push(new DiscordNotifier(p.name, p.url));
|
||||
this.notifiers.push(new DiscordNotifier(p.name, botName, p.url));
|
||||
break;
|
||||
default:
|
||||
this.logger.warn(`Notification provider type of ${p.type} not recognized.`);
|
||||
@@ -64,7 +64,7 @@ class NotificationManager {
|
||||
}
|
||||
}
|
||||
|
||||
handle(name: NotificationEventType, title: string, body?: string, causedBy?: string, logLevel?: string) {
|
||||
async handle(name: NotificationEventType, title: string, body?: string, causedBy?: string, logLevel?: string) {
|
||||
|
||||
if (this.notifiers.length === 0 || this.events.length === 0) {
|
||||
return;
|
||||
@@ -109,7 +109,7 @@ class NotificationManager {
|
||||
this.logger.info(`Sending notification for ${name} to providers: ${notifiers.map(x => `${x.name} (${x.type})`).join(', ')}`);
|
||||
|
||||
for (const n of notifiers) {
|
||||
n.handle({
|
||||
await n.handle({
|
||||
title: `${title} (${this.name})`,
|
||||
body: body || '',
|
||||
footer: footer.length > 0 ? footer.join('\n') : undefined,
|
||||
|
||||
@@ -5,9 +5,10 @@ import Submission from "snoowrap/dist/objects/Submission";
|
||||
import {getAttributionIdentifier} from "../Utils/SnoowrapUtils";
|
||||
import dayjs from "dayjs";
|
||||
import {
|
||||
asSubmission,
|
||||
comparisonTextOp,
|
||||
FAIL,
|
||||
formatNumber,
|
||||
formatNumber, getActivitySubredditName, isSubmission,
|
||||
parseGenericValueOrPercentComparison,
|
||||
parseSubredditName,
|
||||
PASS
|
||||
@@ -52,8 +53,6 @@ export interface AttributionCriteria {
|
||||
/**
|
||||
* A list of domains whose Activities will be tested against `threshold`.
|
||||
*
|
||||
* If this is present then `aggregateOn` is ignored.
|
||||
*
|
||||
* The values are tested as partial strings so you do not need to include full URLs, just the part that matters.
|
||||
*
|
||||
* EX `["youtube"]` will match submissions with the domain `https://youtube.com/c/aChannel`
|
||||
@@ -97,7 +96,7 @@ export interface AttributionCriteria {
|
||||
exclude?: string[],
|
||||
|
||||
/**
|
||||
* If `domains` is not specified this list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`
|
||||
* This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`
|
||||
*
|
||||
* * If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)
|
||||
* * If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)
|
||||
@@ -190,9 +189,9 @@ export class AttributionRule extends Rule {
|
||||
let activities = thresholdOn === 'submissions' ? await this.resources.getAuthorSubmissions(item.author, {window: window}) : await this.resources.getAuthorActivities(item.author, {window: window});
|
||||
activities = activities.filter(act => {
|
||||
if (include.length > 0) {
|
||||
return include.some(x => x === act.subreddit.display_name.toLowerCase());
|
||||
return include.some(x => x === getActivitySubredditName(act).toLowerCase());
|
||||
} else if (exclude.length > 0) {
|
||||
return !exclude.some(x => x === act.subreddit.display_name.toLowerCase())
|
||||
return !exclude.some(x => x === getActivitySubredditName(act).toLowerCase())
|
||||
}
|
||||
return true;
|
||||
});
|
||||
@@ -219,7 +218,7 @@ export class AttributionRule extends Rule {
|
||||
|
||||
const realDomains: DomainInfo[] = domains.map(x => {
|
||||
if(x === SUBMISSION_DOMAIN) {
|
||||
if(!(item instanceof Submission)) {
|
||||
if(!(asSubmission(item))) {
|
||||
throw new SimpleError('Cannot run Attribution Rule with the domain SELF:AGG on a Comment');
|
||||
}
|
||||
return getAttributionIdentifier(item, consolidateMediaDomains);
|
||||
@@ -228,7 +227,7 @@ export class AttributionRule extends Rule {
|
||||
});
|
||||
const realDomainIdents = realDomains.map(x => x.aliases).flat(1).map(x => x.toLowerCase());
|
||||
|
||||
const submissions: Submission[] = thresholdOn === 'submissions' ? activities as Submission[] : activities.filter(x => x instanceof Submission) as Submission[];
|
||||
const submissions: Submission[] = thresholdOn === 'submissions' ? activities as Submission[] : activities.filter(x => isSubmission(x)) as Submission[];
|
||||
const aggregatedSubmissions = submissions.reduce((acc: Map<string, DomainAgg>, sub) => {
|
||||
const domainInfo = getAttributionIdentifier(sub, consolidateMediaDomains)
|
||||
|
||||
@@ -239,7 +238,7 @@ export class AttributionRule extends Rule {
|
||||
domainType = 'self';
|
||||
}
|
||||
|
||||
if(realDomains.length === 0 && aggregateOn.length !== 0) {
|
||||
if(aggregateOn.length !== 0) {
|
||||
if(domainType === 'media' && !aggregateOn.includes('media')) {
|
||||
return acc;
|
||||
}
|
||||
@@ -415,7 +414,7 @@ export interface AttributionOptions extends AttributionConfig, RuleOptions {
|
||||
/**
|
||||
* Aggregates all of the domain/media accounts attributed to an author's Submission history. If any domain is over the threshold the rule is triggered
|
||||
*
|
||||
* Available data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):
|
||||
* Available data for [Action templating](https://github.com/FoxxMD/context-mod#action-templating):
|
||||
*
|
||||
* ```
|
||||
* triggeredDomainCount => Number of domains that met the threshold
|
||||
|
||||
@@ -1,17 +1,19 @@
|
||||
|
||||
import {ActivityWindowType, CompareValueOrPercent, ThresholdCriteria} from "../Common/interfaces";
|
||||
import {ActivityWindowType, CompareValueOrPercent, SubredditState, ThresholdCriteria} from "../Common/interfaces";
|
||||
import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import {getAuthorActivities} from "../Utils/SnoowrapUtils";
|
||||
import dayjs from "dayjs";
|
||||
import {
|
||||
asSubmission,
|
||||
comparisonTextOp,
|
||||
FAIL,
|
||||
formatNumber,
|
||||
formatNumber, getActivitySubredditName, isSubmission,
|
||||
parseGenericValueOrPercentComparison, parseSubredditName,
|
||||
PASS,
|
||||
percentFromString
|
||||
percentFromString, toStrongSubredditState
|
||||
} from "../util";
|
||||
import {Comment} from "snoowrap";
|
||||
|
||||
export interface CommentThresholdCriteria extends ThresholdCriteria {
|
||||
/**
|
||||
@@ -23,42 +25,56 @@ export interface CommentThresholdCriteria extends ThresholdCriteria {
|
||||
asOp?: boolean
|
||||
}
|
||||
/**
|
||||
* If both `submission` and `comment` are defined then criteria will only trigger if BOTH thresholds are met
|
||||
* Criteria will only trigger if ALL present thresholds (comment, submission, total) are met
|
||||
* */
|
||||
export interface HistoryCriteria {
|
||||
|
||||
/**
|
||||
* A string containing a comparison operator and a value to compare submissions against
|
||||
* A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) submissions against
|
||||
*
|
||||
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign]`
|
||||
*
|
||||
* * EX `> 100` => greater than 100 submissions
|
||||
* * EX `<= 75%` => submissions are equal to or less than 75% of all Activities
|
||||
* * EX `> 100` => greater than 100 filtered submissions
|
||||
* * EX `<= 75%` => filtered submissions are equal to or less than 75% of unfiltered Activities
|
||||
*
|
||||
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
|
||||
* */
|
||||
submission?: CompareValueOrPercent
|
||||
/**
|
||||
* A string containing a comparison operator and a value to compare comments against
|
||||
* A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) comments against
|
||||
*
|
||||
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`
|
||||
*
|
||||
* * EX `> 100` => greater than 100 comments
|
||||
* * EX `<= 75%` => comments are equal to or less than 75% of all Activities
|
||||
* * EX `<= 75%` => comments are equal to or less than 75% of unfiltered Activities
|
||||
*
|
||||
* If your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:
|
||||
*
|
||||
* * EX `> 100 OP` => greater than 100 comments as OP
|
||||
* * EX `<= 25% as OP` => Comments as OP were less then or equal to 25% of **all Comments**
|
||||
* * EX `> 100 OP` => greater than 100 filtered comments as OP
|
||||
* * EX `<= 25% as OP` => **Filtered** comments as OP were less then or equal to 25% of **unfiltered Comments**
|
||||
*
|
||||
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
|
||||
* */
|
||||
comment?: CompareValueOrPercent
|
||||
|
||||
/**
|
||||
* A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`) activities against
|
||||
*
|
||||
* **Note:** This is only useful if using `include` or `exclude` otherwise percent will always be 100% and total === activityTotal
|
||||
*
|
||||
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`
|
||||
*
|
||||
* * EX `> 100` => greater than 100 filtered activities
|
||||
* * EX `<= 75%` => filtered activities are equal to or less than 75% of all Activities
|
||||
*
|
||||
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
|
||||
* */
|
||||
total?: CompareValueOrPercent
|
||||
|
||||
window: ActivityWindowType
|
||||
|
||||
/**
|
||||
* The minimum number of activities that must exist from the `window` results for this criteria to run
|
||||
* The minimum number of **filtered** activities that must exist from the `window` results for this criteria to run
|
||||
* @default 5
|
||||
* */
|
||||
minActivityCount?: number
|
||||
@@ -68,8 +84,9 @@ export interface HistoryCriteria {
|
||||
export class HistoryRule extends Rule {
|
||||
criteria: HistoryCriteria[];
|
||||
condition: 'AND' | 'OR';
|
||||
include: string[];
|
||||
exclude: string[];
|
||||
include: (string | SubredditState)[];
|
||||
exclude: (string | SubredditState)[];
|
||||
activityFilterFunc: (x: Submission|Comment) => Promise<boolean> = async (x) => true;
|
||||
|
||||
constructor(options: HistoryOptions) {
|
||||
super(options);
|
||||
@@ -85,8 +102,41 @@ export class HistoryRule extends Rule {
|
||||
if (this.criteria.length === 0) {
|
||||
throw new Error('Must provide at least one HistoryCriteria');
|
||||
}
|
||||
this.include = include.map(x => parseSubredditName(x).toLowerCase());
|
||||
this.exclude = exclude.map(x => parseSubredditName(x).toLowerCase());
|
||||
|
||||
this.include = include;
|
||||
this.exclude = exclude;
|
||||
|
||||
if(this.include.length > 0) {
|
||||
const subStates = include.map((x) => {
|
||||
if(typeof x === 'string') {
|
||||
return toStrongSubredditState({name: x, stateDescription: x}, {defaultFlags: 'i', generateDescription: true});
|
||||
}
|
||||
return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
|
||||
});
|
||||
this.activityFilterFunc = async (x: Submission|Comment) => {
|
||||
for(const ss of subStates) {
|
||||
if(await this.resources.testSubredditCriteria(x, ss)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
} else if(this.exclude.length > 0) {
|
||||
const subStates = exclude.map((x) => {
|
||||
if(typeof x === 'string') {
|
||||
return toStrongSubredditState({name: x, stateDescription: x}, {defaultFlags: 'i', generateDescription: true});
|
||||
}
|
||||
return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
|
||||
});
|
||||
this.activityFilterFunc = async (x: Submission|Comment) => {
|
||||
for(const ss of subStates) {
|
||||
if(await this.resources.testSubredditCriteria(x, ss)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
getKind(): string {
|
||||
@@ -107,25 +157,23 @@ export class HistoryRule extends Rule {
|
||||
|
||||
for (const criteria of this.criteria) {
|
||||
|
||||
const {comment, window, submission, minActivityCount = 5} = criteria;
|
||||
const {comment, window, submission, total, minActivityCount = 5} = criteria;
|
||||
|
||||
let activities = await this.resources.getAuthorActivities(item.author, {window: window});
|
||||
activities = activities.filter(act => {
|
||||
if (this.include.length > 0) {
|
||||
return this.include.some(x => x === act.subreddit.display_name.toLowerCase());
|
||||
} else if (this.exclude.length > 0) {
|
||||
return !this.exclude.some(x => x === act.subreddit.display_name.toLowerCase())
|
||||
const filteredActivities = [];
|
||||
for(const a of activities) {
|
||||
if(await this.activityFilterFunc(a)) {
|
||||
filteredActivities.push(a);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
if (activities.length < minActivityCount) {
|
||||
if (filteredActivities.length < minActivityCount) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const activityTotal = activities.length;
|
||||
const {submissionTotal, commentTotal, opTotal} = activities.reduce((acc, act) => {
|
||||
if(act instanceof Submission) {
|
||||
if(asSubmission(act)) {
|
||||
return {...acc, submissionTotal: acc.submissionTotal + 1};
|
||||
}
|
||||
let a = {...acc, commentTotal: acc.commentTotal + 1};
|
||||
@@ -134,6 +182,24 @@ export class HistoryRule extends Rule {
|
||||
}
|
||||
return a;
|
||||
},{submissionTotal: 0, commentTotal: 0, opTotal: 0});
|
||||
let fSubmissionTotal = submissionTotal;
|
||||
let fCommentTotal = commentTotal;
|
||||
let fOpTotal = opTotal;
|
||||
if(activities.length !== filteredActivities.length) {
|
||||
const filteredCounts = filteredActivities.reduce((acc, act) => {
|
||||
if(asSubmission(act)) {
|
||||
return {...acc, submissionTotal: acc.submissionTotal + 1};
|
||||
}
|
||||
let a = {...acc, commentTotal: acc.commentTotal + 1};
|
||||
if(act.is_submitter) {
|
||||
a.opTotal = a.opTotal + 1;
|
||||
}
|
||||
return a;
|
||||
},{submissionTotal: 0, commentTotal: 0, opTotal: 0});
|
||||
fSubmissionTotal = filteredCounts.submissionTotal;
|
||||
fCommentTotal = filteredCounts.commentTotal;
|
||||
fOpTotal = filteredCounts.opTotal;
|
||||
}
|
||||
|
||||
let commentTrigger = undefined;
|
||||
if(comment !== undefined) {
|
||||
@@ -142,15 +208,15 @@ export class HistoryRule extends Rule {
|
||||
if(isPercent) {
|
||||
const per = value / 100;
|
||||
if(asOp) {
|
||||
commentTrigger = comparisonTextOp(opTotal / commentTotal, operator, per);
|
||||
commentTrigger = comparisonTextOp(fOpTotal / commentTotal, operator, per);
|
||||
} else {
|
||||
commentTrigger = comparisonTextOp(commentTotal / activityTotal, operator, per);
|
||||
commentTrigger = comparisonTextOp(fCommentTotal / activityTotal, operator, per);
|
||||
}
|
||||
} else {
|
||||
if(asOp) {
|
||||
commentTrigger = comparisonTextOp(opTotal, operator, value);
|
||||
commentTrigger = comparisonTextOp(fOpTotal, operator, value);
|
||||
} else {
|
||||
commentTrigger = comparisonTextOp(commentTotal, operator, value);
|
||||
commentTrigger = comparisonTextOp(fCommentTotal, operator, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -160,27 +226,40 @@ export class HistoryRule extends Rule {
|
||||
const {operator, value, isPercent} = parseGenericValueOrPercentComparison(submission);
|
||||
if(isPercent) {
|
||||
const per = value / 100;
|
||||
submissionTrigger = comparisonTextOp(submissionTotal / activityTotal, operator, per);
|
||||
submissionTrigger = comparisonTextOp(fSubmissionTotal / activityTotal, operator, per);
|
||||
} else {
|
||||
submissionTrigger = comparisonTextOp(submissionTotal, operator, value);
|
||||
submissionTrigger = comparisonTextOp(fSubmissionTotal, operator, value);
|
||||
}
|
||||
}
|
||||
|
||||
let totalTrigger = undefined;
|
||||
if(total !== undefined) {
|
||||
const {operator, value, isPercent} = parseGenericValueOrPercentComparison(total);
|
||||
if(isPercent) {
|
||||
const per = value / 100;
|
||||
totalTrigger = comparisonTextOp(filteredActivities.length / activityTotal, operator, per);
|
||||
} else {
|
||||
totalTrigger = comparisonTextOp(filteredActivities.length, operator, value);
|
||||
}
|
||||
}
|
||||
|
||||
const firstActivity = activities[0];
|
||||
const lastActivity = activities[activities.length - 1];
|
||||
|
||||
const activityTotalWindow = dayjs.duration(dayjs(firstActivity.created_utc * 1000).diff(dayjs(lastActivity.created_utc * 1000)));
|
||||
const activityTotalWindow = activities.length === 0 ? dayjs.duration(0, 's') : dayjs.duration(dayjs(firstActivity.created_utc * 1000).diff(dayjs(lastActivity.created_utc * 1000)));
|
||||
|
||||
criteriaResults.push({
|
||||
criteria,
|
||||
activityTotal,
|
||||
activityTotalWindow,
|
||||
submissionTotal,
|
||||
commentTotal,
|
||||
opTotal,
|
||||
submissionTotal: fSubmissionTotal,
|
||||
commentTotal: fCommentTotal,
|
||||
opTotal: fOpTotal,
|
||||
filteredTotal: filteredActivities.length,
|
||||
submissionTrigger,
|
||||
commentTrigger,
|
||||
triggered: (submissionTrigger === undefined || submissionTrigger === true) && (commentTrigger === undefined || commentTrigger === true)
|
||||
totalTrigger,
|
||||
triggered: (submissionTrigger === undefined || submissionTrigger === true) && (commentTrigger === undefined || commentTrigger === true) && (totalTrigger === undefined || totalTrigger === true)
|
||||
});
|
||||
}
|
||||
|
||||
@@ -223,36 +302,50 @@ export class HistoryRule extends Rule {
|
||||
activityTotalWindow,
|
||||
submissionTotal,
|
||||
commentTotal,
|
||||
filteredTotal,
|
||||
opTotal,
|
||||
criteria: {
|
||||
comment,
|
||||
submission,
|
||||
total,
|
||||
window,
|
||||
},
|
||||
criteria,
|
||||
triggered,
|
||||
submissionTrigger,
|
||||
commentTrigger,
|
||||
totalTrigger,
|
||||
} = results;
|
||||
|
||||
const data: any = {
|
||||
activityTotal,
|
||||
submissionTotal,
|
||||
commentTotal,
|
||||
filteredTotal,
|
||||
opTotal,
|
||||
commentPercent: formatNumber((commentTotal/activityTotal)*100),
|
||||
submissionPercent: formatNumber((submissionTotal/activityTotal)*100),
|
||||
opPercent: formatNumber((opTotal/commentTotal)*100),
|
||||
filteredPercent: formatNumber((filteredTotal/activityTotal)*100),
|
||||
criteria,
|
||||
window: typeof window === 'number' ? `${activityTotal} Items` : activityTotalWindow.humanize(true),
|
||||
window: typeof window === 'number' || activityTotal === 0 ? `${activityTotal} Items` : activityTotalWindow.humanize(true),
|
||||
triggered,
|
||||
submissionTrigger,
|
||||
commentTrigger,
|
||||
totalTrigger,
|
||||
};
|
||||
|
||||
let thresholdSummary = [];
|
||||
let totalSummary;
|
||||
let submissionSummary;
|
||||
let commentSummary;
|
||||
if(total !== undefined) {
|
||||
const {operator, value, isPercent, displayText} = parseGenericValueOrPercentComparison(total);
|
||||
const suffix = !isPercent ? 'Items' : `(${formatNumber((filteredTotal/activityTotal)*100)}%) of ${activityTotal} Total`;
|
||||
totalSummary = `${includePassFailSymbols ? `${submissionTrigger ? PASS : FAIL} ` : ''}Filtered Activities (${filteredTotal}) were${totalTrigger ? '' : ' not'} ${displayText} ${suffix}`;
|
||||
data.totalSummary = totalSummary;
|
||||
thresholdSummary.push(totalSummary);
|
||||
}
|
||||
if(submission !== undefined) {
|
||||
const {operator, value, isPercent, displayText} = parseGenericValueOrPercentComparison(submission);
|
||||
const suffix = !isPercent ? 'Items' : `(${formatNumber((submissionTotal/activityTotal)*100)}%) of ${activityTotal} Total`;
|
||||
@@ -298,21 +391,45 @@ interface HistoryConfig {
|
||||
condition?: 'AND' | 'OR'
|
||||
|
||||
/**
|
||||
* Only include Submissions from this list of Subreddits (by name, case-insensitive)
|
||||
* If present, activities will be counted only if they are found in this list of Subreddits.
|
||||
*
|
||||
* EX `["mealtimevideos","askscience"]`
|
||||
* @examples ["mealtimevideos","askscience"]
|
||||
* @minItems 1
|
||||
* Each value in the list can be either:
|
||||
*
|
||||
* * string (name of subreddit)
|
||||
* * regular expression to run on the subreddit name
|
||||
* * `SubredditState`
|
||||
*
|
||||
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
|
||||
*
|
||||
* **Note:** This affects **post-window retrieval** activities. So that:
|
||||
*
|
||||
* * `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering
|
||||
* * all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**
|
||||
* * -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`
|
||||
*
|
||||
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
|
||||
* */
|
||||
include?: string[],
|
||||
include?: (string | SubredditState)[],
|
||||
/**
|
||||
* Do not include Submissions from this list of Subreddits (by name, case-insensitive)
|
||||
* If present, activities will be counted only if they are **NOT** found in this list of Subreddits
|
||||
*
|
||||
* EX `["mealtimevideos","askscience"]`
|
||||
* @examples ["mealtimevideos","askscience"]
|
||||
* @minItems 1
|
||||
* Each value in the list can be either:
|
||||
*
|
||||
* * string (name of subreddit)
|
||||
* * regular expression to run on the subreddit name
|
||||
* * `SubredditState`
|
||||
*
|
||||
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
|
||||
*
|
||||
* **Note:** This affects **post-window retrieval** activities. So that:
|
||||
*
|
||||
* * `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering
|
||||
* * all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**
|
||||
* * -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`
|
||||
*
|
||||
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
|
||||
* */
|
||||
exclude?: string[],
|
||||
exclude?: (string | SubredditState)[],
|
||||
}
|
||||
|
||||
export interface HistoryOptions extends HistoryConfig, RuleOptions {
|
||||
@@ -322,7 +439,7 @@ export interface HistoryOptions extends HistoryConfig, RuleOptions {
|
||||
/**
|
||||
* Aggregates an Author's submission and comment history. Rule can be triggered on count/percent of total (for either or both comment/sub totals) as well as comment OP total.
|
||||
*
|
||||
* Available data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):
|
||||
* Available data for [Action templating](https://github.com/FoxxMD/context-mod#action-templating):
|
||||
*
|
||||
* ```
|
||||
* activityTotal => Total number of activities
|
||||
|
||||
@@ -2,25 +2,26 @@ import {Rule, RuleJSONConfig, RuleOptions, RulePremise, RuleResult} from "./inde
|
||||
import {Comment, VoteableContent} from "snoowrap";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import {
|
||||
activityWindowText,
|
||||
comparisonTextOp, FAIL, formatNumber,
|
||||
parseGenericValueOrPercentComparison, parseSubredditName,
|
||||
activityWindowText, asSubmission,
|
||||
comparisonTextOp, FAIL, formatNumber, getActivitySubredditName, isSubmission, objectToStringSummary,
|
||||
parseGenericValueOrPercentComparison, parseStringToRegex, parseSubredditName,
|
||||
parseUsableLinkIdentifier,
|
||||
PASS
|
||||
PASS, toStrongSubredditState
|
||||
} from "../util";
|
||||
import {
|
||||
ActivityWindow,
|
||||
ActivityWindowCriteria,
|
||||
ActivityWindowType,
|
||||
ReferenceSubmission,
|
||||
SubredditCriteria
|
||||
ActivityWindowType, CommentState,
|
||||
ReferenceSubmission, StrongSubredditState, SubmissionState,
|
||||
SubredditCriteria, SubredditState
|
||||
} from "../Common/interfaces";
|
||||
import {SubredditResources} from "../Subreddit/SubredditResources";
|
||||
|
||||
const parseLink = parseUsableLinkIdentifier();
|
||||
|
||||
export class RecentActivityRule extends Rule {
|
||||
window: ActivityWindowType;
|
||||
thresholds: SubThreshold[];
|
||||
thresholds: ActivityThreshold[];
|
||||
useSubmissionAsReference: boolean;
|
||||
lookAt?: 'comments' | 'submissions';
|
||||
|
||||
@@ -67,14 +68,14 @@ export class RecentActivityRule extends Rule {
|
||||
|
||||
let viableActivity = activities;
|
||||
if (this.useSubmissionAsReference) {
|
||||
if (!(item instanceof Submission)) {
|
||||
if (!asSubmission(item)) {
|
||||
this.logger.warn('Cannot use post as reference because triggered item is not a Submission');
|
||||
} else if (item.is_self) {
|
||||
this.logger.warn('Cannot use post as reference because triggered Submission is not a link type');
|
||||
} else {
|
||||
const usableUrl = parseLink(await item.url);
|
||||
viableActivity = viableActivity.filter((x) => {
|
||||
if (!(x instanceof Submission)) {
|
||||
if (!asSubmission(x)) {
|
||||
return false;
|
||||
}
|
||||
if (x.url === undefined) {
|
||||
@@ -84,29 +85,59 @@ export class RecentActivityRule extends Rule {
|
||||
});
|
||||
}
|
||||
}
|
||||
const groupedActivity = viableActivity.reduce((grouped, activity) => {
|
||||
const s = activity.subreddit.display_name.toLowerCase();
|
||||
grouped[s] = (grouped[s] || []).concat(activity);
|
||||
return grouped;
|
||||
}, {} as Record<string, (Submission | Comment)[]>);
|
||||
|
||||
|
||||
const summaries = [];
|
||||
let totalTriggeredOn;
|
||||
for (const triggerSet of this.thresholds) {
|
||||
let currCount = 0;
|
||||
const presentSubs = [];
|
||||
const {threshold = '>= 1', subreddits = []} = triggerSet;
|
||||
for (const sub of subreddits.map(x => parseSubredditName(x))) {
|
||||
const isub = sub.toLowerCase();
|
||||
const {[isub]: tSub = []} = groupedActivity;
|
||||
if (tSub.length > 0) {
|
||||
currCount += tSub.length;
|
||||
presentSubs.push(sub);
|
||||
const presentSubs: string[] = [];
|
||||
let combinedKarma = 0;
|
||||
const {
|
||||
threshold = '>= 1',
|
||||
subreddits = [],
|
||||
karma: karmaThreshold,
|
||||
commentState,
|
||||
submissionState,
|
||||
} = triggerSet;
|
||||
|
||||
// convert subreddits array into entirely StrongSubredditState
|
||||
const subStates: StrongSubredditState[] = subreddits.map((x) => {
|
||||
if(typeof x === 'string') {
|
||||
return toStrongSubredditState({name: x, stateDescription: x}, {defaultFlags: 'i', generateDescription: true});
|
||||
}
|
||||
return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
|
||||
});
|
||||
|
||||
for(const activity of viableActivity) {
|
||||
if(asSubmission(activity) && submissionState !== undefined) {
|
||||
if(!(await this.resources.testItemCriteria(activity, [submissionState]))) {
|
||||
continue;
|
||||
}
|
||||
} else if(commentState !== undefined) {
|
||||
if(!(await this.resources.testItemCriteria(activity, [commentState]))) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
let inSubreddits = false;
|
||||
for(const ss of subStates) {
|
||||
const res = await this.resources.testSubredditCriteria(activity, ss);
|
||||
if(res) {
|
||||
inSubreddits = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if(inSubreddits) {
|
||||
currCount++;
|
||||
combinedKarma += activity.score;
|
||||
const pSub = getActivitySubredditName(activity);
|
||||
if(!presentSubs.includes(pSub)) {
|
||||
presentSubs.push(pSub);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const {operator, value, isPercent} = parseGenericValueOrPercentComparison(threshold);
|
||||
let sum = {subsWithActivity: presentSubs, subreddits, count: currCount, threshold, triggered: false, testValue: currCount.toString()};
|
||||
let sum = {subsWithActivity: presentSubs, combinedKarma, karmaThreshold, subreddits: subStates.map(x => x.stateDescription), count: currCount, threshold, triggered: false, testValue: currCount.toString()};
|
||||
if (isPercent) {
|
||||
sum.testValue = `${formatNumber((currCount / viableActivity.length) * 100)}%`;
|
||||
if (comparisonTextOp(currCount / viableActivity.length, operator, value / 100)) {
|
||||
@@ -117,6 +148,15 @@ export class RecentActivityRule extends Rule {
|
||||
sum.triggered = true;
|
||||
totalTriggeredOn = sum;
|
||||
}
|
||||
// if we would trigger on threshold need to also test for karma
|
||||
if(totalTriggeredOn !== undefined && karmaThreshold !== undefined) {
|
||||
const {operator: opKarma, value: valueKarma} = parseGenericValueOrPercentComparison(karmaThreshold);
|
||||
if(!comparisonTextOp(combinedKarma, opKarma, valueKarma)) {
|
||||
sum.triggered = false;
|
||||
totalTriggeredOn = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
summaries.push(sum);
|
||||
// if either trigger condition is hit end the iteration early
|
||||
if (totalTriggeredOn !== undefined) {
|
||||
@@ -150,10 +190,15 @@ export class RecentActivityRule extends Rule {
|
||||
subreddits = [],
|
||||
subsWithActivity = [],
|
||||
threshold,
|
||||
triggered
|
||||
triggered,
|
||||
combinedKarma,
|
||||
karmaThreshold,
|
||||
} = summary;
|
||||
const relevantSubs = subsWithActivity.length === 0 ? subreddits : subsWithActivity;
|
||||
const totalSummary = `${testValue} activities over ${relevantSubs.length} subreddits ${triggered ? 'met' : 'did not meet'} threshold of ${threshold}`;
|
||||
let totalSummary = `${testValue} activities over ${relevantSubs.length} subreddits${karmaThreshold !== undefined ? ` with ${combinedKarma} combined karma` : ''} ${triggered ? 'met' : 'did not meet'} threshold of ${threshold}${karmaThreshold !== undefined ? ` and ${karmaThreshold} combined karma` : ''}`;
|
||||
if(triggered && subsWithActivity.length > 0) {
|
||||
totalSummary = `${totalSummary} -- subreddits: ${subsWithActivity.join(', ')}`;
|
||||
}
|
||||
return {
|
||||
result: totalSummary,
|
||||
data: {
|
||||
@@ -163,7 +208,8 @@ export class RecentActivityRule extends Rule {
|
||||
subCount: relevantSubs.length,
|
||||
totalCount: count,
|
||||
threshold,
|
||||
testValue
|
||||
testValue,
|
||||
karmaThreshold,
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -175,7 +221,16 @@ export class RecentActivityRule extends Rule {
|
||||
* @minProperties 1
|
||||
* @additionalProperties false
|
||||
* */
|
||||
export interface SubThreshold extends SubredditCriteria {
|
||||
export interface ActivityThreshold {
|
||||
/**
|
||||
* When present, a Submission will only be counted if it meets this criteria
|
||||
* */
|
||||
submissionState?: SubmissionState
|
||||
/**
|
||||
* When present, a Comment will only be counted if it meets this criteria
|
||||
* */
|
||||
commentState?: CommentState
|
||||
|
||||
/**
|
||||
* A string containing a comparison operator and a value to compare recent activities against
|
||||
*
|
||||
@@ -191,6 +246,35 @@ export interface SubThreshold extends SubredditCriteria {
|
||||
* @examples [">= 1"]
|
||||
* */
|
||||
threshold?: string
|
||||
|
||||
/**
|
||||
* Test the **combined karma** from Activities found in the specified subreddits
|
||||
*
|
||||
* Value is a string containing a comparison operator and a number of **combined karma** to compare against
|
||||
*
|
||||
* If specified then both `threshold` and `karma` must be met for this `SubThreshold` to be satisfied
|
||||
*
|
||||
* The syntax is `(< OR > OR <= OR >=) <number>`
|
||||
*
|
||||
* * EX `> 50` => greater than 50 combined karma for all found Activities in specified subreddits
|
||||
*
|
||||
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
|
||||
* */
|
||||
karma?: string
|
||||
|
||||
/**
|
||||
* Activities will be counted if they are found in this list of Subreddits
|
||||
*
|
||||
* Each value in the list can be either:
|
||||
*
|
||||
* * string (name of subreddit)
|
||||
* * regular expression to run on the subreddit name
|
||||
* * `SubredditState`
|
||||
*
|
||||
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
|
||||
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
|
||||
* */
|
||||
subreddits?: (string | SubredditState)[]
|
||||
}
|
||||
|
||||
interface RecentActivityConfig extends ActivityWindow, ReferenceSubmission {
|
||||
@@ -203,7 +287,7 @@ interface RecentActivityConfig extends ActivityWindow, ReferenceSubmission {
|
||||
* A list of subreddits/count criteria that may trigger this rule. ANY SubThreshold will trigger this rule.
|
||||
* @minItems 1
|
||||
* */
|
||||
thresholds: SubThreshold[],
|
||||
thresholds: ActivityThreshold[],
|
||||
}
|
||||
|
||||
export interface RecentActivityRuleOptions extends RecentActivityConfig, RuleOptions {
|
||||
@@ -212,7 +296,7 @@ export interface RecentActivityRuleOptions extends RecentActivityConfig, RuleOpt
|
||||
/**
|
||||
* Checks a user's history for any Activity (Submission/Comment) in the subreddits specified in thresholds
|
||||
*
|
||||
* Available data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):
|
||||
* Available data for [Action templating](https://github.com/FoxxMD/context-mod#action-templating):
|
||||
*
|
||||
* ```
|
||||
* summary => comma-deliminated list of subreddits that hit the threshold and their count EX subredditA(1), subredditB(4),...
|
||||
|
||||
@@ -2,14 +2,16 @@ import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
|
||||
import {Comment} from "snoowrap";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import {
|
||||
comparisonTextOp, FAIL, isExternalUrlSubmission, parseGenericValueComparison,
|
||||
parseGenericValueOrPercentComparison, parseRegex,
|
||||
PASS
|
||||
asSubmission,
|
||||
comparisonTextOp, FAIL, isExternalUrlSubmission, isSubmission, parseGenericValueComparison,
|
||||
parseGenericValueOrPercentComparison, parseRegex, parseStringToRegex,
|
||||
PASS, triggeredIndicator
|
||||
} from "../util";
|
||||
import {
|
||||
ActivityWindowType, JoinOperands,
|
||||
} from "../Common/interfaces";
|
||||
import dayjs from 'dayjs';
|
||||
import SimpleError from "../Utils/SimpleError";
|
||||
|
||||
export interface RegexCriteria {
|
||||
/**
|
||||
@@ -21,17 +23,11 @@ export interface RegexCriteria {
|
||||
/**
|
||||
* A valid Regular Expression to test content against
|
||||
*
|
||||
* Do not wrap expression in forward slashes
|
||||
* If no flags are specified then the **global** flag is used by default
|
||||
*
|
||||
* EX For the expression `/reddit|FoxxMD/` use the value should be `reddit|FoxxMD`
|
||||
*
|
||||
* @examples ["reddit|FoxxMD"]
|
||||
* @examples ["/reddit|FoxxMD/ig"]
|
||||
* */
|
||||
regex: string,
|
||||
/**
|
||||
* Regex flags to use
|
||||
* */
|
||||
regexFlags?: string,
|
||||
|
||||
/**
|
||||
* Which content from an Activity to test the regex against
|
||||
@@ -134,12 +130,11 @@ export class RegexRule extends Rule {
|
||||
|
||||
let criteriaResults = [];
|
||||
|
||||
for (const criteria of this.criteria) {
|
||||
for (const [index, criteria] of this.criteria.entries()) {
|
||||
|
||||
const {
|
||||
name,
|
||||
name = (index + 1),
|
||||
regex,
|
||||
regexFlags,
|
||||
testOn: testOnVals = ['title', 'body'],
|
||||
lookAt = 'all',
|
||||
matchThreshold = '> 0',
|
||||
@@ -157,7 +152,10 @@ export class RegexRule extends Rule {
|
||||
}, []);
|
||||
|
||||
// check regex
|
||||
const reg = new RegExp(regex);
|
||||
const reg = parseStringToRegex(regex, 'g');
|
||||
if(reg === undefined) {
|
||||
throw new SimpleError(`Value given for regex on Criteria ${name} was not valid: ${regex}`);
|
||||
}
|
||||
// ok cool its a valid regex
|
||||
|
||||
const matchComparison = parseGenericValueComparison(matchThreshold);
|
||||
@@ -176,7 +174,7 @@ export class RegexRule extends Rule {
|
||||
|
||||
// first lets see if the activity we are checking satisfies thresholds
|
||||
// since we may be able to avoid api calls to get history
|
||||
let actMatches = this.getMatchesFromActivity(item, testOn, reg, regexFlags);
|
||||
let actMatches = this.getMatchesFromActivity(item, testOn, reg);
|
||||
matches = matches.concat(actMatches).slice(0, 100);
|
||||
matchCount += actMatches.length;
|
||||
|
||||
@@ -226,7 +224,7 @@ export class RegexRule extends Rule {
|
||||
|
||||
for (const h of history) {
|
||||
activitiesTested++;
|
||||
const aMatches = this.getMatchesFromActivity(h, testOn, reg, regexFlags);
|
||||
const aMatches = this.getMatchesFromActivity(h, testOn, reg);
|
||||
matches = matches.concat(aMatches).slice(0, 100);
|
||||
matchCount += aMatches.length;
|
||||
const matched = comparisonTextOp(aMatches.length, matchComparison.operator, matchComparison.value);
|
||||
@@ -300,30 +298,35 @@ export class RegexRule extends Rule {
|
||||
let index = 0;
|
||||
for (const c of criteriaResults) {
|
||||
index++;
|
||||
let msg = `Crit ${c.criteria.name || index} ${c.triggered ? PASS : FAIL}`;
|
||||
let msg = `Criteria ${c.criteria.name || `#${index}`} ${triggeredIndicator(c.triggered)}`;
|
||||
if (c.activityThresholdMet !== undefined) {
|
||||
msg = `${msg} -- Activity Match=> ${c.activityThresholdMet ? PASS : FAIL} ${c.activitiesMatchedCount} ${c.criteria.activityMatchThreshold} (Threshold ${c.criteria.matchThreshold})`;
|
||||
msg = `${msg} -- Activity Match ${triggeredIndicator(c.activityThresholdMet)} => ${c.activitiesMatchedCount} ${c.criteria.activityMatchThreshold} (Threshold ${c.criteria.matchThreshold})`;
|
||||
}
|
||||
if (c.totalThresholdMet !== undefined) {
|
||||
msg = `${msg} -- Total Matches=> ${c.totalThresholdMet ? PASS : FAIL} ${c.matchCount} ${c.criteria.totalMatchThreshold}`;
|
||||
msg = `${msg} -- Total Matches ${triggeredIndicator(c.totalThresholdMet)} => ${c.matchCount} ${c.criteria.totalMatchThreshold}`;
|
||||
} else {
|
||||
msg = `${msg} and ${c.matchCount} Total Matches`;
|
||||
}
|
||||
msg = `${msg} (Window: ${c.criteria.window})`;
|
||||
logSummary.push(msg);
|
||||
if(c.matches.length > 0) {
|
||||
let matchSample = `-- Matched Values: ${c.matches.slice(0, 3).map(x => `"${x}"`).join(', ')}${c.matches.length > 3 ? `, and ${c.matches.length - 3} more...` : ''}`;
|
||||
logSummary.push(`${msg} ${matchSample}`);
|
||||
} else {
|
||||
logSummary.push(msg);
|
||||
}
|
||||
}
|
||||
|
||||
const result = `${criteriaMet ? PASS : FAIL} ${logSummary.join(' || ')}`;
|
||||
const result = `${triggeredIndicator(criteriaMet)} ${logSummary.join(' || ')}`;
|
||||
this.logger.verbose(result);
|
||||
|
||||
return Promise.resolve([criteriaMet, this.getResult(criteriaMet, {result, data: criteriaResults})]);
|
||||
}
|
||||
|
||||
protected getMatchesFromActivity(a: (Submission | Comment), testOn: string[], reg: RegExp, flags?: string): string[] {
|
||||
protected getMatchesFromActivity(a: (Submission | Comment), testOn: string[], reg: RegExp): string[] {
|
||||
let m: string[] = [];
|
||||
// determine what content we are testing
|
||||
let contents: string[] = [];
|
||||
if (a instanceof Submission) {
|
||||
if (asSubmission(a)) {
|
||||
for (const l of testOn) {
|
||||
switch (l) {
|
||||
case 'title':
|
||||
@@ -346,7 +349,7 @@ export class RegexRule extends Rule {
|
||||
}
|
||||
|
||||
for (const c of contents) {
|
||||
const results = parseRegex(reg, c, flags);
|
||||
const results = parseRegex(reg, c);
|
||||
if (results.matched) {
|
||||
m = m.concat(results.matches);
|
||||
}
|
||||
@@ -379,7 +382,7 @@ export interface RegexRuleOptions extends RegexConfig, RuleOptions {
|
||||
*
|
||||
* Optionally, specify a `window` of the User's history to additionally test against
|
||||
*
|
||||
* Available data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):
|
||||
* Available data for [Action templating](https://github.com/FoxxMD/context-mod#action-templating):
|
||||
*
|
||||
* */
|
||||
export interface RegexRuleJSONConfig extends RegexConfig, RuleJSONConfig {
|
||||
|
||||
@@ -1,13 +1,18 @@
|
||||
import {SubmissionRule, SubmissionRuleJSONConfig} from "./index";
|
||||
import {RuleOptions, RuleResult} from "../index";
|
||||
import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
|
||||
import {Comment} from "snoowrap";
|
||||
import {
|
||||
activityWindowText,
|
||||
comparisonTextOp, FAIL, isExternalUrlSubmission, isRedditMedia,
|
||||
activityWindowText, asSubmission,
|
||||
comparisonTextOp, FAIL, getActivitySubredditName, isExternalUrlSubmission, isRedditMedia,
|
||||
parseGenericValueComparison, parseSubredditName,
|
||||
parseUsableLinkIdentifier as linkParser, PASS
|
||||
} from "../../util";
|
||||
import {ActivityWindow, ActivityWindowType, ReferenceSubmission} from "../../Common/interfaces";
|
||||
parseUsableLinkIdentifier as linkParser, PASS, toStrongSubredditState
|
||||
} from "../util";
|
||||
import {
|
||||
ActivityWindow,
|
||||
ActivityWindowType,
|
||||
ReferenceSubmission,
|
||||
StrongSubredditState,
|
||||
SubredditState
|
||||
} from "../Common/interfaces";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import dayjs from "dayjs";
|
||||
import Fuse from 'fuse.js'
|
||||
@@ -26,7 +31,7 @@ interface RepeatActivityReducer {
|
||||
|
||||
const getActivityIdentifier = (activity: (Submission | Comment), length = 200) => {
|
||||
let identifier: string;
|
||||
if (activity instanceof Submission) {
|
||||
if (asSubmission(activity)) {
|
||||
if (activity.is_self) {
|
||||
identifier = `${activity.title}${activity.selftext.slice(0, length)}`;
|
||||
} else if(isRedditMedia(activity)) {
|
||||
@@ -45,14 +50,15 @@ const fuzzyOptions = {
|
||||
distance: 15
|
||||
};
|
||||
|
||||
export class RepeatActivityRule extends SubmissionRule {
|
||||
export class RepeatActivityRule extends Rule {
|
||||
threshold: string;
|
||||
window: ActivityWindowType;
|
||||
gapAllowance?: number;
|
||||
useSubmissionAsReference: boolean;
|
||||
lookAt: 'submissions' | 'all';
|
||||
include: string[];
|
||||
exclude: string[];
|
||||
include: (string | SubredditState)[];
|
||||
exclude: (string | SubredditState)[];
|
||||
activityFilterFunc: (x: Submission|Comment) => Promise<boolean> = async (x) => true;
|
||||
keepRemoved: boolean;
|
||||
minWordCount: number;
|
||||
|
||||
@@ -75,8 +81,40 @@ export class RepeatActivityRule extends SubmissionRule {
|
||||
this.window = window;
|
||||
this.gapAllowance = gapAllowance;
|
||||
this.useSubmissionAsReference = useSubmissionAsReference;
|
||||
this.include = include.map(x => parseSubredditName(x).toLowerCase());
|
||||
this.exclude = exclude.map(x => parseSubredditName(x).toLowerCase());
|
||||
this.include = include;
|
||||
this.exclude = exclude;
|
||||
|
||||
if(this.include.length > 0) {
|
||||
const subStates = include.map((x) => {
|
||||
if(typeof x === 'string') {
|
||||
return toStrongSubredditState({name: x, stateDescription: x}, {defaultFlags: 'i', generateDescription: true});
|
||||
}
|
||||
return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
|
||||
});
|
||||
this.activityFilterFunc = async (x: Submission|Comment) => {
|
||||
for(const ss of subStates) {
|
||||
if(await this.resources.testSubredditCriteria(x, ss)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
} else if(this.exclude.length > 0) {
|
||||
const subStates = exclude.map((x) => {
|
||||
if(typeof x === 'string') {
|
||||
return toStrongSubredditState({name: x, stateDescription: x}, {defaultFlags: 'i', generateDescription: true});
|
||||
}
|
||||
return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
|
||||
});
|
||||
this.activityFilterFunc = async (x: Submission|Comment) => {
|
||||
for(const ss of subStates) {
|
||||
if(await this.resources.testSubredditCriteria(x, ss)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
};
|
||||
}
|
||||
this.lookAt = lookAt;
|
||||
}
|
||||
|
||||
@@ -95,18 +133,10 @@ export class RepeatActivityRule extends SubmissionRule {
|
||||
}
|
||||
}
|
||||
|
||||
async process(item: Submission): Promise<[boolean, RuleResult]> {
|
||||
const referenceUrl = await item.url;
|
||||
if (referenceUrl === undefined && this.useSubmissionAsReference) {
|
||||
this.logger.warn(`Rule not triggered because useSubmissionAsReference=true but submission is not a link`);
|
||||
return Promise.resolve([false, this.getResult(false)]);
|
||||
}
|
||||
|
||||
let filterFunc = (x: any) => true;
|
||||
if(this.include.length > 0) {
|
||||
filterFunc = (x: Submission|Comment) => this.include.includes(x.subreddit.display_name.toLowerCase());
|
||||
} else if(this.exclude.length > 0) {
|
||||
filterFunc = (x: Submission|Comment) => !this.exclude.includes(x.subreddit.display_name.toLowerCase());
|
||||
async process(item: Submission|Comment): Promise<[boolean, RuleResult]> {
|
||||
let referenceUrl;
|
||||
if(asSubmission(item) && this.useSubmissionAsReference) {
|
||||
referenceUrl = await item.url;
|
||||
}
|
||||
|
||||
let activities: (Submission | Comment)[] = [];
|
||||
@@ -119,13 +149,14 @@ export class RepeatActivityRule extends SubmissionRule {
|
||||
break;
|
||||
}
|
||||
|
||||
const condensedActivities = activities.reduce((acc: RepeatActivityReducer, activity: (Submission | Comment), index: number) => {
|
||||
const condensedActivities = await activities.reduce(async (accProm: Promise<RepeatActivityReducer>, activity: (Submission | Comment), index: number) => {
|
||||
const acc = await accProm;
|
||||
const {openSets = [], allSets = []} = acc;
|
||||
|
||||
let identifier = getActivityIdentifier(activity);
|
||||
const isUrl = isExternalUrlSubmission(activity);
|
||||
let fu = new Fuse([identifier], !isUrl ? fuzzyOptions : {...fuzzyOptions, distance: 5});
|
||||
const validSub = filterFunc(activity);
|
||||
const validSub = await this.activityFilterFunc(activity);
|
||||
let minMet = identifier.length >= this.minWordCount;
|
||||
|
||||
let updatedAllSets = [...allSets];
|
||||
@@ -176,7 +207,7 @@ export class RepeatActivityRule extends SubmissionRule {
|
||||
|
||||
return {openSets: updatedOpenSets, allSets: updatedAllSets};
|
||||
|
||||
}, {openSets: [], allSets: []});
|
||||
}, Promise.resolve({openSets: [], allSets: []}));
|
||||
|
||||
const allRepeatSets = [...condensedActivities.allSets, ...condensedActivities.openSets];
|
||||
|
||||
@@ -196,7 +227,7 @@ export class RepeatActivityRule extends SubmissionRule {
|
||||
let referenceSubmissions = identifierGroupedActivities.get(identifier);
|
||||
if(referenceSubmissions === undefined && isExternalUrlSubmission(item)) {
|
||||
// if external url sub then try by title
|
||||
identifier = item.title;
|
||||
identifier = (item as Submission).title;
|
||||
referenceSubmissions = identifierGroupedActivities.get(identifier);
|
||||
if(referenceSubmissions === undefined) {
|
||||
// didn't get by title so go back to url since that's the default
|
||||
@@ -225,7 +256,7 @@ export class RepeatActivityRule extends SubmissionRule {
|
||||
};
|
||||
for (let set of value) {
|
||||
const test = comparisonTextOp(set.length, operator, thresholdValue);
|
||||
const md = set.map((x: (Comment | Submission)) => `[${x instanceof Submission ? x.title : getActivityIdentifier(x, 50)}](https://reddit.com${x.permalink}) in ${x.subreddit_name_prefixed} on ${dayjs(x.created_utc * 1000).utc().format()}`);
|
||||
const md = set.map((x: (Comment | Submission)) => `[${asSubmission(x) ? x.title : getActivityIdentifier(x, 50)}](https://reddit.com${x.permalink}) in ${x.subreddit_name_prefixed} on ${dayjs(x.created_utc * 1000).utc().format()}`);
|
||||
|
||||
summaryData.sets.push(set);
|
||||
summaryData.largestTrigger = Math.max(summaryData.largestTrigger, set.length);
|
||||
@@ -296,21 +327,31 @@ interface RepeatActivityConfig extends ActivityWindow, ReferenceSubmission {
|
||||
* */
|
||||
gapAllowance?: number,
|
||||
/**
|
||||
* Only include Submissions from this list of Subreddits (by name, case-insensitive)
|
||||
* If present, activities will be counted only if they are found in this list of Subreddits
|
||||
*
|
||||
* EX `["mealtimevideos","askscience"]`
|
||||
* @examples ["mealtimevideos","askscience"]
|
||||
* @minItems 1
|
||||
* Each value in the list can be either:
|
||||
*
|
||||
* * string (name of subreddit)
|
||||
* * regular expression to run on the subreddit name
|
||||
* * `SubredditState`
|
||||
*
|
||||
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
|
||||
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
|
||||
* */
|
||||
include?: string[],
|
||||
include?: (string | SubredditState)[],
|
||||
/**
|
||||
* Do not include Submissions from this list of Subreddits (by name, case-insensitive)
|
||||
* If present, activities will be counted only if they are **NOT** found in this list of Subreddits
|
||||
*
|
||||
* EX `["mealtimevideos","askscience"]`
|
||||
* @examples ["mealtimevideos","askscience"]
|
||||
* @minItems 1
|
||||
* Each value in the list can be either:
|
||||
*
|
||||
* * string (name of subreddit)
|
||||
* * regular expression to run on the subreddit name
|
||||
* * `SubredditState`
|
||||
*
|
||||
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
|
||||
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
|
||||
* */
|
||||
exclude?: string[],
|
||||
exclude?: (string | SubredditState)[],
|
||||
|
||||
/**
|
||||
* If present determines which activities to consider for gapAllowance.
|
||||
@@ -354,7 +395,7 @@ export interface RepeatActivityOptions extends RepeatActivityConfig, RuleOptions
|
||||
/**
|
||||
* Checks a user's history for Submissions with identical content
|
||||
*
|
||||
* Available data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):
|
||||
* Available data for [Action templating](https://github.com/FoxxMD/context-mod#action-templating):
|
||||
*
|
||||
* ```
|
||||
* count => Total number of repeat Submissions
|
||||
@@ -362,7 +403,7 @@ export interface RepeatActivityOptions extends RepeatActivityConfig, RuleOptions
|
||||
* url => Url of the submission that triggered the rule
|
||||
* ```
|
||||
* */
|
||||
export interface RepeatActivityJSONConfig extends RepeatActivityConfig, SubmissionRuleJSONConfig {
|
||||
export interface RepeatActivityJSONConfig extends RepeatActivityConfig, RuleJSONConfig {
|
||||
kind: 'repeatActivity'
|
||||
}
|
||||
|
||||
@@ -1,34 +1,36 @@
|
||||
import {RecentActivityRule, RecentActivityRuleJSONConfig} from "./RecentActivityRule";
|
||||
import RepeatActivityRule, {RepeatActivityJSONConfig} from "./SubmissionRule/RepeatActivityRule";
|
||||
import RepeatActivityRule, {RepeatActivityJSONConfig} from "./RepeatActivityRule";
|
||||
import {Rule, RuleJSONConfig} from "./index";
|
||||
import AuthorRule, {AuthorRuleJSONConfig} from "./AuthorRule";
|
||||
import {AttributionJSONConfig, AttributionRule} from "./AttributionRule";
|
||||
import {Logger} from "winston";
|
||||
import HistoryRule, {HistoryJSONConfig} from "./HistoryRule";
|
||||
import RegexRule, {RegexRuleJSONConfig} from "./RegexRule";
|
||||
import {SubredditResources} from "../Subreddit/SubredditResources";
|
||||
import Snoowrap from "snoowrap";
|
||||
|
||||
export function ruleFactory
|
||||
(config: RuleJSONConfig, logger: Logger, subredditName: string): Rule {
|
||||
(config: RuleJSONConfig, logger: Logger, subredditName: string, resources: SubredditResources, client: Snoowrap): Rule {
|
||||
let cfg;
|
||||
switch (config.kind) {
|
||||
case 'recentActivity':
|
||||
cfg = config as RecentActivityRuleJSONConfig;
|
||||
return new RecentActivityRule({...cfg, logger, subredditName});
|
||||
return new RecentActivityRule({...cfg, logger, subredditName, resources, client});
|
||||
case 'repeatActivity':
|
||||
cfg = config as RepeatActivityJSONConfig;
|
||||
return new RepeatActivityRule({...cfg, logger, subredditName});
|
||||
return new RepeatActivityRule({...cfg, logger, subredditName, resources, client});
|
||||
case 'author':
|
||||
cfg = config as AuthorRuleJSONConfig;
|
||||
return new AuthorRule({...cfg, logger, subredditName});
|
||||
return new AuthorRule({...cfg, logger, subredditName, resources, client});
|
||||
case 'attribution':
|
||||
cfg = config as AttributionJSONConfig;
|
||||
return new AttributionRule({...cfg, logger, subredditName});
|
||||
return new AttributionRule({...cfg, logger, subredditName, resources, client});
|
||||
case 'history':
|
||||
cfg = config as HistoryJSONConfig;
|
||||
return new HistoryRule({...cfg, logger, subredditName});
|
||||
return new HistoryRule({...cfg, logger, subredditName, resources, client});
|
||||
case 'regex':
|
||||
cfg = config as RegexRuleJSONConfig;
|
||||
return new RegexRule({...cfg, logger, subredditName});
|
||||
return new RegexRule({...cfg, logger, subredditName, resources, client});
|
||||
default:
|
||||
throw new Error('rule "kind" was not recognized.');
|
||||
}
|
||||
|
||||
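The widened `ruleFactory` signature above forwards the same `resources` and `client` instances into every rule's options. A hedged, self-contained sketch of that shape with simplified stand-ins (none of these names are the repository's real types):

```typescript
interface SharedDeps {
    subredditName: string;
    resources: { cacheHits: number }; // stand-in for SubredditResources
    client: { userAgent: string };    // stand-in for the Snoowrap client
}

interface MiniRuleConfig {
    kind: 'recentActivity' | 'repeatActivity';
    threshold?: string;
}

class MiniRule {
    constructor(public options: MiniRuleConfig & SharedDeps) {}
}

const miniRuleFactory = (config: MiniRuleConfig, deps: SharedDeps): MiniRule => {
    switch (config.kind) {
        case 'recentActivity':
        case 'repeatActivity':
            // every branch spreads the same shared dependencies into the rule's options
            return new MiniRule({...config, ...deps});
        default:
            throw new Error('rule "kind" was not recognized.');
    }
};

const rule = miniRuleFactory(
    {kind: 'repeatActivity', threshold: '> 3'},
    {subredditName: 'mealtimevideos', resources: {cacheHits: 0}, client: {userAgent: 'example'}},
);
console.log(rule.options.subredditName); // mealtimevideos
```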
@@ -1,5 +1,5 @@
|
||||
import {IRule, Triggerable, Rule, RuleJSONConfig, RuleResult, RuleSetResult} from "./index";
|
||||
import {Comment, Submission} from "snoowrap";
|
||||
import Snoowrap, {Comment, Submission} from "snoowrap";
|
||||
import {ruleFactory} from "./RuleFactory";
|
||||
import {createAjvFactory, mergeArr} from "../util";
|
||||
import {Logger} from "winston";
|
||||
@@ -7,6 +7,7 @@ import {JoinCondition, JoinOperands} from "../Common/interfaces";
|
||||
import * as RuleSchema from '../Schema/Rule.json';
|
||||
import Ajv from 'ajv';
|
||||
import {RuleJson, RuleObjectJson} from "../Common/types";
|
||||
import {SubredditResources} from "../Subreddit/SubredditResources";
|
||||
|
||||
export class RuleSet implements IRuleSet {
|
||||
rules: Rule[] = [];
|
||||
@@ -24,7 +25,7 @@ export class RuleSet implements IRuleSet {
|
||||
} else {
|
||||
const valid = ajv.validate(RuleSchema, r);
|
||||
if (valid) {
|
||||
this.rules.push(ruleFactory(r as RuleJSONConfig, logger, options.subredditName));
|
||||
this.rules.push(ruleFactory(r as RuleJSONConfig, logger, options.subredditName, options.resources, options.client));
|
||||
} else {
|
||||
this.logger.warn('Could not build rule because of JSON errors', {}, {errors: ajv.errors, obj: r});
|
||||
}
|
||||
@@ -85,6 +86,8 @@ export interface RuleSetOptions extends IRuleSet {
|
||||
rules: Array<IRule | RuleJSONConfig>,
|
||||
logger: Logger
|
||||
subredditName: string
|
||||
resources: SubredditResources
|
||||
client: Snoowrap
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.SubmissionRule = void 0;
|
||||
const index_1 = require("../index");
|
||||
class SubmissionRule extends index_1.Rule {
|
||||
}
|
||||
exports.SubmissionRule = SubmissionRule;
|
||||
//# sourceMappingURL=index.js.map
|
||||
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"index.js","sourceRoot":"","sources":["index.ts"],"names":[],"mappings":";;;AAAA,oCAAqD;AAErD,MAAsB,cAAe,SAAQ,YAAI;CAEhD;AAFD,wCAEC"}
|
||||
@@ -1,10 +1,9 @@
|
||||
import {Comment} from "snoowrap";
|
||||
import Snoowrap, {Comment} from "snoowrap";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import {Logger} from "winston";
|
||||
import {findResultByPremise, mergeArr} from "../util";
|
||||
import ResourceManager, {SubredditResources} from "../Subreddit/SubredditResources";
|
||||
import {SubredditResources} from "../Subreddit/SubredditResources";
|
||||
import {ChecksActivityState, TypedActivityStates} from "../Common/interfaces";
|
||||
import {isItem} from "../Utils/SnoowrapUtils";
|
||||
import Author, {AuthorOptions} from "../Author/Author";
|
||||
|
||||
export interface RuleOptions {
|
||||
@@ -13,6 +12,8 @@ export interface RuleOptions {
|
||||
itemIs?: TypedActivityStates;
|
||||
logger: Logger
|
||||
subredditName: string;
|
||||
resources: SubredditResources
|
||||
client: Snoowrap
|
||||
}
|
||||
|
||||
export interface RulePremise {
|
||||
@@ -31,6 +32,11 @@ export interface RuleResult extends ResultContext {
|
||||
triggered: (boolean | null)
|
||||
}
|
||||
|
||||
export type FormattedRuleResult = RuleResult & {
|
||||
triggered: string
|
||||
result: string
|
||||
}
|
||||
|
||||
export interface RuleSetResult {
|
||||
results: RuleResult[],
|
||||
condition: 'OR' | 'AND',
|
||||
@@ -51,6 +57,7 @@ export abstract class Rule implements IRule, Triggerable {
|
||||
authorIs: AuthorOptions;
|
||||
itemIs: TypedActivityStates;
|
||||
resources: SubredditResources;
|
||||
client: Snoowrap;
|
||||
|
||||
constructor(options: RuleOptions) {
|
||||
const {
|
||||
@@ -62,9 +69,12 @@ export abstract class Rule implements IRule, Triggerable {
|
||||
} = {},
|
||||
itemIs = [],
|
||||
subredditName,
|
||||
resources,
|
||||
client,
|
||||
} = options;
|
||||
this.name = name;
|
||||
this.resources = ResourceManager.get(subredditName) as SubredditResources;
|
||||
this.resources = resources;
|
||||
this.client = client;
|
||||
|
||||
this.authorIs = {
|
||||
exclude: exclude.map(x => new Author(x)),
|
||||
@@ -83,7 +93,7 @@ export abstract class Rule implements IRule, Triggerable {
|
||||
this.logger.debug(`Returning existing result of ${existingResult.triggered ? '✔️' : '❌'}`);
|
||||
return Promise.resolve([existingResult.triggered, {...existingResult, name: this.name}]);
|
||||
}
|
||||
const [itemPass, crit] = isItem(item, this.itemIs, this.logger);
|
||||
const itemPass = await this.resources.testItemCriteria(item, this.itemIs);
|
||||
if (!itemPass) {
|
||||
this.logger.verbose(`(Skipped) Item did not pass 'itemIs' test`);
|
||||
return Promise.resolve([null, this.getResult(null, {result: `Item did not pass 'itemIs' test`})]);
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
"properties": {
|
||||
"age": {
|
||||
"description": "Test the age of the Author's account (when it was created) against this comparison\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>`\n\n* EX `> 100 days` => Passes if Author's account is older than 100 days\n* EX `<= 2 months` => Passes if Author's account is younger than or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
|
||||
"pattern": "^\\s*(?<opStr>>|>=|<|<=)\\s*(?<time>\\d+)\\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\\s*$",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"commentKarma": {
|
||||
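A sketch of how the simplified duration-comparison pattern above (named groups dropped in favor of positional ones) can be consumed. `parseAgeComparison` is an illustrative helper, not an export from the codebase:

```typescript
const AGE_PATTERN = /^\s*(>|>=|<|<=)\s*(\d+)\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\s*$/;

const parseAgeComparison = (value: string): { op: string, amount: number, unit: string } | undefined => {
    const match = value.match(AGE_PATTERN);
    if (match === null) {
        return undefined;
    }
    const [, op, amount, unit] = match; // positional groups replace the old named groups
    return {op, amount: Number.parseInt(amount, 10), unit};
};

console.log(parseAgeComparison('> 100 days'));  // { op: '>', amount: 100, unit: 'days' }
console.log(parseAgeComparison('<= 2 months')); // { op: '<=', amount: 2, unit: 'months' }
```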
@@ -148,16 +148,34 @@
|
||||
"type": "boolean"
|
||||
},
|
||||
"op": {
|
||||
"description": "Is this Comment Author also the Author of the Submission this comment is in?",
|
||||
"type": "boolean"
|
||||
},
|
||||
"removed": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"reports": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"spam": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"stickied": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"submissionState": {
|
||||
"description": "A list of SubmissionState attributes to test the Submission this comment is in",
|
||||
"items": {
|
||||
"$ref": "#/definitions/SubmissionState"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
@@ -186,6 +204,12 @@
|
||||
"is_self": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"link_flair_css_class": {
|
||||
"type": "string"
|
||||
},
|
||||
"link_flair_text": {
|
||||
"type": "string"
|
||||
},
|
||||
"locked": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -199,6 +223,16 @@
|
||||
"removed": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"reports": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"spam": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -207,6 +241,10 @@
|
||||
},
|
||||
"stickied": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"title": {
|
||||
"description": "A valid regular expression to match against the title of the submission",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
@@ -278,6 +316,14 @@
|
||||
],
|
||||
"type": "boolean"
|
||||
},
|
||||
"enable": {
|
||||
"default": true,
|
||||
"description": "If set to `false` the Action will not be run",
|
||||
"examples": [
|
||||
true
|
||||
],
|
||||
"type": "boolean"
|
||||
},
|
||||
"itemIs": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -303,6 +349,7 @@
|
||||
"comment",
|
||||
"flair",
|
||||
"lock",
|
||||
"message",
|
||||
"remove",
|
||||
"report",
|
||||
"usernote"
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,27 +1,231 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"definitions": {
|
||||
"BotConnection": {
|
||||
"description": "Configuration required to connect to a CM Server",
|
||||
"properties": {
|
||||
"host": {
|
||||
"description": "The hostname and port the CM Server is listening on EX `localhost:8085`",
|
||||
"type": "string"
|
||||
},
|
||||
"secret": {
|
||||
"description": "The **shared secret** used to sign API calls from the Client to the Server.\n\nThis value should be the same as what is specified in the target CM's `api.secret` configuration",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"host",
|
||||
"secret"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
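A hedged sketch of how a `BotConnection` is meant to pair with the Server's `api` settings, based on the descriptions above and the `api.secret` description later in this schema; all values here are placeholders:

```typescript
// minimal operator config showing the shared secret used by both sides
const operatorConfig = {
    mode: 'all',
    api: {
        port: 8095,
        secret: 'aSharedRandomString', // signs/verifies API calls between Client and Server
    },
    web: {
        clients: [
            {
                host: 'localhost:8095',        // hostname and port the CM Server listens on
                secret: 'aSharedRandomString', // must match api.secret above
            },
        ],
    },
};

console.log(JSON.stringify(operatorConfig, null, 2));
```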
"BotInstanceJsonConfig": {
|
||||
"description": "The configuration for an **individual reddit account** ContextMod will run as a bot.\n\nMultiple bot configs may be specified (one per reddit account).\n\n**NOTE:** If `bots` is not specified in a `FILE` then a default `bot` is generated using `ENV/ARG` values IE `CLIENT_ID`, etc...but if `bots` IS specified the default is not generated.",
|
||||
"properties": {
|
||||
"caching": {
|
||||
"$ref": "#/definitions/OperatorCacheConfig",
|
||||
"description": "Settings to configure the default caching behavior for this bot\n\nEvery setting not specified will default to what is specified by the global operator caching config"
|
||||
},
|
||||
"credentials": {
|
||||
"$ref": "#/definitions/RedditCredentials",
|
||||
"description": "Credentials required for the bot to interact with Reddit's API\n\nThese credentials will provided to both the API and Web interface unless otherwise specified with the `web.credentials` property\n\nRefer to the [required credentials table](https://github.com/FoxxMD/context-mod/blob/master/docs/operatorConfiguration.md#minimum-required-configuration) to see what is necessary to run the bot.",
|
||||
"examples": [
|
||||
{
|
||||
"accessToken": "p75_1c467b2",
|
||||
"clientId": "f4b4df1_9oiu",
|
||||
"clientSecret": "34v5q1c564_yt7",
|
||||
"redirectUri": "http://localhost:8085/callback",
|
||||
"refreshToken": "34_f1w1v4"
|
||||
}
|
||||
]
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"nanny": {
|
||||
"description": "Settings related to managing heavy API usage.",
|
||||
"properties": {
|
||||
"hardLimit": {
|
||||
"default": 50,
|
||||
"description": "When `api limit remaining` reaches this number the application will pause all event polling until the api limit is reset.",
|
||||
"examples": [
|
||||
50
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"softLimit": {
|
||||
"default": 250,
|
||||
"description": "When `api limit remaining` reaches this number the application will attempt to put heavy-usage subreddits in a **slow mode** where activity processed is slowed to one every 1.5 seconds until the api limit is reset.",
|
||||
"examples": [
|
||||
250
|
||||
],
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
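A sketch of the throttling behavior the `nanny` settings above describe: pause all polling under the hard limit, switch heavy subreddits to slow mode under the soft limit. The helper name and return type are illustrative only:

```typescript
const hardLimit = 50;
const softLimit = 250;

type ApiMode = 'paused' | 'slow' | 'normal';

const nannyMode = (apiLimitRemaining: number): ApiMode => {
    if (apiLimitRemaining <= hardLimit) {
        return 'paused'; // stop event polling until the api limit resets
    }
    if (apiLimitRemaining <= softLimit) {
        return 'slow';   // process roughly one activity every 1.5 seconds
    }
    return 'normal';
};

console.log(nannyMode(600), nannyMode(200), nannyMode(40)); // normal slow paused
```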
"notifications": {
|
||||
"$ref": "#/definitions/NotificationConfig",
|
||||
"description": "Settings to configure 3rd party notifications for when behavior occurs"
|
||||
},
|
||||
"polling": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/PollingDefaults"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"sharedMod": {
|
||||
"default": false,
|
||||
"description": "If set to `true` all subreddits polling unmoderated/modqueue with default polling settings will share a request to \"r/mod\"\notherwise each subreddit will poll its own mod view\n\n* ENV => `SHARE_MOD`\n* ARG => `--shareMod`",
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
}
|
||||
],
|
||||
"description": "Settings related to default polling configurations for subreddits"
|
||||
},
|
||||
"queue": {
|
||||
"description": "Settings related to default configurations for queue behavior for subreddits",
|
||||
"properties": {
|
||||
"maxWorkers": {
|
||||
"default": 1,
|
||||
"description": "Set the number of maximum concurrent workers any subreddit can use.\n\nSubreddits may define their own number of max workers in their config but the application will never allow any subreddit's max workers to be larger than the operator\n\nNOTE: Do not increase this unless you are certain you know what you are doing! The default is suitable for the majority of use cases.",
|
||||
"examples": [
|
||||
1
|
||||
],
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"snoowrap": {
|
||||
"description": "Settings to control some [Snoowrap](https://github.com/not-an-aardvark/snoowrap) behavior",
|
||||
"properties": {
|
||||
"debug": {
|
||||
"description": "Manually set the debug status for snoowrap\n\nWhen snoowrap has `debug: true` it will log the http status response of reddit api requests to at the `debug` level\n\n* Set to `true` to always output\n* Set to `false` to never output\n\nIf not present or `null` will be set based on `logLevel`\n\n* ENV => `SNOO_DEBUG`\n* ARG => `--snooDebug`",
|
||||
"type": "boolean"
|
||||
},
|
||||
"proxy": {
|
||||
"description": "Proxy all requests to Reddit's API through this endpoint\n\n* ENV => `PROXY`\n* ARG => `--proxy <proxyEndpoint>`",
|
||||
"examples": [
|
||||
"http://localhost:4443"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"subreddits": {
|
||||
"description": "Settings related to bot behavior for subreddits it is managing",
|
||||
"properties": {
|
||||
"dryRun": {
|
||||
"default": false,
|
||||
"description": "If `true` then all subreddits will run in dry run mode, overriding configurations\n\n* ENV => `DRYRUN`\n* ARG => `--dryRun`",
|
||||
"examples": [
|
||||
false
|
||||
],
|
||||
"type": "boolean"
|
||||
},
|
||||
"exclude": {
|
||||
"description": "Names of subreddits the bot should NOT run, based on what subreddits it moderates\n\nThis setting is ignored if `names` is specified",
|
||||
"examples": [
|
||||
[
|
||||
"mealtimevideos",
|
||||
"programminghumor"
|
||||
]
|
||||
],
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"heartbeatInterval": {
|
||||
"default": 300,
|
||||
"description": "Interval, in seconds, to perform application heartbeat\n\nOn heartbeat the application does several things:\n\n* Log output with current api rate remaining and other statistics\n* Tries to retrieve and parse configurations for any subreddits with invalid configuration state\n* Restarts any bots stopped/paused due to polling issues, general errors, or invalid configs (if new config is valid)\n\n* ENV => `HEARTBEAT`\n* ARG => `--heartbeat <sec>`",
|
||||
"examples": [
|
||||
300
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"names": {
|
||||
"description": "Names of subreddits for bot to run on\n\nIf not present or `null` bot will run on all subreddits it is a moderator of\n\n* ENV => `SUBREDDITS` (comma-separated)\n* ARG => `--subreddits <list...>`",
|
||||
"examples": [
|
||||
[
|
||||
"mealtimevideos",
|
||||
"programminghumor"
|
||||
]
|
||||
],
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"wikiConfig": {
|
||||
"default": "botconfig/contextbot",
|
||||
"description": "The default relative url to the ContextMod wiki page EX `https://reddit.com/r/subreddit/wiki/<path>`\n\n* ENV => `WIKI_CONFIG`\n* ARG => `--wikiConfig <path>`",
|
||||
"examples": [
|
||||
"botconfig/contextbot"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"CacheOptions": {
|
||||
"additionalProperties": {
|
||||
},
|
||||
"description": "Configure granular settings for a cache provider with this object",
|
||||
"properties": {
|
||||
"auth_pass": {
|
||||
"description": "(`redis`) the authentication passphrase (if enabled)",
|
||||
"type": "string"
|
||||
},
|
||||
"db": {
|
||||
"default": 0,
|
||||
"description": "(`redis`) the db number to use",
|
||||
"examples": [
|
||||
0
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"host": {
|
||||
"default": "localhost",
|
||||
"description": "(`redis`) hostname",
|
||||
"examples": [
|
||||
"localhost"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"max": {
|
||||
"default": 500,
|
||||
"description": "(`memory`) The maximum number of keys (unique cache calls) to store in cache\n\nWhen the maximum number of keys is reached the cache will being dropping the [least-recently-used](https://github.com/isaacs/node-lru-cache) key to keep the cache at `max` size.\n\nThis will determine roughly how large in **RAM** each `memory` cache can be, based on how large your `window` criteria are. Consider this example:\n\n* all `window` criteria in a subreddit's rules are `\"window\": 100`\n* `\"max\": 500`\n* Maximum size of **each** memory cache will be `500 x 100 activities = 50,000 activities`\n * So the shared cache would be max 50k activities and\n * Every additional private cache (when a subreddit configures their cache separately) will also be max 50k activities",
|
||||
"examples": [
|
||||
500
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"port": {
|
||||
"default": 6379,
|
||||
"description": "(`redis`) port to connect on",
|
||||
"examples": [
|
||||
6379
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"store": {
|
||||
"$ref": "#/definitions/CacheProvider"
|
||||
},
|
||||
"ttl": {
|
||||
"default": 60,
|
||||
"description": "The default TTL, in seconds, for the cache provider.\n\nCan mostly be ignored since TTLs are defined for each cache object",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
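The sizing rule of thumb from the `max` description above, as a one-line calculation (the numbers are the documented example, not measurements):

```typescript
const maxKeys = 500;    // `max` for the memory cache
const windowSize = 100; // e.g. every rule uses "window": 100
console.log(`each memory cache can hold roughly ${maxKeys * windowSize} activities`); // 50000
```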
@@ -31,6 +235,7 @@
|
||||
"type": "object"
|
||||
},
|
||||
"CacheProvider": {
|
||||
"description": "Available cache providers",
|
||||
"enum": [
|
||||
"memory",
|
||||
"none",
|
||||
@@ -85,6 +290,7 @@
|
||||
"type": "array"
|
||||
},
|
||||
"providers": {
|
||||
"description": "A list of notification providers (Discord, etc..) to configure. Each object in the list is one provider. Multiple of the same provider can be provided but must have different names",
|
||||
"items": {
|
||||
"$ref": "#/definitions/DiscordProviderConfig"
|
||||
},
|
||||
@@ -124,6 +330,114 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"OperatorCacheConfig": {
|
||||
"properties": {
|
||||
"actionedEventsDefault": {
|
||||
"default": 25,
|
||||
"description": "The **default** number of Events that the cache will store triggered result summaries for\n\nThese summaries are viewable through the Web UI.\n\nThe value specified cannot be larger than `actionedEventsMax` for the global/bot config (if set)",
|
||||
"type": "number"
|
||||
},
|
||||
"actionedEventsMax": {
|
||||
"default": 25,
|
||||
"description": "The **maximum** number of Events that the cache should store triggered result summaries for\n\nThese summaries are viewable through the Web UI.\n\nThe value specified by a subreddit cannot be larger than the value set by the Operator for the global/bot config (if set)",
|
||||
"type": "number"
|
||||
},
|
||||
"authorTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, author activity history (Comments/Submission) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache\n\n* ENV => `AUTHOR_TTL`\n* ARG => `--authorTTL <sec>`",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"commentTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, a comment should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"filterCriteriaTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)\n\nThis is especially useful if when polling high-volume comments and your checks rely on author/item filters\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/CacheOptions"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"memory",
|
||||
"none",
|
||||
"redis"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"description": "The cache provider and, optionally, a custom configuration for that provider\n\nIf not present or `null` provider will be `memory`.\n\nTo specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`"
|
||||
},
|
||||
"submissionTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, a submission should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"subredditTTL": {
|
||||
"default": 600,
|
||||
"description": "Amount of time, in seconds, a subreddit (attributes) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
600
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"userNotesTTL": {
|
||||
"default": 300,
|
||||
"description": "Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
300
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"wikiTTL": {
|
||||
"default": 300,
|
||||
"description": "Amount of time, in seconds, wiki content pages should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
300
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
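A hedged example of an `OperatorCacheConfig`-shaped object using the TTL semantics documented above (values are in seconds; `false` disables caching, `0`/`true` caches indefinitely). The redis connection values are placeholders:

```typescript
const caching = {
    provider: {
        store: 'redis',
        host: 'localhost',
        port: 6379,
        db: 0,
    },
    authorTTL: 60,
    commentTTL: 60,
    submissionTTL: 60,
    filterCriteriaTTL: 60,
    wikiTTL: 300,
    userNotesTTL: 300,
    subredditTTL: 600,
    actionedEventsDefault: 25,
};

console.log(caching);
```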
"PollingDefaults": {
|
||||
"properties": {
|
||||
"delayUntil": {
|
||||
@@ -148,99 +462,125 @@
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
"api": {
|
||||
"properties": {
|
||||
"hardLimit": {
|
||||
"type": "number"
|
||||
},
|
||||
"softLimit": {
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"caching": {
|
||||
"anyOf": [
|
||||
"RedditCredentials": {
|
||||
"description": "Credentials required for the bot to interact with Reddit's API\n\nThese credentials will provided to both the API and Web interface unless otherwise specified with the `web.credentials` property\n\nRefer to the [required credentials table](https://github.com/FoxxMD/context-mod/blob/master/docs/operatorConfiguration.md#minimum-required-configuration) to see what is necessary to run the bot.",
|
||||
"examples": [
|
||||
{
|
||||
"properties": {
|
||||
"authorTTL": {
|
||||
"default": 10000,
|
||||
"description": "Amount of time, in milliseconds, author activities (Comments/Submission) should be cached",
|
||||
"examples": [
|
||||
10000
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/CacheOptions"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"memory",
|
||||
"none",
|
||||
"redis"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"userNotesTTL": {
|
||||
"default": 60000,
|
||||
"description": "Amount of time, in milliseconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached",
|
||||
"examples": [
|
||||
60000
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"wikiTTL": {
|
||||
"default": 300000,
|
||||
"description": "Amount of time, in milliseconds, wiki content pages should be cached",
|
||||
"examples": [
|
||||
300000
|
||||
],
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"memory",
|
||||
"none",
|
||||
"redis"
|
||||
],
|
||||
"type": "string"
|
||||
"accessToken": "p75_1c467b2",
|
||||
"clientId": "f4b4df1_9oiu",
|
||||
"clientSecret": "34v5q1c564_yt7",
|
||||
"redirectUri": "http://localhost:8085/callback",
|
||||
"refreshToken": "34_f1w1v4"
|
||||
}
|
||||
]
|
||||
},
|
||||
"credentials": {
|
||||
],
|
||||
"properties": {
|
||||
"accessToken": {
|
||||
"description": "Access token retrieved from authenticating an account with your Reddit Application\n\n* ENV => `ACCESS_TOKEN`\n* ARG => `--accessToken <token>`",
|
||||
"examples": [
|
||||
"p75_1c467b2"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"clientId": {
|
||||
"description": "Client ID for your Reddit application\n\n* ENV => `CLIENT_ID`\n* ARG => `--clientId <id>`",
|
||||
"examples": [
|
||||
"f4b4df1c7b2"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"clientSecret": {
|
||||
"type": "string"
|
||||
},
|
||||
"redirectUri": {
|
||||
"description": "Client Secret for your Reddit application\n\n* ENV => `CLIENT_SECRET`\n* ARG => `--clientSecret <id>`",
|
||||
"examples": [
|
||||
"34v5q1c56ub"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"refreshToken": {
|
||||
"description": "Refresh token retrieved from authenticating an account with your Reddit Application\n\n* ENV => `REFRESH_TOKEN`\n* ARG => `--refreshToken <token>`",
|
||||
"examples": [
|
||||
"34_f1w1v4"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"WebCredentials": {
|
||||
"description": "Separate credentials for the web interface can be provided when also running the api.\n\nAll properties not specified will default to values given in ENV/ARG credential properties\n\nRefer to the [required credentials table](https://github.com/FoxxMD/context-mod/blob/master/docs/operatorConfiguration.md#minimum-required-configuration) to see what is necessary for the web interface.",
|
||||
"examples": [
|
||||
{
|
||||
"clientId": "f4b4df1_9oiu",
|
||||
"clientSecret": "34v5q1c564_yt7",
|
||||
"redirectUri": "http://localhost:8085/callback"
|
||||
}
|
||||
],
|
||||
"properties": {
|
||||
"clientId": {
|
||||
"description": "Client ID for your Reddit application",
|
||||
"examples": [
|
||||
"f4b4df1_9oiu"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"clientSecret": {
|
||||
"description": "Client Secret for your Reddit application",
|
||||
"examples": [
|
||||
"34v5q1c564_yt7"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"redirectUri": {
|
||||
"description": "Redirect URI for your Reddit application\n\nUsed for:\n\n* accessing the web interface for monitoring bots\n* authenticating an account to use for a bot instance\n\n* ENV => `REDIRECT_URI`\n* ARG => `--redirectUri <uri>`",
|
||||
"examples": [
|
||||
"http://localhost:8085/callback"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"description": "Configuration for application-level settings IE for running the bot instance\n\n* To load a JSON configuration **from the command line** use the `-c` cli argument EX: `node src/index.js -c /path/to/JSON/config.json`\n* To load a JSON configuration **using an environmental variable** use `OPERATOR_CONFIG` EX: `OPERATOR_CONFIG=/path/to/JSON/config.json`",
|
||||
"properties": {
|
||||
"api": {
|
||||
"description": "Configuration for the **Server** application. See [Architecture Documentation](https://github.com/FoxxMD/context-mod/blob/master/docs/serverClientArchitecture.md) for more info",
|
||||
"properties": {
|
||||
"friendly": {
|
||||
"description": "A friendly name for this server. This will override `friendly` in `BotConnection` if specified.",
|
||||
"type": "string"
|
||||
},
|
||||
"port": {
|
||||
"default": 8095,
|
||||
"description": "The port the server listens on for API requests",
|
||||
"examples": [
|
||||
8095
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"secret": {
|
||||
"description": "The **shared secret** used to verify API requests come from an authenticated client.\n\nUse this same value for the `secret` value in a `BotConnection` object to connect to this Server",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"bots": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/BotInstanceJsonConfig"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"caching": {
|
||||
"$ref": "#/definitions/OperatorCacheConfig",
|
||||
"description": "Settings to configure the default caching behavior globally\n\nThese settings will be used by each bot, and subreddit, that does not specify their own"
|
||||
},
|
||||
"logging": {
|
||||
"description": "Settings to configure global logging defaults",
|
||||
"properties": {
|
||||
"level": {
|
||||
"default": "verbose",
|
||||
"description": "The minimum log level to output. The log level set will output logs at its level **and all levels above it:**\n\n * `error`\n * `warn`\n * `info`\n * `verbose`\n * `debug`\n\n Note: `verbose` will display *a lot* of information on the status/result of run rules/checks/actions etc. which is very useful for testing configurations. Once your bot is stable changing the level to `info` will reduce log noise.\n\n * ENV => `LOG_LEVEL`\n * ARG => `--logLevel <level>`",
|
||||
"enum": [
|
||||
"debug",
|
||||
"error",
|
||||
@@ -248,86 +588,128 @@
|
||||
"verbose",
|
||||
"warn"
|
||||
],
|
||||
"examples": [
|
||||
"verbose"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"path": {
|
||||
"description": "The absolute path to a directory where rotating log files should be stored.\n\n* If not present or `null` no log files will be created\n* If `true` logs will be stored at `[working directory]/logs`\n\n* ENV => `LOG_DIR`\n* ARG => `--logDir [dir]`",
|
||||
"examples": [
|
||||
"/var/log/contextmod"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"mode": {
|
||||
"default": "all",
|
||||
"description": "Mode to run ContextMod in\n\n* `all` (default) - Run the api and the web interface\n* `client` - Run web interface only\n* `server` - Run the api/bots only",
|
||||
"enum": [
|
||||
"all",
|
||||
"client",
|
||||
"server"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"notifications": {
|
||||
"$ref": "#/definitions/NotificationConfig"
|
||||
"$ref": "#/definitions/NotificationConfig",
|
||||
"description": "Settings to configure 3rd party notifications for when ContextMod behavior occurs"
|
||||
},
|
||||
"operator": {
|
||||
"description": "Settings related to the user(s) running this ContextMod instance and information on the bot",
|
||||
"properties": {
|
||||
"display": {
|
||||
"description": "A **public** name to display to users of the web interface. Use this to help moderators using your bot identify who is the operator in case they need to contact you.\n\nLeave undefined for no public name to be displayed.\n\n* ENV => `OPERATOR_DISPLAY`\n* ARG => `--operatorDisplay <name>`",
|
||||
"examples": [
|
||||
"Moderators of r/MySubreddit"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"polling": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/PollingDefaults"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"interval": {
|
||||
"type": "number"
|
||||
"anyOf": [
|
||||
{
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"limit": {
|
||||
"type": "number"
|
||||
},
|
||||
"sharedMod": {
|
||||
"type": "boolean"
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
}
|
||||
]
|
||||
},
|
||||
"snoowrap": {
|
||||
"properties": {
|
||||
"debug": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"proxy": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"subreddits": {
|
||||
"properties": {
|
||||
"dryRun": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"heartbeatInterval": {
|
||||
"type": "number"
|
||||
},
|
||||
"names": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"wikiConfig": {
|
||||
"type": "string"
|
||||
],
|
||||
"description": "The name, or names, of the Reddit accounts, without prefix, that the operators of this bot uses.\n\nThis is used for showing more information in the web interface IE show all logs/subreddits if even not a moderator.\n\nEX -- User is /u/FoxxMD then `\"name\": [\"FoxxMD\"]`\n\n* ENV => `OPERATOR` (if list, comma-delimited)\n* ARG => `--operator <name...>`",
|
||||
"examples": [
|
||||
[
|
||||
"FoxxMD",
|
||||
"AnotherUser"
|
||||
]
|
||||
]
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"web": {
|
||||
"description": "Settings for the web interface",
|
||||
"properties": {
|
||||
"enabled": {
|
||||
"type": "boolean"
|
||||
"caching": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/CacheOptions"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"memory",
|
||||
"redis"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"description": "Caching provider to use for session and invite data\n\nIf none is provided the top-level caching provider is used"
|
||||
},
|
||||
"clients": {
|
||||
"description": "A list of CM Servers this Client should connect to.\n\nIf not specified a default `BotConnection` for this instance is generated",
|
||||
"examples": [
|
||||
[
|
||||
{
|
||||
"host": "localhost:8095",
|
||||
"secret": "aRandomString"
|
||||
}
|
||||
]
|
||||
],
|
||||
"items": {
|
||||
"$ref": "#/definitions/BotConnection"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"credentials": {
|
||||
"$ref": "#/definitions/WebCredentials",
|
||||
"description": "Separate credentials for the web interface can be provided when also running the api.\n\nAll properties not specified will default to values given in ENV/ARG credential properties\n\nRefer to the [required credentials table](https://github.com/FoxxMD/context-mod/blob/master/docs/operatorConfiguration.md#minimum-required-configuration) to see what is necessary for the web interface.",
|
||||
"examples": [
|
||||
{
|
||||
"clientId": "f4b4df1_9oiu",
|
||||
"clientSecret": "34v5q1c564_yt7",
|
||||
"redirectUri": "http://localhost:8085/callback"
|
||||
}
|
||||
]
|
||||
},
|
||||
"invites": {
|
||||
"description": "Settings related to oauth flow invites",
|
||||
"properties": {
|
||||
"maxAge": {
|
||||
"default": 0,
|
||||
"description": "Number of seconds an invite should be valid for\n\n If `0` or not specified (default) invites do not expire",
|
||||
"examples": [
|
||||
0
|
||||
],
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"logLevel": {
|
||||
"description": "The default log level to filter to in the web interface\n\nIf not specified or `null` will be same as global `logLevel`",
|
||||
"enum": [
|
||||
"debug",
|
||||
"error",
|
||||
@@ -338,28 +720,50 @@
|
||||
"type": "string"
|
||||
},
|
||||
"maxLogs": {
|
||||
"default": 200,
|
||||
"description": "Maximum number of log statements to keep in memory for each subreddit",
|
||||
"examples": [
|
||||
200
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"operators": {
|
||||
"description": "The name, or names, of the Reddit accounts, without prefix, that the operators of this **web interface** uses.\n\n**Note:** This is **not the same** as the top-level `operator` property. This allows specified users to see the status of all `clients` but **not** access to them -- that must still be specified in the `operator.name` property in the configuration of each bot.\n\n\nEX -- User is /u/FoxxMD then `\"name\": [\"FoxxMD\"]`",
|
||||
"examples": [
|
||||
[
|
||||
"FoxxMD",
|
||||
"AnotherUser"
|
||||
]
|
||||
],
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"port": {
|
||||
"default": 8085,
|
||||
"description": "The port for the web interface\n\n* ENV => `PORT`\n* ARG => `--port <number>`",
|
||||
"examples": [
|
||||
8085
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"session": {
|
||||
"description": "Settings to configure the behavior of user sessions -- the session is what the web interface uses to identify logged in users.",
|
||||
"properties": {
|
||||
"provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/CacheOptions"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"memory",
|
||||
"redis"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
"maxAge": {
|
||||
"default": 86400,
|
||||
"description": "Number of seconds a session should be valid for.\n\nDefault is 1 day",
|
||||
"examples": [
|
||||
86400
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"secret": {
|
||||
"description": "The secret value used to encrypt session data\n\nIf provider is persistent (`redis`) specifying a value here will ensure sessions are valid between application restarts\n\nWhen not present or `null` a random string is generated on application start",
|
||||
"examples": [
|
||||
"definitelyARandomString"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -24,6 +24,72 @@
|
||||
}
|
||||
],
|
||||
"definitions": {
|
||||
"ActivityThreshold": {
|
||||
"additionalProperties": false,
|
||||
"description": "At least one count property must be present. If both are present then either can trigger the rule",
|
||||
"minProperties": 1,
|
||||
"properties": {
|
||||
"commentState": {
|
||||
"$ref": "#/definitions/CommentState",
|
||||
"description": "When present, a Comment will only be counted if it meets this criteria",
|
||||
"examples": [
|
||||
{
|
||||
"op": true,
|
||||
"removed": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"karma": {
|
||||
"description": "Test the **combined karma** from Activities found in the specified subreddits\n\nValue is a string containing a comparison operator and a number of **combined karma** to compare against\n\nIf specified then both `threshold` and `karma` must be met for this `SubThreshold` to be satisfied\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 50` => greater than 50 combined karma for all found Activities in specified subreddits",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"submissionState": {
|
||||
"$ref": "#/definitions/SubmissionState",
|
||||
"description": "When present, a Submission will only be counted if it meets this criteria",
|
||||
"examples": [
|
||||
{
|
||||
"over_18": true,
|
||||
"removed": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"subreddits": {
|
||||
"description": "Activities will be counted if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
|
||||
"examples": [
|
||||
[
|
||||
"mealtimevideos",
|
||||
"askscience",
|
||||
"/onlyfans*/i",
|
||||
{
|
||||
"over18": true
|
||||
}
|
||||
]
|
||||
],
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/SubredditState"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"threshold": {
|
||||
"default": ">= 1",
|
||||
"description": "A string containing a comparison operator and a value to compare recent activities against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 3` => greater than 3 activities found in the listed subreddits\n* EX `<= 75%` => number of Activities in the subreddits listed are equal to or less than 75% of all Activities\n\n**Note:** If you use percentage comparison here as well as `useSubmissionAsReference` then \"all Activities\" is only pertains to Activities that had the Link of the Submission, rather than all Activities from this window.",
|
||||
"examples": [
|
||||
">= 1"
|
||||
],
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"ActivityWindowCriteria": {
|
||||
"additionalProperties": false,
|
||||
"description": "Multiple properties that may be used to define what range of Activity to retrieve.\n\nMay specify one, or both properties along with the `satisfyOn` property, to affect the retrieval behavior.",
|
||||
@@ -113,7 +179,7 @@
|
||||
"properties": {
|
||||
"aggregateOn": {
|
||||
"default": "undefined",
|
||||
"description": "If `domains` is not specified this list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)\n* If `link` is included then aggregate author's submission history which is external links but not media\n\nIf nothing is specified or list is empty (default) all domains are aggregated",
|
||||
"description": "This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)\n* If `link` is included then aggregate author's submission history which is external links but not media\n\nIf nothing is specified or list is empty (default) all domains are aggregated",
|
||||
"examples": [
|
||||
[
|
||||
]
|
||||
@@ -141,7 +207,7 @@
|
||||
[
|
||||
]
|
||||
],
|
||||
"description": "A list of domains whose Activities will be tested against `threshold`.\n\nIf this is present then `aggregateOn` is ignored.\n\nThe values are tested as partial strings so you do not need to include full URLs, just the part that matters.\n\nEX `[\"youtube\"]` will match submissions with the domain `https://youtube.com/c/aChannel`\nEX `[\"youtube.com/c/bChannel\"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`\n\nIf you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`\n\n**If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**\n\nIf nothing is specified or list is empty (default) aggregate using `aggregateOn`",
|
||||
"description": "A list of domains whose Activities will be tested against `threshold`.\n\nThe values are tested as partial strings so you do not need to include full URLs, just the part that matters.\n\nEX `[\"youtube\"]` will match submissions with the domain `https://youtube.com/c/aChannel`\nEX `[\"youtube.com/c/bChannel\"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`\n\nIf you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`\n\n**If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**\n\nIf nothing is specified or list is empty (default) aggregate using `aggregateOn`",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
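A sketch of the partial-string domain matching the `domains` description above calls out (configured values are substring-tested, not exact hosts); `domainMatches` is an illustrative helper:

```typescript
const domainMatches = (submissionDomain: string, configured: string[]): boolean =>
    configured.some(d => submissionDomain.includes(d));

console.log(domainMatches('https://youtube.com/c/aChannel', ['youtube']));                // true
console.log(domainMatches('https://youtube.com/c/aChannel', ['youtube.com/c/bChannel'])); // false
console.log(domainMatches('self.AskReddit', ['self.AskReddit']));                         // true
```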
@@ -230,7 +296,7 @@
|
||||
"type": "object"
|
||||
},
|
||||
"AttributionJSONConfig": {
|
||||
"description": "Aggregates all of the domain/media accounts attributed to an author's Submission history. If any domain is over the threshold the rule is triggered\n\nAvailable data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):\n\n```\ntriggeredDomainCount => Number of domains that met the threshold\nactivityTotal => Number of Activities considered from window\nwindow => The date range of the Activities considered\nlargestCount => The count from the largest aggregated domain\nlargestPercentage => The percentage of Activities the largest aggregated domain comprises\nsmallestCount => The count from the smallest aggregated domain\nsmallestPercentage => The percentage of Activities the smallest aggregated domain comprises\ncountRange => A convenience string displaying \"smallestCount - largestCount\" or just one number if both are the same\npercentRange => A convenience string displaying \"smallestPercentage - largestPercentage\" or just one percentage if both are the same\ndomains => An array of all the domain URLs that met the threshold\ndomainsDelim => A comma-delimited string of all the domain URLs that met the threshold\ntitles => The friendly-name of the domain if one is present, otherwise the URL (IE youtube.com/c/34ldfa343 => \"My Youtube Channel Title\")\ntitlesDelim => A comma-delimited string of all the domain friendly-names\nthreshold => The threshold you configured for this Rule to trigger\nurl => Url of the submission that triggered the rule\n```",
|
||||
"description": "Aggregates all of the domain/media accounts attributed to an author's Submission history. If any domain is over the threshold the rule is triggered\n\nAvailable data for [Action templating](https://github.com/FoxxMD/context-mod#action-templating):\n\n```\ntriggeredDomainCount => Number of domains that met the threshold\nactivityTotal => Number of Activities considered from window\nwindow => The date range of the Activities considered\nlargestCount => The count from the largest aggregated domain\nlargestPercentage => The percentage of Activities the largest aggregated domain comprises\nsmallestCount => The count from the smallest aggregated domain\nsmallestPercentage => The percentage of Activities the smallest aggregated domain comprises\ncountRange => A convenience string displaying \"smallestCount - largestCount\" or just one number if both are the same\npercentRange => A convenience string displaying \"smallestPercentage - largestPercentage\" or just one percentage if both are the same\ndomains => An array of all the domain URLs that met the threshold\ndomainsDelim => A comma-delimited string of all the domain URLs that met the threshold\ntitles => The friendly-name of the domain if one is present, otherwise the URL (IE youtube.com/c/34ldfa343 => \"My Youtube Channel Title\")\ntitlesDelim => A comma-delimited string of all the domain friendly-names\nthreshold => The threshold you configured for this Rule to trigger\nurl => Url of the submission that triggered the rule\n```",
|
||||
"properties": {
|
||||
"authorIs": {
|
||||
"$ref": "#/definitions/AuthorOptions",
|
||||
@@ -330,7 +396,7 @@
|
||||
"properties": {
|
||||
"age": {
|
||||
"description": "Test the age of the Author's account (when it was created) against this comparison\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>`\n\n* EX `> 100 days` => Passes if Author's account is older than 100 days\n* EX `<= 2 months` => Passes if Author's account is younger than or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
|
||||
"pattern": "^\\s*(?<opStr>>|>=|<|<=)\\s*(?<time>\\d+)\\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\\s*$",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"commentKarma": {
|
||||
@@ -532,16 +598,34 @@
|
||||
"type": "boolean"
|
||||
},
|
||||
"op": {
|
||||
"description": "Is this Comment Author also the Author of the Submission this comment is in?",
|
||||
"type": "boolean"
|
||||
},
|
||||
"removed": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"reports": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"spam": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"stickied": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"submissionState": {
|
||||
"description": "A list of SubmissionState attributes to test the Submission this comment is in",
|
||||
"items": {
|
||||
"$ref": "#/definitions/SubmissionState"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
@@ -603,23 +687,28 @@
|
||||
"type": "object"
|
||||
},
|
||||
"HistoryCriteria": {
|
||||
"description": "If both `submission` and `comment` are defined then criteria will only trigger if BOTH thresholds are met",
|
||||
"description": "Criteria will only trigger if ALL present thresholds (comment, submission, total) are met",
|
||||
"properties": {
|
||||
"comment": {
|
||||
"description": "A string containing a comparison operator and a value to compare comments against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 comments\n* EX `<= 75%` => comments are equal to or less than 75% of all Activities\n\nIf your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:\n\n* EX `> 100 OP` => greater than 100 comments as OP\n* EX `<= 25% as OP` => Comments as OP were less then or equal to 25% of **all Comments**",
|
||||
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) comments against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 comments\n* EX `<= 75%` => comments are equal to or less than 75% of unfiltered Activities\n\nIf your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:\n\n* EX `> 100 OP` => greater than 100 filtered comments as OP\n* EX `<= 25% as OP` => **Filtered** comments as OP were less then or equal to 25% of **unfiltered Comments**",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"minActivityCount": {
|
||||
"default": 5,
|
||||
"description": "The minimum number of activities that must exist from the `window` results for this criteria to run",
|
||||
"description": "The minimum number of **filtered** activities that must exist from the `window` results for this criteria to run",
|
||||
"type": "number"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"submission": {
|
||||
"description": "A string containing a comparison operator and a value to compare submissions against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 submissions\n* EX `<= 75%` => submissions are equal to or less than 75% of all Activities",
|
||||
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) submissions against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 filtered submissions\n* EX `<= 75%` => filtered submissions are equal to or less than 75% of unfiltered Activities",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"total": {
|
||||
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`) activities against\n\n**Note:** This is only useful if using `include` or `exclude` otherwise percent will always be 100% and total === activityTotal\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 filtered activities\n* EX `<= 75%` => filtered activities are equal to or less than 75% of all Activities",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
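To make the filtered-vs-unfiltered wording above concrete, here is a small hypothetical sketch: percentage comparisons divide the post-`include`/`exclude` count by the unfiltered total retrieved from `window`. The interface and helper below are assumptions for illustration only:

```typescript
// Sketch of the percentage semantics described above: filtered counts
// (after include/exclude) are compared against the UNFILTERED total
// retrieved from `window`. Names here are assumptions, not project internals.
interface HistorySample {
    unfilteredTotal: number;   // activityTotal retrieved from the window
    filteredComments: number;  // comments left after include/exclude filtering
    filteredSubmissions: number;
}

// A `comment` criteria of "<= 75%" passes when filtered comments are at most
// 75% of the unfiltered activity total.
const commentPercentPasses = (s: HistorySample, maxPercent: number): boolean =>
    (s.filteredComments / s.unfilteredTotal) * 100 <= maxPercent;

const sample: HistorySample = { unfilteredTotal: 200, filteredComments: 120, filteredSubmissions: 30 };
console.log(commentPercentPasses(sample, 75)); // true (60% <= 75%)
```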
@@ -650,7 +739,7 @@
|
||||
"type": "object"
|
||||
},
|
||||
"HistoryJSONConfig": {
|
||||
"description": "Aggregates an Author's submission and comment history. Rule can be triggered on count/percent of total (for either or both comment/sub totals) as well as comment OP total.\n\nAvailable data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):\n\n```\nactivityTotal => Total number of activities\nsubmissionTotal => Total number of submissions\ncommentTotal => Total number of comments\nopTotal => Total number of comments as OP\nthresholdSummary => A text summary of the first Criteria triggered with totals/percentages\ncriteria => The ThresholdCriteria object\nwindow => A text summary of the range of Activities considered (# of Items if number, time range if Duration)\n```",
|
||||
"description": "Aggregates an Author's submission and comment history. Rule can be triggered on count/percent of total (for either or both comment/sub totals) as well as comment OP total.\n\nAvailable data for [Action templating](https://github.com/FoxxMD/context-mod#action-templating):\n\n```\nactivityTotal => Total number of activities\nsubmissionTotal => Total number of submissions\ncommentTotal => Total number of comments\nopTotal => Total number of comments as OP\nthresholdSummary => A text summary of the first Criteria triggered with totals/percentages\ncriteria => The ThresholdCriteria object\nwindow => A text summary of the range of Activities considered (# of Items if number, time range if Duration)\n```",
|
||||
"properties": {
|
||||
"authorIs": {
|
||||
"$ref": "#/definitions/AuthorOptions",
|
||||
@@ -688,27 +777,51 @@
|
||||
"type": "array"
|
||||
},
|
||||
"exclude": {
|
||||
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
|
||||
"description": "If present, activities will be counted only if they are **NOT** found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`\n\n**Note:** This affects **post-window retrieval** activities. So that:\n\n* `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering\n* all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**\n* -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`",
|
||||
"examples": [
|
||||
"mealtimevideos",
|
||||
"askscience"
|
||||
[
|
||||
"mealtimevideos",
|
||||
"askscience",
|
||||
"/onlyfans*/i",
|
||||
{
|
||||
"over18": true
|
||||
}
|
||||
]
|
||||
],
|
||||
"items": {
|
||||
"type": "string"
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/SubredditState"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"minItems": 1,
|
||||
"type": "array"
|
||||
},
|
||||
"include": {
|
||||
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
|
||||
"description": "If present, activities will be counted only if they are found in this list of Subreddits.\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`\n\n **Note:** This affects **post-window retrieval** activities. So that:\n\n* `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering\n* all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**\n* -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`",
|
||||
"examples": [
|
||||
"mealtimevideos",
|
||||
"askscience"
|
||||
[
|
||||
"mealtimevideos",
|
||||
"askscience",
|
||||
"/onlyfans*/i",
|
||||
{
|
||||
"over18": true
|
||||
}
|
||||
]
|
||||
],
|
||||
"items": {
|
||||
"type": "string"
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/SubredditState"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"minItems": 1,
|
||||
"type": "array"
|
||||
},
|
||||
"itemIs": {
|
||||
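For the `include`/`exclude` lists described above, each entry can be a plain subreddit name, a `/pattern/flags` regex string, or a `SubredditState` object. A hedged sketch (not the project's implementation) of how a single entry might be matched against a subreddit:

```typescript
// Illustrative matching of one include/exclude entry against a subreddit:
// a plain name (case-insensitive), a "/pattern/flags" regex string, or a
// SubredditState-like object.
interface SubredditStateLike {
    over18?: boolean;
    quarantine?: boolean;
}

type SubredditCriterion = string | SubredditStateLike;

interface SubredditInfo {
    name: string;
    over18: boolean;
    quarantine: boolean;
}

const REGEX_LITERAL = /^\/(.+)\/([a-z]*)$/i;

const matchesCriterion = (sub: SubredditInfo, criterion: SubredditCriterion): boolean => {
    if (typeof criterion === 'string') {
        const asRegex = criterion.match(REGEX_LITERAL);
        if (asRegex !== null) {
            // slash-delimited value => treat as a regular expression on the name
            return new RegExp(asRegex[1], asRegex[2]).test(sub.name);
        }
        // plain value => case-insensitive name comparison
        return sub.name.toLowerCase() === criterion.toLowerCase();
    }
    // SubredditState-like object: every property that is present must match
    return (criterion.over18 === undefined || criterion.over18 === sub.over18)
        && (criterion.quarantine === undefined || criterion.quarantine === sub.quarantine);
};

const sub: SubredditInfo = { name: 'mealtimevideos', over18: false, quarantine: false };
console.log(matchesCriterion(sub, 'MealtimeVideos')); // true
console.log(matchesCriterion(sub, { over18: true })); // false
```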
@@ -751,7 +864,7 @@
|
||||
"type": "object"
|
||||
},
|
||||
"RecentActivityRuleJSONConfig": {
|
||||
"description": "Checks a user's history for any Activity (Submission/Comment) in the subreddits specified in thresholds\n\nAvailable data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):\n\n```\nsummary => comma-deliminated list of subreddits that hit the threshold and their count EX subredditA(1), subredditB(4),...\nsubCount => Total number of subreddits that hit the threshold\ntotalCount => Total number of all activity occurrences in subreddits\n```",
|
||||
"description": "Checks a user's history for any Activity (Submission/Comment) in the subreddits specified in thresholds\n\nAvailable data for [Action templating](https://github.com/FoxxMD/context-mod#action-templating):\n\n```\nsummary => comma-deliminated list of subreddits that hit the threshold and their count EX subredditA(1), subredditB(4),...\nsubCount => Total number of subreddits that hit the threshold\ntotalCount => Total number of all activity occurrences in subreddits\n```",
|
||||
"properties": {
|
||||
"authorIs": {
|
||||
"$ref": "#/definitions/AuthorOptions",
|
||||
@@ -822,7 +935,7 @@
|
||||
"thresholds": {
|
||||
"description": "A list of subreddits/count criteria that may trigger this rule. ANY SubThreshold will trigger this rule.",
|
||||
"items": {
|
||||
"$ref": "#/definitions/SubThreshold"
|
||||
"$ref": "#/definitions/ActivityThreshold"
|
||||
},
|
||||
"minItems": 1,
|
||||
"type": "array"
|
||||
@@ -859,6 +972,49 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"RegExp": {
|
||||
"properties": {
|
||||
"dotAll": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"flags": {
|
||||
"type": "string"
|
||||
},
|
||||
"global": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"ignoreCase": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"lastIndex": {
|
||||
"type": "number"
|
||||
},
|
||||
"multiline": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"source": {
|
||||
"type": "string"
|
||||
},
|
||||
"sticky": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"unicode": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"dotAll",
|
||||
"flags",
|
||||
"global",
|
||||
"ignoreCase",
|
||||
"lastIndex",
|
||||
"multiline",
|
||||
"source",
|
||||
"sticky",
|
||||
"unicode"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"RegexCriteria": {
|
||||
"properties": {
|
||||
"activityMatchThreshold": {
|
||||
@@ -897,16 +1053,12 @@
|
||||
"type": "string"
|
||||
},
|
||||
"regex": {
|
||||
"description": "A valid Regular Expression to test content against\n\nDo not wrap expression in forward slashes\n\nEX For the expression `/reddit|FoxxMD/` use the value should be `reddit|FoxxMD`",
|
||||
"description": "A valid Regular Expression to test content against\n\nIf no flags are specified then the **global** flag is used by default",
|
||||
"examples": [
|
||||
"reddit|FoxxMD"
|
||||
"/reddit|FoxxMD/ig"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"regexFlags": {
|
||||
"description": "Regex flags to use",
|
||||
"type": "string"
|
||||
},
|
||||
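The `regex` value above may now be written as a slash-delimited literal such as `/reddit|FoxxMD/ig`, with the global flag assumed when no flags are given. A rough TypeScript sketch of that parsing behavior (illustrative only, not the project's code):

```typescript
// Accept either a bare expression ("reddit|FoxxMD") or a slash-delimited
// literal ("/reddit|FoxxMD/ig"), falling back to the global flag when none
// are specified.
const parseRegexValue = (value: string, fallbackFlags = 'g'): RegExp => {
    const literal = value.match(/^\/(.+)\/([a-z]*)$/i);
    if (literal !== null) {
        const [, source, flags] = literal;
        return new RegExp(source, flags === '' ? fallbackFlags : flags);
    }
    return new RegExp(value, fallbackFlags);
};

console.log(parseRegexValue('/reddit|FoxxMD/ig').flags); // "gi" (normalized)
console.log(parseRegexValue('reddit|FoxxMD').flags);     // "g"
```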
"testOn": {
|
||||
"default": [
|
||||
"title",
|
||||
@@ -959,7 +1111,7 @@
|
||||
"type": "object"
|
||||
},
|
||||
"RegexRuleJSONConfig": {
|
||||
"description": "Test a (list of) Regular Expression against the contents or title of an Activity\n\nOptionally, specify a `window` of the User's history to additionally test against\n\nAvailable data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):",
|
||||
"description": "Test a (list of) Regular Expression against the contents or title of an Activity\n\nOptionally, specify a `window` of the User's history to additionally test against\n\nAvailable data for [Action templating](https://github.com/FoxxMD/context-mod#action-templating):",
|
||||
"properties": {
|
||||
"authorIs": {
|
||||
"$ref": "#/definitions/AuthorOptions",
|
||||
@@ -1046,7 +1198,7 @@
|
||||
"type": "object"
|
||||
},
|
||||
"RepeatActivityJSONConfig": {
|
||||
"description": "Checks a user's history for Submissions with identical content\n\nAvailable data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):\n\n```\ncount => Total number of repeat Submissions\nthreshold => The threshold you configured for this Rule to trigger\nurl => Url of the submission that triggered the rule\n```",
|
||||
"description": "Checks a user's history for Submissions with identical content\n\nAvailable data for [Action templating](https://github.com/FoxxMD/context-mod#action-templating):\n\n```\ncount => Total number of repeat Submissions\nthreshold => The threshold you configured for this Rule to trigger\nurl => Url of the submission that triggered the rule\n```",
|
||||
"properties": {
|
||||
"authorIs": {
|
||||
"$ref": "#/definitions/AuthorOptions",
|
||||
@@ -1068,15 +1220,27 @@
|
||||
]
|
||||
},
|
||||
"exclude": {
|
||||
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
|
||||
"description": "If present, activities will be counted only if they are **NOT** found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
|
||||
"examples": [
|
||||
"mealtimevideos",
|
||||
"askscience"
|
||||
[
|
||||
"mealtimevideos",
|
||||
"askscience",
|
||||
"/onlyfans*/i",
|
||||
{
|
||||
"over18": true
|
||||
}
|
||||
]
|
||||
],
|
||||
"items": {
|
||||
"type": "string"
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/SubredditState"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"minItems": 1,
|
||||
"type": "array"
|
||||
},
|
||||
"gapAllowance": {
|
||||
@@ -1084,15 +1248,27 @@
|
||||
"type": "number"
|
||||
},
|
||||
"include": {
|
||||
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
|
||||
"description": "If present, activities will be counted only if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
|
||||
"examples": [
|
||||
"mealtimevideos",
|
||||
"askscience"
|
||||
[
|
||||
"mealtimevideos",
|
||||
"askscience",
|
||||
"/onlyfans*/i",
|
||||
{
|
||||
"over18": true
|
||||
}
|
||||
]
|
||||
],
|
||||
"items": {
|
||||
"type": "string"
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/SubredditState"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"minItems": 1,
|
||||
"type": "array"
|
||||
},
|
||||
"itemIs": {
|
||||
@@ -1182,40 +1358,6 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"SubThreshold": {
|
||||
"additionalProperties": false,
|
||||
"description": "At least one count property must be present. If both are present then either can trigger the rule",
|
||||
"minProperties": 1,
|
||||
"properties": {
|
||||
"subreddits": {
|
||||
"description": "A list of Subreddits (by name, case-insensitive) to look for.\n\nEX [\"mealtimevideos\",\"askscience\"]",
|
||||
"examples": [
|
||||
[
|
||||
"mealtimevideos",
|
||||
"askscience"
|
||||
]
|
||||
],
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"minItems": 1,
|
||||
"type": "array"
|
||||
},
|
||||
"threshold": {
|
||||
"default": ">= 1",
|
||||
"description": "A string containing a comparison operator and a value to compare recent activities against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 3` => greater than 3 activities found in the listed subreddits\n* EX `<= 75%` => number of Activities in the subreddits listed are equal to or less than 75% of all Activities\n\n**Note:** If you use percentage comparison here as well as `useSubmissionAsReference` then \"all Activities\" is only pertains to Activities that had the Link of the Submission, rather than all Activities from this window.",
|
||||
"examples": [
|
||||
">= 1"
|
||||
],
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"subreddits"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"SubmissionState": {
|
||||
"description": "Different attributes a `Submission` can be in. Only include a property if you want to check it.",
|
||||
"examples": [
|
||||
@@ -1240,6 +1382,12 @@
|
||||
"is_self": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"link_flair_css_class": {
|
||||
"type": "string"
|
||||
},
|
||||
"link_flair_text": {
|
||||
"type": "string"
|
||||
},
|
||||
"locked": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -1253,6 +1401,16 @@
|
||||
"removed": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"reports": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"spam": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -1261,6 +1419,48 @@
|
||||
},
|
||||
"stickied": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"title": {
|
||||
"description": "A valid regular expression to match against the title of the submission",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"SubredditState": {
|
||||
"description": "Different attributes a `Subreddit` can be in. Only include a property if you want to check it.",
|
||||
"examples": [
|
||||
{
|
||||
"over18": true
|
||||
}
|
||||
],
|
||||
"properties": {
|
||||
"name": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/RegExp"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"description": "The name the subreddit.\n\nCan be a normal string (will check case-insensitive) or a regular expression\n\nEX `[\"mealtimevideos\", \"/onlyfans*\\/i\"]`",
|
||||
"examples": [
|
||||
"mealtimevideos",
|
||||
"/onlyfans*/i"
|
||||
]
|
||||
},
|
||||
"over18": {
|
||||
"description": "Is subreddit NSFW/over 18?\n\n**Note**: This is **mod-controlled flag** so it is up to the mods of the subreddit to correctly mark their subreddit as NSFW",
|
||||
"type": "boolean"
|
||||
},
|
||||
"quarantine": {
|
||||
"description": "Is subreddit quarantined?",
|
||||
"type": "boolean"
|
||||
},
|
||||
"stateDescription": {
|
||||
"description": "A friendly description of what this State is trying to parse",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"type": "object"
@@ -1,6 +1,72 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"definitions": {
|
||||
"ActivityThreshold": {
|
||||
"additionalProperties": false,
|
||||
"description": "At least one count property must be present. If both are present then either can trigger the rule",
|
||||
"minProperties": 1,
|
||||
"properties": {
|
||||
"commentState": {
|
||||
"$ref": "#/definitions/CommentState",
|
||||
"description": "When present, a Comment will only be counted if it meets this criteria",
|
||||
"examples": [
|
||||
{
|
||||
"op": true,
|
||||
"removed": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"karma": {
|
||||
"description": "Test the **combined karma** from Activities found in the specified subreddits\n\nValue is a string containing a comparison operator and a number of **combined karma** to compare against\n\nIf specified then both `threshold` and `karma` must be met for this `SubThreshold` to be satisfied\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 50` => greater than 50 combined karma for all found Activities in specified subreddits",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"submissionState": {
|
||||
"$ref": "#/definitions/SubmissionState",
|
||||
"description": "When present, a Submission will only be counted if it meets this criteria",
|
||||
"examples": [
|
||||
{
|
||||
"over_18": true,
|
||||
"removed": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"subreddits": {
|
||||
"description": "Activities will be counted if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
|
||||
"examples": [
|
||||
[
|
||||
"mealtimevideos",
|
||||
"askscience",
|
||||
"/onlyfans*/i",
|
||||
{
|
||||
"over18": true
|
||||
}
|
||||
]
|
||||
],
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/SubredditState"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"threshold": {
|
||||
"default": ">= 1",
|
||||
"description": "A string containing a comparison operator and a value to compare recent activities against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 3` => greater than 3 activities found in the listed subreddits\n* EX `<= 75%` => number of Activities in the subreddits listed are equal to or less than 75% of all Activities\n\n**Note:** If you use percentage comparison here as well as `useSubmissionAsReference` then \"all Activities\" is only pertains to Activities that had the Link of the Submission, rather than all Activities from this window.",
|
||||
"examples": [
|
||||
">= 1"
|
||||
],
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
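A hypothetical example of the `ActivityThreshold` shape defined above, written as TypeScript for illustration; the field names follow the schema, while the interface itself is an assumption made here:

```typescript
// Field names mirror the ActivityThreshold definition above; the interface is
// written only for this example.
interface ActivityThresholdExample {
    subreddits?: Array<string | { over18?: boolean; quarantine?: boolean }>;
    threshold?: string; // e.g. ">= 1" or "<= 75%"
    karma?: string;     // e.g. "> 50" (combined karma of matched activities)
}

// Trigger when more than 3 activities are found in the named subreddits, any
// subreddit matching the regex string, or any NSFW subreddit, AND their
// combined karma is over 50.
const example: ActivityThresholdExample = {
    subreddits: ['mealtimevideos', 'askscience', '/onlyfans*/i', { over18: true }],
    threshold: '> 3',
    karma: '> 50',
};
```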
"ActivityWindowCriteria": {
|
||||
"additionalProperties": false,
|
||||
"description": "Multiple properties that may be used to define what range of Activity to retrieve.\n\nMay specify one, or both properties along with the `satisfyOn` property, to affect the retrieval behavior.",
|
||||
@@ -90,7 +156,7 @@
|
||||
"properties": {
|
||||
"aggregateOn": {
|
||||
"default": "undefined",
|
||||
"description": "If `domains` is not specified this list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)\n* If `link` is included then aggregate author's submission history which is external links but not media\n\nIf nothing is specified or list is empty (default) all domains are aggregated",
|
||||
"description": "This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)\n* If `link` is included then aggregate author's submission history which is external links but not media\n\nIf nothing is specified or list is empty (default) all domains are aggregated",
|
||||
"examples": [
|
||||
[
|
||||
]
|
||||
@@ -118,7 +184,7 @@
|
||||
[
|
||||
]
|
||||
],
|
||||
"description": "A list of domains whose Activities will be tested against `threshold`.\n\nIf this is present then `aggregateOn` is ignored.\n\nThe values are tested as partial strings so you do not need to include full URLs, just the part that matters.\n\nEX `[\"youtube\"]` will match submissions with the domain `https://youtube.com/c/aChannel`\nEX `[\"youtube.com/c/bChannel\"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`\n\nIf you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`\n\n**If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**\n\nIf nothing is specified or list is empty (default) aggregate using `aggregateOn`",
|
||||
"description": "A list of domains whose Activities will be tested against `threshold`.\n\nThe values are tested as partial strings so you do not need to include full URLs, just the part that matters.\n\nEX `[\"youtube\"]` will match submissions with the domain `https://youtube.com/c/aChannel`\nEX `[\"youtube.com/c/bChannel\"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`\n\nIf you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`\n\n**If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**\n\nIf nothing is specified or list is empty (default) aggregate using `aggregateOn`",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
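The `domains` values above are tested as partial strings against a submission's domain. A tiny illustrative sketch of that matching, using the EX values from the description:

```typescript
// Partial-string domain matching as described above: a configured value
// matches when it appears anywhere in the submission's domain/URL.
const domainMatches = (submissionDomain: string, configured: string): boolean =>
    submissionDomain.toLowerCase().includes(configured.toLowerCase());

console.log(domainMatches('https://youtube.com/c/aChannel', 'youtube'));                // true
console.log(domainMatches('https://youtube.com/c/aChannel', 'youtube.com/c/bChannel')); // false
```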
@@ -207,7 +273,7 @@
|
||||
"type": "object"
|
||||
},
|
||||
"AttributionJSONConfig": {
|
||||
"description": "Aggregates all of the domain/media accounts attributed to an author's Submission history. If any domain is over the threshold the rule is triggered\n\nAvailable data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):\n\n```\ntriggeredDomainCount => Number of domains that met the threshold\nactivityTotal => Number of Activities considered from window\nwindow => The date range of the Activities considered\nlargestCount => The count from the largest aggregated domain\nlargestPercentage => The percentage of Activities the largest aggregated domain comprises\nsmallestCount => The count from the smallest aggregated domain\nsmallestPercentage => The percentage of Activities the smallest aggregated domain comprises\ncountRange => A convenience string displaying \"smallestCount - largestCount\" or just one number if both are the same\npercentRange => A convenience string displaying \"smallestPercentage - largestPercentage\" or just one percentage if both are the same\ndomains => An array of all the domain URLs that met the threshold\ndomainsDelim => A comma-delimited string of all the domain URLs that met the threshold\ntitles => The friendly-name of the domain if one is present, otherwise the URL (IE youtube.com/c/34ldfa343 => \"My Youtube Channel Title\")\ntitlesDelim => A comma-delimited string of all the domain friendly-names\nthreshold => The threshold you configured for this Rule to trigger\nurl => Url of the submission that triggered the rule\n```",
|
||||
"description": "Aggregates all of the domain/media accounts attributed to an author's Submission history. If any domain is over the threshold the rule is triggered\n\nAvailable data for [Action templating](https://github.com/FoxxMD/context-mod#action-templating):\n\n```\ntriggeredDomainCount => Number of domains that met the threshold\nactivityTotal => Number of Activities considered from window\nwindow => The date range of the Activities considered\nlargestCount => The count from the largest aggregated domain\nlargestPercentage => The percentage of Activities the largest aggregated domain comprises\nsmallestCount => The count from the smallest aggregated domain\nsmallestPercentage => The percentage of Activities the smallest aggregated domain comprises\ncountRange => A convenience string displaying \"smallestCount - largestCount\" or just one number if both are the same\npercentRange => A convenience string displaying \"smallestPercentage - largestPercentage\" or just one percentage if both are the same\ndomains => An array of all the domain URLs that met the threshold\ndomainsDelim => A comma-delimited string of all the domain URLs that met the threshold\ntitles => The friendly-name of the domain if one is present, otherwise the URL (IE youtube.com/c/34ldfa343 => \"My Youtube Channel Title\")\ntitlesDelim => A comma-delimited string of all the domain friendly-names\nthreshold => The threshold you configured for this Rule to trigger\nurl => Url of the submission that triggered the rule\n```",
|
||||
"properties": {
|
||||
"authorIs": {
|
||||
"$ref": "#/definitions/AuthorOptions",
|
||||
@@ -307,7 +373,7 @@
|
||||
"properties": {
|
||||
"age": {
|
||||
"description": "Test the age of the Author's account (when it was created) against this comparison\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>`\n\n* EX `> 100 days` => Passes if Author's account is older than 100 days\n* EX `<= 2 months` => Passes if Author's account is younger than or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
|
||||
"pattern": "^\\s*(?<opStr>>|>=|<|<=)\\s*(?<time>\\d+)\\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\\s*$",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
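The `age` comparison above follows the `(< OR > OR <= OR >=) <number> <unit>` syntax with DayJS duration units. A hedged sketch of how such a comparison could be evaluated, assuming the dayjs `duration` plugin is available (not ContextMod's actual code):

```typescript
import dayjs from 'dayjs';
import duration from 'dayjs/plugin/duration';

dayjs.extend(duration);

// Same unit list as the schema pattern above.
const AGE_PATTERN = /^\s*(>|>=|<|<=)\s*(\d+)\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\s*$/;

const authorAgePasses = (createdUtcSeconds: number, comparison: string): boolean => {
    const match = comparison.match(AGE_PATTERN);
    if (match === null) {
        throw new Error(`'${comparison}' is not a valid age comparison`);
    }
    const [, op, amount, unit] = match;
    const accountAgeMs = dayjs().diff(dayjs.unix(createdUtcSeconds)); // defaults to milliseconds
    // cast kept loose for the sketch; dayjs accepts singular/plural unit strings
    const thresholdMs = dayjs.duration(Number(amount), unit as any).asMilliseconds();
    switch (op) {
        case '>': return accountAgeMs > thresholdMs;
        case '>=': return accountAgeMs >= thresholdMs;
        case '<': return accountAgeMs < thresholdMs;
        default: return accountAgeMs <= thresholdMs;
    }
};

// EX: an account created ~200 days ago passes "> 100 days"
console.log(authorAgePasses(dayjs().subtract(200, 'day').unix(), '> 100 days')); // true
```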
"commentKarma": {
|
||||
@@ -509,16 +575,34 @@
|
||||
"type": "boolean"
|
||||
},
|
||||
"op": {
|
||||
"description": "Is this Comment Author also the Author of the Submission this comment is in?",
|
||||
"type": "boolean"
|
||||
},
|
||||
"removed": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"reports": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"spam": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"stickied": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"submissionState": {
|
||||
"description": "A list of SubmissionState attributes to test the Submission this comment is in",
|
||||
"items": {
|
||||
"$ref": "#/definitions/SubmissionState"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
@@ -580,23 +664,28 @@
|
||||
"type": "object"
|
||||
},
|
||||
"HistoryCriteria": {
|
||||
"description": "If both `submission` and `comment` are defined then criteria will only trigger if BOTH thresholds are met",
|
||||
"description": "Criteria will only trigger if ALL present thresholds (comment, submission, total) are met",
|
||||
"properties": {
|
||||
"comment": {
|
||||
"description": "A string containing a comparison operator and a value to compare comments against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 comments\n* EX `<= 75%` => comments are equal to or less than 75% of all Activities\n\nIf your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:\n\n* EX `> 100 OP` => greater than 100 comments as OP\n* EX `<= 25% as OP` => Comments as OP were less then or equal to 25% of **all Comments**",
|
||||
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) comments against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 comments\n* EX `<= 75%` => comments are equal to or less than 75% of unfiltered Activities\n\nIf your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:\n\n* EX `> 100 OP` => greater than 100 filtered comments as OP\n* EX `<= 25% as OP` => **Filtered** comments as OP were less then or equal to 25% of **unfiltered Comments**",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"minActivityCount": {
|
||||
"default": 5,
|
||||
"description": "The minimum number of activities that must exist from the `window` results for this criteria to run",
|
||||
"description": "The minimum number of **filtered** activities that must exist from the `window` results for this criteria to run",
|
||||
"type": "number"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"submission": {
|
||||
"description": "A string containing a comparison operator and a value to compare submissions against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 submissions\n* EX `<= 75%` => submissions are equal to or less than 75% of all Activities",
|
||||
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) submissions against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 filtered submissions\n* EX `<= 75%` => filtered submissions are equal to or less than 75% of unfiltered Activities",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"total": {
|
||||
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`) activities against\n\n**Note:** This is only useful if using `include` or `exclude` otherwise percent will always be 100% and total === activityTotal\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 filtered activities\n* EX `<= 75%` => filtered activities are equal to or less than 75% of all Activities",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
@@ -627,7 +716,7 @@
|
||||
"type": "object"
|
||||
},
|
||||
"HistoryJSONConfig": {
|
||||
"description": "Aggregates an Author's submission and comment history. Rule can be triggered on count/percent of total (for either or both comment/sub totals) as well as comment OP total.\n\nAvailable data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):\n\n```\nactivityTotal => Total number of activities\nsubmissionTotal => Total number of submissions\ncommentTotal => Total number of comments\nopTotal => Total number of comments as OP\nthresholdSummary => A text summary of the first Criteria triggered with totals/percentages\ncriteria => The ThresholdCriteria object\nwindow => A text summary of the range of Activities considered (# of Items if number, time range if Duration)\n```",
|
||||
"description": "Aggregates an Author's submission and comment history. Rule can be triggered on count/percent of total (for either or both comment/sub totals) as well as comment OP total.\n\nAvailable data for [Action templating](https://github.com/FoxxMD/context-mod#action-templating):\n\n```\nactivityTotal => Total number of activities\nsubmissionTotal => Total number of submissions\ncommentTotal => Total number of comments\nopTotal => Total number of comments as OP\nthresholdSummary => A text summary of the first Criteria triggered with totals/percentages\ncriteria => The ThresholdCriteria object\nwindow => A text summary of the range of Activities considered (# of Items if number, time range if Duration)\n```",
|
||||
"properties": {
|
||||
"authorIs": {
|
||||
"$ref": "#/definitions/AuthorOptions",
|
||||
@@ -665,27 +754,51 @@
|
||||
"type": "array"
|
||||
},
|
||||
"exclude": {
|
||||
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
|
||||
"description": "If present, activities will be counted only if they are **NOT** found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`\n\n**Note:** This affects **post-window retrieval** activities. So that:\n\n* `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering\n* all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**\n* -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`",
|
||||
"examples": [
|
||||
"mealtimevideos",
|
||||
"askscience"
|
||||
[
|
||||
"mealtimevideos",
|
||||
"askscience",
|
||||
"/onlyfans*/i",
|
||||
{
|
||||
"over18": true
|
||||
}
|
||||
]
|
||||
],
|
||||
"items": {
|
||||
"type": "string"
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/SubredditState"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"minItems": 1,
|
||||
"type": "array"
|
||||
},
|
||||
"include": {
|
||||
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
|
||||
"description": "If present, activities will be counted only if they are found in this list of Subreddits.\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`\n\n **Note:** This affects **post-window retrieval** activities. So that:\n\n* `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering\n* all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**\n* -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`",
|
||||
"examples": [
|
||||
"mealtimevideos",
|
||||
"askscience"
|
||||
[
|
||||
"mealtimevideos",
|
||||
"askscience",
|
||||
"/onlyfans*/i",
|
||||
{
|
||||
"over18": true
|
||||
}
|
||||
]
|
||||
],
|
||||
"items": {
|
||||
"type": "string"
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/SubredditState"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"minItems": 1,
|
||||
"type": "array"
|
||||
},
|
||||
"itemIs": {
|
||||
@@ -728,7 +841,7 @@
|
||||
"type": "object"
|
||||
},
|
||||
"RecentActivityRuleJSONConfig": {
|
||||
"description": "Checks a user's history for any Activity (Submission/Comment) in the subreddits specified in thresholds\n\nAvailable data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):\n\n```\nsummary => comma-deliminated list of subreddits that hit the threshold and their count EX subredditA(1), subredditB(4),...\nsubCount => Total number of subreddits that hit the threshold\ntotalCount => Total number of all activity occurrences in subreddits\n```",
|
||||
"description": "Checks a user's history for any Activity (Submission/Comment) in the subreddits specified in thresholds\n\nAvailable data for [Action templating](https://github.com/FoxxMD/context-mod#action-templating):\n\n```\nsummary => comma-deliminated list of subreddits that hit the threshold and their count EX subredditA(1), subredditB(4),...\nsubCount => Total number of subreddits that hit the threshold\ntotalCount => Total number of all activity occurrences in subreddits\n```",
|
||||
"properties": {
|
||||
"authorIs": {
|
||||
"$ref": "#/definitions/AuthorOptions",
|
||||
@@ -799,7 +912,7 @@
|
||||
"thresholds": {
|
||||
"description": "A list of subreddits/count criteria that may trigger this rule. ANY SubThreshold will trigger this rule.",
|
||||
"items": {
|
||||
"$ref": "#/definitions/SubThreshold"
|
||||
"$ref": "#/definitions/ActivityThreshold"
|
||||
},
|
||||
"minItems": 1,
|
||||
"type": "array"
|
||||
@@ -836,6 +949,49 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"RegExp": {
|
||||
"properties": {
|
||||
"dotAll": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"flags": {
|
||||
"type": "string"
|
||||
},
|
||||
"global": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"ignoreCase": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"lastIndex": {
|
||||
"type": "number"
|
||||
},
|
||||
"multiline": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"source": {
|
||||
"type": "string"
|
||||
},
|
||||
"sticky": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"unicode": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"dotAll",
|
||||
"flags",
|
||||
"global",
|
||||
"ignoreCase",
|
||||
"lastIndex",
|
||||
"multiline",
|
||||
"source",
|
||||
"sticky",
|
||||
"unicode"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"RegexCriteria": {
|
||||
"properties": {
|
||||
"activityMatchThreshold": {
|
||||
@@ -874,16 +1030,12 @@
|
||||
"type": "string"
|
||||
},
|
||||
"regex": {
|
||||
"description": "A valid Regular Expression to test content against\n\nDo not wrap expression in forward slashes\n\nEX For the expression `/reddit|FoxxMD/` use the value should be `reddit|FoxxMD`",
|
||||
"description": "A valid Regular Expression to test content against\n\nIf no flags are specified then the **global** flag is used by default",
|
||||
"examples": [
|
||||
"reddit|FoxxMD"
|
||||
"/reddit|FoxxMD/ig"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"regexFlags": {
|
||||
"description": "Regex flags to use",
|
||||
"type": "string"
|
||||
},
|
||||
"testOn": {
|
||||
"default": [
|
||||
"title",
|
||||
@@ -936,7 +1088,7 @@
|
||||
"type": "object"
|
||||
},
|
||||
"RegexRuleJSONConfig": {
|
||||
"description": "Test a (list of) Regular Expression against the contents or title of an Activity\n\nOptionally, specify a `window` of the User's history to additionally test against\n\nAvailable data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):",
|
||||
"description": "Test a (list of) Regular Expression against the contents or title of an Activity\n\nOptionally, specify a `window` of the User's history to additionally test against\n\nAvailable data for [Action templating](https://github.com/FoxxMD/context-mod#action-templating):",
|
||||
"properties": {
|
||||
"authorIs": {
|
||||
"$ref": "#/definitions/AuthorOptions",
|
||||
@@ -1023,7 +1175,7 @@
|
||||
"type": "object"
|
||||
},
|
||||
"RepeatActivityJSONConfig": {
|
||||
"description": "Checks a user's history for Submissions with identical content\n\nAvailable data for [Action templating](https://github.com/FoxxMD/reddit-context-bot#action-templating):\n\n```\ncount => Total number of repeat Submissions\nthreshold => The threshold you configured for this Rule to trigger\nurl => Url of the submission that triggered the rule\n```",
|
||||
"description": "Checks a user's history for Submissions with identical content\n\nAvailable data for [Action templating](https://github.com/FoxxMD/context-mod#action-templating):\n\n```\ncount => Total number of repeat Submissions\nthreshold => The threshold you configured for this Rule to trigger\nurl => Url of the submission that triggered the rule\n```",
|
||||
"properties": {
|
||||
"authorIs": {
|
||||
"$ref": "#/definitions/AuthorOptions",
|
||||
@@ -1045,15 +1197,27 @@
|
||||
]
|
||||
},
|
||||
"exclude": {
|
||||
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
|
||||
"description": "If present, activities will be counted only if they are **NOT** found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
|
||||
"examples": [
|
||||
"mealtimevideos",
|
||||
"askscience"
|
||||
[
|
||||
"mealtimevideos",
|
||||
"askscience",
|
||||
"/onlyfans*/i",
|
||||
{
|
||||
"over18": true
|
||||
}
|
||||
]
|
||||
],
|
||||
"items": {
|
||||
"type": "string"
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/SubredditState"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"minItems": 1,
|
||||
"type": "array"
|
||||
},
|
||||
"gapAllowance": {
|
||||
@@ -1061,15 +1225,27 @@
|
||||
"type": "number"
|
||||
},
|
||||
"include": {
|
||||
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
|
||||
"description": "If present, activities will be counted only if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
|
||||
"examples": [
|
||||
"mealtimevideos",
|
||||
"askscience"
|
||||
[
|
||||
"mealtimevideos",
|
||||
"askscience",
|
||||
"/onlyfans*/i",
|
||||
{
|
||||
"over18": true
|
||||
}
|
||||
]
|
||||
],
|
||||
"items": {
|
||||
"type": "string"
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/SubredditState"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"minItems": 1,
|
||||
"type": "array"
|
||||
},
|
||||
"itemIs": {
|
||||
@@ -1159,40 +1335,6 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"SubThreshold": {
|
||||
"additionalProperties": false,
|
||||
"description": "At least one count property must be present. If both are present then either can trigger the rule",
|
||||
"minProperties": 1,
|
||||
"properties": {
|
||||
"subreddits": {
|
||||
"description": "A list of Subreddits (by name, case-insensitive) to look for.\n\nEX [\"mealtimevideos\",\"askscience\"]",
|
||||
"examples": [
|
||||
[
|
||||
"mealtimevideos",
|
||||
"askscience"
|
||||
]
|
||||
],
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"minItems": 1,
|
||||
"type": "array"
|
||||
},
|
||||
"threshold": {
|
||||
"default": ">= 1",
|
||||
"description": "A string containing a comparison operator and a value to compare recent activities against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 3` => greater than 3 activities found in the listed subreddits\n* EX `<= 75%` => number of Activities in the subreddits listed are equal to or less than 75% of all Activities\n\n**Note:** If you use percentage comparison here as well as `useSubmissionAsReference` then \"all Activities\" is only pertains to Activities that had the Link of the Submission, rather than all Activities from this window.",
|
||||
"examples": [
|
||||
">= 1"
|
||||
],
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"subreddits"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"SubmissionState": {
|
||||
"description": "Different attributes a `Submission` can be in. Only include a property if you want to check it.",
|
||||
"examples": [
|
||||
@@ -1217,6 +1359,12 @@
|
||||
"is_self": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"link_flair_css_class": {
|
||||
"type": "string"
|
||||
},
|
||||
"link_flair_text": {
|
||||
"type": "string"
|
||||
},
|
||||
"locked": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -1230,6 +1378,16 @@
|
||||
"removed": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"reports": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"spam": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -1238,6 +1396,48 @@
|
||||
},
|
||||
"stickied": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"title": {
|
||||
"description": "A valid regular expression to match against the title of the submission",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"SubredditState": {
|
||||
"description": "Different attributes a `Subreddit` can be in. Only include a property if you want to check it.",
|
||||
"examples": [
|
||||
{
|
||||
"over18": true
|
||||
}
|
||||
],
|
||||
"properties": {
|
||||
"name": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/RegExp"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"description": "The name the subreddit.\n\nCan be a normal string (will check case-insensitive) or a regular expression\n\nEX `[\"mealtimevideos\", \"/onlyfans*\\/i\"]`",
|
||||
"examples": [
|
||||
"mealtimevideos",
|
||||
"/onlyfans*/i"
|
||||
]
|
||||
},
|
||||
"over18": {
|
||||
"description": "Is subreddit NSFW/over 18?\n\n**Note**: This is **mod-controlled flag** so it is up to the mods of the subreddit to correctly mark their subreddit as NSFW",
|
||||
"type": "boolean"
|
||||
},
|
||||
"quarantine": {
|
||||
"description": "Is subreddit quarantined?",
|
||||
"type": "boolean"
|
||||
},
|
||||
"stateDescription": {
|
||||
"description": "A friendly description of what this State is trying to parse",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"type": "object"
@@ -1,85 +0,0 @@
|
||||
import {addAsync, Router} from '@awaitjs/express';
|
||||
import express from 'express';
|
||||
import Snoowrap from "snoowrap";
|
||||
import {permissions} from "../util";
|
||||
import {getLogger} from "../Utils/loggerFactory";
|
||||
import {OperatorConfig} from "../Common/interfaces";
|
||||
|
||||
const app = addAsync(express());
|
||||
const router = Router();
|
||||
app.set('views', `${__dirname}/views`);
|
||||
app.set('view engine', 'ejs');
|
||||
|
||||
app.use(router);
|
||||
|
||||
const helperServer = async function (options: OperatorConfig) {
|
||||
let rUri: string;
|
||||
|
||||
const {
|
||||
credentials: {
|
||||
clientId,
|
||||
clientSecret,
|
||||
redirectUri
|
||||
},
|
||||
web: {
|
||||
port
|
||||
}
|
||||
} = options;
|
||||
|
||||
const server = await app.listen(port);
|
||||
const logger = getLogger(options);
|
||||
logger.info(`Helper UI started: http://localhost:${port}`);
|
||||
app.getAsync('/', async (req, res) => {
|
||||
res.render('helper', {
|
||||
redirectUri
|
||||
});
|
||||
});
|
||||
|
||||
app.getAsync('/auth', async (req, res) => {
|
||||
rUri = req.query.redirect as string;
|
||||
let permissionsList = permissions;
|
||||
|
||||
const includeWikiEdit = (req.query.wikiEdit as any).toString() === "1";
|
||||
if (!includeWikiEdit) {
|
||||
permissionsList = permissionsList.filter(x => x !== 'wikiedit');
|
||||
}
|
||||
const authUrl = Snoowrap.getAuthUrl({
|
||||
clientId,
|
||||
scope: permissionsList,
|
||||
redirectUri: rUri as string,
|
||||
permanent: true,
|
||||
});
|
||||
return res.redirect(authUrl);
|
||||
});
|
||||
|
||||
app.getAsync(/.*callback$/, async (req, res) => {
|
||||
const {error, code} = req.query as any;
|
||||
if (error !== undefined) {
|
||||
let errContent: string;
|
||||
switch (error) {
|
||||
case 'access_denied':
|
||||
errContent = 'You must <b>Allow</b> this application to connect in order to proceed.';
|
||||
break;
|
||||
default:
|
||||
errContent = error;
|
||||
}
|
||||
return res.render('error', {error: errContent, });
|
||||
}
|
||||
const client = await Snoowrap.fromAuthCode({
|
||||
userAgent: `web:contextBot:web`,
|
||||
clientId,
|
||||
clientSecret,
|
||||
redirectUri: rUri,
|
||||
code: code as string,
|
||||
});
|
||||
// @ts-ignore
|
||||
const user = await client.getMe();
|
||||
|
||||
res.render('callback', {
|
||||
accessToken: client.accessToken,
|
||||
refreshToken: client.refreshToken,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export default helperServer;
|
||||
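For reference, a hypothetical invocation of the helper server removed above. The config shape mirrors the destructuring inside `helperServer` (credentials plus `web.port`); the import path and values are placeholders, and the cast acknowledges that `OperatorConfig` has more fields than shown here:

```typescript
// Placeholder invocation only; not part of the removed file.
import helperServer from './helper';

helperServer({
    credentials: {
        clientId: 'REDDIT_CLIENT_ID',
        clientSecret: 'REDDIT_CLIENT_SECRET',
        redirectUri: 'http://localhost:8085/callback',
    },
    web: { port: 8085 },
} as any).catch(console.error);
```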
@@ -1,706 +0,0 @@
|
||||
import {addAsync, Router} from '@awaitjs/express';
|
||||
import express from 'express';
|
||||
import bodyParser from 'body-parser';
|
||||
import session from 'express-session';
|
||||
import {Cache} from 'cache-manager';
|
||||
// @ts-ignore
|
||||
import CacheManagerStore from 'express-session-cache-manager'
|
||||
import Snoowrap from "snoowrap";
|
||||
import {App} from "../App";
|
||||
import dayjs from 'dayjs';
|
||||
import {Writable} from "stream";
|
||||
import winston from 'winston';
|
||||
import {Server as SocketServer} from 'socket.io';
|
||||
import sharedSession from 'express-socket.io-session';
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import EventEmitter from "events";
|
||||
import tcpUsed from 'tcp-port-used';
|
||||
|
||||
import {
|
||||
boolToString, cacheStats,
|
||||
COMMENT_URL_ID, createCacheManager,
|
||||
filterLogBySubreddit,
|
||||
formatLogLineToHtml, formatNumber,
|
||||
isLogLineMinLevel,
|
||||
LogEntry,
|
||||
parseLinkIdentifier,
|
||||
parseSubredditLogName, parseSubredditName,
|
||||
pollingInfo, SUBMISSION_URL_ID
|
||||
} from "../util";
|
||||
import {Manager} from "../Subreddit/Manager";
|
||||
import {getLogger} from "../Utils/loggerFactory";
|
||||
import LoggedError from "../Utils/LoggedError";
|
||||
import {OperatorConfig, ResourceStats, RUNNING, STOPPED, SYSTEM, USER} from "../Common/interfaces";
|
||||
import http from "http";
|
||||
import SimpleError from "../Utils/SimpleError";
|
||||
|
||||
const app = addAsync(express());
|
||||
const router = Router();
|
||||
|
||||
app.use(router);
|
||||
app.use(bodyParser.json());
|
||||
app.set('views', `${__dirname}/views`);
|
||||
app.set('view engine', 'ejs');
|
||||
|
||||
interface ConnectedUserInfo {
|
||||
subreddits: string[],
|
||||
level?: string,
|
||||
user: string
|
||||
}
|
||||
|
||||
const commentReg = parseLinkIdentifier([COMMENT_URL_ID]);
|
||||
const submissionReg = parseLinkIdentifier([SUBMISSION_URL_ID]);
|
||||
|
||||
const connectedUsers: Map<string, ConnectedUserInfo> = new Map();
|
||||
|
||||
const availableLevels = ['error', 'warn', 'info', 'verbose', 'debug'];
|
||||
|
||||
let operatorSessionId: (string | undefined);
|
||||
|
||||
declare module 'express-session' {
|
||||
interface SessionData {
|
||||
user: string,
|
||||
subreddits: string[],
|
||||
lastCheck?: number,
|
||||
limit?: number,
|
||||
sort?: string,
|
||||
level?: string,
|
||||
}
|
||||
}
|
||||
|
||||
const subLogMap: Map<string, LogEntry[]> = new Map();
|
||||
|
||||
const emitter = new EventEmitter();
|
||||
const stream = new Writable()
|
||||
|
||||
const rcbServer = async function (options: OperatorConfig) {
|
||||
|
||||
const {
|
||||
credentials: {
|
||||
clientId,
|
||||
clientSecret,
|
||||
redirectUri
|
||||
},
|
||||
operator: {
|
||||
name,
|
||||
display,
|
||||
},
|
||||
web: {
|
||||
port,
|
||||
session: {
|
||||
provider,
|
||||
secret,
|
||||
},
|
||||
maxLogs,
|
||||
},
|
||||
} = options;
|
||||
|
||||
let botSubreddits: string[] = [];
|
||||
|
||||
stream._write = (chunk, encoding, next) => {
|
||||
let logLine = chunk.toString();
|
||||
const now = Date.now();
|
||||
const logEntry: LogEntry = [now, logLine];
|
||||
|
||||
const subName = parseSubredditLogName(logLine);
|
||||
if (subName !== undefined && (botSubreddits.length === 0 || botSubreddits.includes(subName))) {
|
||||
const subLogs = subLogMap.get(subName) || [];
|
||||
subLogs.unshift(logEntry);
|
||||
subLogMap.set(subName, subLogs.slice(0, maxLogs + 1));
|
||||
} else {
|
||||
const appLogs = subLogMap.get('app') || [];
|
||||
appLogs.unshift(logEntry);
|
||||
subLogMap.set('app', appLogs.slice(0, maxLogs + 1));
|
||||
}
|
||||
|
||||
emitter.emit('log', logLine);
|
||||
next();
|
||||
}
|
||||
const streamTransport = new winston.transports.Stream({
|
||||
stream,
|
||||
})
|
||||
|
||||
const logger = getLogger({...options.logging, additionalTransports: [streamTransport]})
|
||||
|
||||
if (await tcpUsed.check(port)) {
|
||||
throw new SimpleError(`Specified port for web interface (${port}) is in use or not available. Cannot start web server.`);
|
||||
}
|
||||
|
||||
let server: http.Server,
|
||||
io: SocketServer;
|
||||
|
||||
try {
|
||||
server = await app.listen(port);
|
||||
io = new SocketServer(server);
|
||||
} catch (err) {
|
||||
logger.error('Error occurred while initializing web or socket.io server', err);
|
||||
err.logged = true;
|
||||
throw err;
|
||||
}
|
||||
|
||||
logger.info(`Web UI started: http://localhost:${port}`);
|
||||
|
||||
const bot = new App(options);
|
||||
await bot.testClient();
|
||||
|
||||
app.use('/public', express.static(`${__dirname}/public`));
|
||||
|
||||
await bot.buildManagers();
|
||||
botSubreddits = bot.subManagers.map(x => x.displayLabel);
|
||||
// TODO potentially prune subLogMap of user keys? shouldn't have happened this early though
|
||||
|
||||
if(provider.store === 'none') {
|
||||
logger.warn(`Cannot use 'none' for session store or else no one can use the interface...falling back to 'memory'`);
|
||||
provider.store = 'memory';
|
||||
}
|
||||
const sessionObj = session({
|
||||
cookie: {
|
||||
maxAge: provider.ttl,
|
||||
},
|
||||
store: new CacheManagerStore(createCacheManager(provider) as Cache),
|
||||
resave: false,
|
||||
saveUninitialized: false,
|
||||
secret,
|
||||
});
|
||||
|
||||
app.use(sessionObj);
|
||||
io.use(sharedSession(sessionObj));
|
||||
|
||||
io.on("connection", function (socket) {
|
||||
// @ts-ignore
|
||||
if (socket.handshake.session.user !== undefined) {
|
||||
// @ts-ignore
|
||||
socket.join(socket.handshake.session.id);
|
||||
// @ts-ignore
|
||||
connectedUsers.set(socket.handshake.session.id, {
|
||||
// @ts-ignore
|
||||
subreddits: socket.handshake.session.subreddits,
|
||||
// @ts-ignore
|
||||
level: socket.handshake.session.level,
|
||||
// @ts-ignore
|
||||
user: socket.handshake.session.user
|
||||
});
|
||||
|
||||
// @ts-ignore
|
||||
if (name !== undefined && socket.handshake.session.user.toLowerCase() === name.toLowerCase()) {
|
||||
// @ts-ignore
|
||||
operatorSessionId = socket.handshake.session.id;
|
||||
}
|
||||
}
|
||||
});
|
||||
io.on('disconnect', (socket) => {
|
||||
// @ts-ignore
|
||||
connectedUsers.delete(socket.handshake.session.id);
|
||||
if (operatorSessionId === socket.handshake.session.id) {
|
||||
operatorSessionId = undefined;
|
||||
}
|
||||
});
|
||||
|
||||
const redditUserMiddleware = async (req: express.Request, res: express.Response, next: Function) => {
|
||||
if (req.session.user === undefined) {
|
||||
return res.redirect('/login');
|
||||
}
|
||||
next();
|
||||
}
|
||||
|
||||
const booleanMiddle = (boolParams: string[] = []) => async (req: express.Request, res: express.Response, next: Function) => {
    if (req.query !== undefined) {
        for (const b of boolParams) {
            const bVal = req.query[b] as any;
            if (bVal !== undefined) {
                let truthyVal: boolean;
                if (bVal === 'true' || bVal === true || bVal === 1 || bVal === '1') {
                    truthyVal = true;
                } else if (bVal === 'false' || bVal === false || bVal === 0 || bVal === '0') {
                    truthyVal = false;
                } else {
                    res.status(400);
                    res.send(`Expected query parameter ${b} to be a truthy value. Got "${bVal}" but must be one of these: true/false, 1/0`);
                    return;
                }
                // @ts-ignore
                req.query[b] = truthyVal;
            }
        }
    }
    next();
}
|
||||
|
||||
app.getAsync('/logout', async (req, res) => {
|
||||
// @ts-ignore
|
||||
req.session.destroy();
|
||||
res.send('Bye!');
|
||||
})
|
||||
|
||||
app.getAsync('/login', async (req, res) => {
|
||||
if(redirectUri === undefined) {
|
||||
return res.render('error', {error: `No <b>redirectUri</b> was specified through an environment variable or program argument. This must be provided in order to use the web interface.`});
|
||||
}
|
||||
const authUrl = Snoowrap.getAuthUrl({
|
||||
clientId,
|
||||
scope: ['identity', 'mysubreddits'],
|
||||
redirectUri: redirectUri as string,
|
||||
permanent: false,
|
||||
});
|
||||
return res.redirect(authUrl);
|
||||
});
|
||||
|
||||
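// OAuth callback: exchange the auth code for a snoowrap client, then store in the session which of the
// bot's managed subreddits this user moderates (the operator gets access to all of them)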
app.getAsync(/.*callback$/, async (req, res) => {
|
||||
const {error, code} = req.query as any;
|
||||
if (error !== undefined) {
|
||||
let errContent: string;
|
||||
switch (error) {
|
||||
case 'access_denied':
|
||||
errContent = 'You must <b>Allow</b> this application to connect in order to proceed.';
|
||||
break;
|
||||
default:
|
||||
errContent = error;
|
||||
}
|
||||
return res.render('error', {error: errContent, operatorDisplay: display});
|
||||
}
|
||||
const client = await Snoowrap.fromAuthCode({
|
||||
userAgent: `web:contextBot:web`,
|
||||
clientId,
|
||||
clientSecret,
|
||||
redirectUri: redirectUri as string,
|
||||
code: code as string,
|
||||
});
|
||||
// @ts-ignore
|
||||
const user = await client.getMe().name as string;
|
||||
const subs = await client.getModeratedSubreddits();
|
||||
|
||||
req.session['user'] = user;
|
||||
// @ts-ignore
|
||||
req.session['subreddits'] = name !== undefined && name.toLowerCase() === user.toLowerCase() ? bot.subManagers.map(x => x.displayLabel) : subs.reduce((acc: string[], x) => {
|
||||
const sm = bot.subManagers.find(y => y.subreddit.display_name === x.display_name);
|
||||
if (sm !== undefined) {
|
||||
return acc.concat(sm.displayLabel);
|
||||
}
|
||||
return acc;
|
||||
}, []);
|
||||
req.session['lastCheck'] = dayjs().unix();
|
||||
res.redirect('/');
|
||||
});
|
||||
|
||||
app.use('/', redditUserMiddleware);
|
||||
app.getAsync('/', async (req, res) => {
|
||||
const {
|
||||
subreddits = [],
|
||||
user: userVal,
|
||||
limit = 200,
|
||||
level = 'verbose',
|
||||
sort = 'descending',
|
||||
lastCheck
|
||||
} = req.session;
|
||||
const user = userVal as string;
|
||||
const isOperator = name !== undefined && name.toLowerCase() === user.toLowerCase()
|
||||
|
||||
if ((req.session.subreddits as string[]).length === 0 && !isOperator) {
|
||||
return res.render('noSubs', {operatorDisplay: display});
|
||||
}
|
||||
|
||||
const logs = filterLogBySubreddit(subLogMap, req.session.subreddits, {
|
||||
level,
|
||||
operator: isOperator,
|
||||
user,
|
||||
// @ts-ignore
|
||||
sort,
|
||||
limit
|
||||
});
|
||||
|
||||
const subManagerData = [];
|
||||
for (const s of subreddits) {
|
||||
const m = bot.subManagers.find(x => x.displayLabel === s) as Manager;
|
||||
const sd = {
|
||||
name: s,
|
||||
//linkName: s.replace(/\W/g, ''),
|
||||
logs: logs.get(s) || [], // provide a default empty value in case we truly have not logged anything for this subreddit yet
|
||||
botState: m.botState,
|
||||
eventsState: m.eventsState,
|
||||
queueState: m.queueState,
|
||||
indicator: 'gray',
|
||||
queuedActivities: m.queue.length(),
|
||||
runningActivities: m.queue.running(),
|
||||
maxWorkers: m.queue.concurrency,
|
||||
validConfig: boolToString(m.validConfigLoaded),
|
||||
dryRun: boolToString(m.dryRun === true),
|
||||
pollingInfo: m.pollOptions.length === 0 ? ['nothing :('] : m.pollOptions.map(pollingInfo),
|
||||
checks: {
|
||||
submissions: m.submissionChecks === undefined ? 0 : m.submissionChecks.length,
|
||||
comments: m.commentChecks === undefined ? 0 : m.commentChecks.length,
|
||||
},
|
||||
wikiLocation: m.wikiLocation,
|
||||
wikiHref: `https://reddit.com/r/${m.subreddit.display_name}/wiki/${m.wikiLocation}`,
|
||||
wikiRevisionHuman: m.lastWikiRevision === undefined ? 'N/A' : `${dayjs.duration(dayjs().diff(m.lastWikiRevision)).humanize()} ago`,
|
||||
wikiRevision: m.lastWikiRevision === undefined ? 'N/A' : m.lastWikiRevision.local().format('MMMM D, YYYY h:mm A Z'),
|
||||
wikiLastCheckHuman: `${dayjs.duration(dayjs().diff(m.lastWikiCheck)).humanize()} ago`,
|
||||
wikiLastCheck: m.lastWikiCheck.local().format('MMMM D, YYYY h:mm A Z'),
|
||||
stats: await m.getStats(),
|
||||
startedAt: 'Not Started',
|
||||
startedAtHuman: 'Not Started',
|
||||
delayBy: m.delayBy === undefined ? 'No' : `Delayed by ${m.delayBy} sec`,
|
||||
};
|
||||
// TODO replace indicator data with js on client page
|
||||
let indicator;
|
||||
if (m.botState.state === RUNNING && m.queueState.state === RUNNING && m.eventsState.state === RUNNING) {
|
||||
indicator = 'green';
|
||||
} else if (m.botState.state === STOPPED && m.queueState.state === STOPPED && m.eventsState.state === STOPPED) {
|
||||
indicator = 'red';
|
||||
} else {
|
||||
indicator = 'yellow';
|
||||
}
|
||||
sd.indicator = indicator;
|
||||
if (m.startedAt !== undefined) {
|
||||
const dur = dayjs.duration(dayjs().diff(m.startedAt));
|
||||
sd.startedAtHuman = `${dur.humanize()} ago`;
|
||||
sd.startedAt = m.startedAt.local().format('MMMM D, YYYY h:mm A Z');
|
||||
|
||||
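// requestRate is normalized to cache requests per 10 minutes; for uptimes under 10 minutes the observed
// total is extrapolated up to a full 10-minute window so short-lived managers report a comparable rate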
if (sd.stats.cache.totalRequests > 0) {
    const minutes = dur.asMinutes();
    if (minutes < 10) {
        sd.stats.cache.requestRate = formatNumber((10 / minutes) * sd.stats.cache.totalRequests, {toFixed: 0, round: {enable: true, indicate: true}});
    } else {
        sd.stats.cache.requestRate = formatNumber(sd.stats.cache.totalRequests / (minutes / 10), {toFixed: 0, round: {enable: true, indicate: true}});
    }
} else {
    sd.stats.cache.requestRate = 0;
}
|
||||
}
|
||||
subManagerData.push(sd);
|
||||
}
|
||||
const totalStats = subManagerData.reduce((acc, curr) => {
|
||||
return {
|
||||
checks: {
|
||||
submissions: acc.checks.submissions + curr.checks.submissions,
|
||||
comments: acc.checks.comments + curr.checks.comments,
|
||||
},
|
||||
eventsCheckedTotal: acc.eventsCheckedTotal + curr.stats.eventsCheckedTotal,
|
||||
checksRunTotal: acc.checksRunTotal + curr.stats.checksRunTotal,
|
||||
checksTriggeredTotal: acc.checksTriggeredTotal + curr.stats.checksTriggeredTotal,
|
||||
rulesRunTotal: acc.rulesRunTotal + curr.stats.rulesRunTotal,
|
||||
rulesCachedTotal: acc.rulesCachedTotal + curr.stats.rulesCachedTotal,
|
||||
rulesTriggeredTotal: acc.rulesTriggeredTotal + curr.stats.rulesTriggeredTotal,
|
||||
actionsRunTotal: acc.actionsRunTotal + curr.stats.actionsRunTotal,
|
||||
};
|
||||
}, {
|
||||
checks: {
|
||||
submissions: 0,
|
||||
comments: 0,
|
||||
},
|
||||
eventsCheckedTotal: 0,
|
||||
checksRunTotal: 0,
|
||||
checksTriggeredTotal: 0,
|
||||
rulesRunTotal: 0,
|
||||
rulesCachedTotal: 0,
|
||||
rulesTriggeredTotal: 0,
|
||||
actionsRunTotal: 0,
|
||||
});
|
||||
const {checks, ...rest} = totalStats;
|
||||
|
||||
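// aggregate per-type cache hit/miss counts across all subreddit managers, then derive a miss percentage per type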
let cumRaw = subManagerData.reduce((acc, curr) => {
|
||||
Object.keys(curr.stats.cache.types as ResourceStats).forEach((k) => {
|
||||
acc[k].requests += curr.stats.cache.types[k].requests;
|
||||
acc[k].miss += curr.stats.cache.types[k].miss;
|
||||
});
|
||||
return acc;
|
||||
}, cacheStats());
|
||||
cumRaw = Object.keys(cumRaw).reduce((acc, curr) => {
|
||||
const per = acc[curr].miss === 0 ? 0 : formatNumber(acc[curr].miss / acc[curr].requests) * 100;
|
||||
// @ts-ignore
|
||||
acc[curr].missPercent = `${formatNumber(per, {toFixed: 0})}%`;
|
||||
return acc;
|
||||
}, cumRaw);
|
||||
let allManagerData: any = {
|
||||
name: 'All',
|
||||
linkName: 'All',
|
||||
indicator: 'green',
|
||||
botState: {
|
||||
state: RUNNING,
|
||||
causedBy: SYSTEM
|
||||
},
|
||||
dryRun: boolToString(bot.dryRun === true),
|
||||
logs: logs.get('all'),
|
||||
checks: checks,
|
||||
softLimit: bot.softLimit,
|
||||
hardLimit: bot.hardLimit,
|
||||
stats: {
|
||||
...rest,
|
||||
cache: {
|
||||
currentKeyCount: await bot.subManagers[0].resources.getCacheKeyCount(),
|
||||
isShared: false,
|
||||
totalRequests: subManagerData.reduce((acc, curr) => acc + curr.stats.cache.totalRequests, 0),
|
||||
types: {
|
||||
...cumRaw,
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
if (allManagerData.logs === undefined) {
    // this should not happen but an edge case was observed where it potentially did
    logger.warn(`Logs for 'all' were undefined but should always have a default empty value`);
}
|
||||
// if(isOperator) {
|
||||
allManagerData.startedAt = bot.startedAt.local().format('MMMM D, YYYY h:mm A Z');
|
||||
allManagerData.heartbeatHuman = dayjs.duration({seconds: bot.heartbeatInterval}).humanize();
|
||||
allManagerData.heartbeat = bot.heartbeatInterval;
|
||||
allManagerData = {...allManagerData, ...opStats(bot)};
|
||||
//}
|
||||
|
||||
const botDur = dayjs.duration(dayjs().diff(bot.startedAt))
|
||||
if(allManagerData.stats.cache.totalRequests > 0) {
|
||||
const minutes = botDur.asMinutes();
|
||||
if(minutes < 10) {
|
||||
allManagerData.stats.cache.requestRate = formatNumber((10/minutes) * allManagerData.stats.cache.totalRequests, {toFixed: 0, round: {enable: true, indicate: true}});
|
||||
} else {
|
||||
allManagerData.stats.cache.requestRate = formatNumber(allManagerData.stats.cache.totalRequests / (minutes/10), {toFixed: 0, round: {enable: true, indicate: true}});
|
||||
}
|
||||
} else {
|
||||
allManagerData.stats.cache.requestRate = 0;
|
||||
}
|
||||
|
||||
const data = {
|
||||
userName: user,
|
||||
system: {
|
||||
startedAt: bot.startedAt.local().format('MMMM D, YYYY h:mm A Z'),
|
||||
...opStats(bot),
|
||||
},
|
||||
subreddits: [allManagerData, ...subManagerData],
|
||||
show: 'All',
|
||||
botName: bot.botName,
|
||||
operatorDisplay: display,
|
||||
isOperator,
|
||||
logSettings: {
|
||||
//limit: [10, 20, 50, 100, 200].map(x => `<a class="capitalize ${limit === x ? 'font-bold no-underline pointer-events-none' : ''}" data-limit="${x}" href="logs/settings/update?limit=${x}">${x}</a>`).join(' | '),
|
||||
limitSelect: [10, 20, 50, 100, 200].map(x => `<option ${limit === x ? 'selected' : ''} class="capitalize ${limit === x ? 'font-bold' : ''}" data-value="${x}">${x}</option>`).join(' '),
|
||||
//sort: ['ascending', 'descending'].map(x => `<a class="capitalize ${sort === x ? 'font-bold no-underline pointer-events-none' : ''}" data-sort="${x}" href="logs/settings/update?sort=${x}">${x}</a>`).join(' | '),
|
||||
sortSelect: ['ascending', 'descending'].map(x => `<option ${sort === x ? 'selected' : ''} class="capitalize ${sort === x ? 'font-bold' : ''}" data-value="${x}">${x}</option>`).join(' '),
|
||||
//level: availableLevels.map(x => `<a class="capitalize log-${x} ${level === x ? `font-bold no-underline pointer-events-none` : ''}" data-log="${x}" href="logs/settings/update?level=${x}">${x}</a>`).join(' | '),
|
||||
levelSelect: availableLevels.map(x => `<option ${level === x ? 'selected' : ''} class="capitalize log-${x} ${level === x ? `font-bold` : ''}" data-value="${x}">${x}</option>`).join(' '),
|
||||
},
|
||||
};
|
||||
if(req.query.sub !== undefined) {
|
||||
const encoded = encodeURI(req.query.sub as string).toLowerCase();
|
||||
const shouldShow = data.subreddits.find(x => x.name.toLowerCase() === encoded);
|
||||
if(shouldShow !== undefined) {
|
||||
data.show = shouldShow.name;
|
||||
}
|
||||
}
|
||||
|
||||
res.render('status', data);
|
||||
});
|
||||
|
||||
app.getAsync('/logs/settings/update', async function (req, res) {
|
||||
const e = req.query;
|
||||
for (const [setting, val] of Object.entries(req.query)) {
|
||||
switch (setting) {
|
||||
case 'limit':
|
||||
req.session.limit = Number.parseInt(val as string);
|
||||
break;
|
||||
case 'sort':
|
||||
req.session.sort = val as string;
|
||||
break;
|
||||
case 'level':
|
||||
req.session.level = val as string;
|
||||
break;
|
||||
}
|
||||
}
|
||||
const {limit = 200, level = 'verbose', sort = 'descending', user} = req.session;
|
||||
|
||||
res.send('OK');
|
||||
|
||||
const subMap = filterLogBySubreddit(subLogMap, req.session.subreddits, {
|
||||
level,
|
||||
operator: name !== undefined && name.toLowerCase() === (user as string).toLowerCase(),
|
||||
user,
|
||||
limit,
|
||||
sort: (sort as 'descending' | 'ascending'),
|
||||
});
|
||||
const subArr: any = [];
|
||||
subMap.forEach((v: string[], k: string) => {
|
||||
subArr.push({name: k, logs: v.join('')});
|
||||
});
|
||||
io.emit('logClear', subArr);
|
||||
});
|
||||
|
||||
app.use('/action', booleanMiddle(['force']));
|
||||
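// moderator control endpoint: start/stop/pause the bot, queue, or event polling for one subreddit (or all of
// the user's subreddits), force a config reload, or re-queue unmoderated/modqueue items for checking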
app.getAsync('/action', async (req, res) => {
|
||||
const {type, action, subreddit, force = false} = req.query as any;
|
||||
let subreddits: string[] = [];
|
||||
if (subreddit === 'All') {
|
||||
subreddits = req.session.subreddits as string[];
|
||||
} else if ((req.session.subreddits as string[]).includes(subreddit)) {
|
||||
subreddits = [subreddit];
|
||||
}
|
||||
|
||||
for (const s of subreddits) {
|
||||
const manager = bot.subManagers.find(x => x.displayLabel === s);
|
||||
if (manager === undefined) {
|
||||
logger.warn(`Manager for ${s} does not exist`, {subreddit: `/u/${req.session.user}`});
|
||||
continue;
|
||||
}
|
||||
const mLogger = manager.logger;
|
||||
mLogger.info(`/u/${req.session.user} invoked '${action}' action for ${type} on ${manager.displayLabel}`);
|
||||
try {
|
||||
switch (action) {
|
||||
case 'start':
|
||||
if (type === 'bot') {
|
||||
await manager.start('user');
|
||||
} else if (type === 'queue') {
|
||||
manager.startQueue('user');
|
||||
} else {
|
||||
await manager.startEvents('user');
|
||||
}
|
||||
break;
|
||||
case 'stop':
|
||||
if (type === 'bot') {
|
||||
await manager.stop('user');
|
||||
} else if (type === 'queue') {
|
||||
await manager.stopQueue('user');
|
||||
} else {
|
||||
manager.stopEvents('user');
|
||||
}
|
||||
break;
|
||||
case 'pause':
|
||||
if (type === 'queue') {
|
||||
await manager.pauseQueue('user');
|
||||
} else {
|
||||
manager.pauseEvents('user');
|
||||
}
|
||||
break;
|
||||
case 'reload':
|
||||
const prevQueueState = manager.queueState.state;
|
||||
const newConfig = await manager.parseConfiguration('user', force);
|
||||
if (newConfig === false) {
|
||||
mLogger.info('Config was up-to-date');
|
||||
}
|
||||
if (newConfig && prevQueueState === RUNNING) {
|
||||
await manager.startQueue(USER);
|
||||
}
|
||||
break;
|
||||
case 'check':
|
||||
if (type === 'unmoderated') {
|
||||
const activities = await manager.subreddit.getUnmoderated({limit: 100});
|
||||
for (const a of activities.reverse()) {
|
||||
manager.queue.push({
|
||||
checkType: a instanceof Submission ? 'Submission' : 'Comment',
|
||||
activity: a,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
const activities = await manager.subreddit.getModqueue({limit: 100});
|
||||
for (const a of activities.reverse()) {
|
||||
manager.queue.push({
|
||||
checkType: a instanceof Submission ? 'Submission' : 'Comment',
|
||||
activity: a,
|
||||
});
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
} catch (err) {
|
||||
if (!(err instanceof LoggedError)) {
|
||||
mLogger.error(err, {subreddit: manager.displayLabel});
|
||||
}
|
||||
}
|
||||
}
|
||||
res.send('OK');
|
||||
});
|
||||
|
||||
app.use('/check', booleanMiddle(['dryRun']));
|
||||
app.getAsync('/check', async (req, res) => {
|
||||
const {url, dryRun, subreddit} = req.query as any;
|
||||
|
||||
let a;
|
||||
const commentId = commentReg(url);
|
||||
if (commentId !== undefined) {
|
||||
// @ts-ignore
|
||||
a = await bot.client.getComment(commentId);
|
||||
}
|
||||
if (a === undefined) {
|
||||
const submissionId = submissionReg(url);
|
||||
if (submissionId !== undefined) {
|
||||
// @ts-ignore
|
||||
a = await bot.client.getSubmission(submissionId);
|
||||
}
|
||||
}
|
||||
|
||||
if (a === undefined) {
|
||||
logger.error('Could not parse Comment or Submission ID from given URL', {subreddit: `/u/${req.session.user}`});
|
||||
return res.send('OK');
|
||||
} else {
|
||||
// @ts-ignore
|
||||
const activity = await a.fetch();
|
||||
const sub = await activity.subreddit.display_name;
|
||||
|
||||
let manager = subreddit === 'All' ? bot.subManagers.find(x => x.subreddit.display_name === sub) : bot.subManagers.find(x => x.displayLabel === subreddit);
|
||||
|
||||
if (manager === undefined || !(req.session.subreddits as string[]).includes(manager.displayLabel)) {
|
||||
let msg = 'Activity does not belong to a subreddit you moderate or the bot runs on.';
|
||||
if (subreddit === 'All') {
|
||||
msg = `${msg} If you want to test an Activity against a Subreddit\'s config it does not belong to then switch to that Subreddit's tab first.`
|
||||
}
|
||||
logger.error(msg, {subreddit: `/u/${req.session.user}`});
|
||||
return res.send('OK');
|
||||
}
|
||||
|
||||
// will run dryrun if specified or if running activity on subreddit it does not belong to
|
||||
const dr: boolean | undefined = (dryRun || manager.subreddit.display_name !== sub) ? true : undefined;
|
||||
manager.logger.info(`/u/${req.session.user} running${dr === true ? ' DRY RUN ' : ' '}check on${manager.subreddit.display_name !== sub ? ' FOREIGN ACTIVITY ' : ' '}${url}`);
|
||||
await manager.runChecks(activity instanceof Submission ? 'Submission' : 'Comment', activity, {dryRun: dr})
|
||||
}
|
||||
res.send('OK');
|
||||
})
|
||||
|
||||
setInterval(() => {
|
||||
// refresh op stats every 30 seconds
|
||||
io.emit('opStats', opStats(bot));
|
||||
// if (operatorSessionId !== undefined) {
|
||||
// io.to(operatorSessionId).emit('opStats', opStats(bot));
|
||||
// }
|
||||
}, 30000);
|
||||
|
||||
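// fan out captured log lines: lines tagged with a subreddit go to sessions whose subreddit list (or username)
// matches the tag, while the operator session additionally receives refreshed op stats and any line it was
// not already sent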
emitter.on('log', (log) => {
    const emittedSessions = [];
    const subName = parseSubredditLogName(log);
    if (subName !== undefined) {
        for (const [id, info] of connectedUsers) {
            const {subreddits, level = 'verbose', user} = info;
            if (isLogLineMinLevel(log, level) && (subreddits.includes(subName) || subName.includes(user))) {
                emittedSessions.push(id);
                io.to(id).emit('log', formatLogLineToHtml(log));
            }
        }
    }
    if (operatorSessionId !== undefined) {
        io.to(operatorSessionId).emit('opStats', opStats(bot));
        if (subName === undefined || !emittedSessions.includes(operatorSessionId)) {
            const {level = 'verbose'} = connectedUsers.get(operatorSessionId) || {};
            if (isLogLineMinLevel(log, level)) {
                io.to(operatorSessionId).emit('log', formatLogLineToHtml(log));
            }
        }
    }
});
|
||||
|
||||
await bot.runManagers();
|
||||
};
|
||||
|
||||
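// snapshot of operator-level stats (api rate limit, rolling api average, heartbeat, nanny mode) used by both
// the status page render and the periodic socket.io broadcast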
const opStats = (bot: App) => {
    const limitReset = dayjs(bot.client.ratelimitExpiration);
    const nextHeartbeat = bot.nextHeartbeat !== undefined ? bot.nextHeartbeat.local().format('MMMM D, YYYY h:mm A Z') : 'N/A';
    const nextHeartbeatHuman = bot.nextHeartbeat !== undefined ? `in ${dayjs.duration(bot.nextHeartbeat.diff(dayjs())).humanize()}` : 'N/A'
    return {
        startedAtHuman: `${dayjs.duration(dayjs().diff(bot.startedAt)).humanize()}`,
        nextHeartbeat,
        nextHeartbeatHuman,
        apiLimit: bot.client.ratelimitRemaining,
        apiAvg: formatNumber(bot.apiRollingAvg),
        nannyMode: bot.nannyMode || 'Off',
        apiDepletion: bot.apiEstDepletion === undefined ? 'Not Calculated' : bot.apiEstDepletion.humanize(),
        limitReset,
        limitResetHuman: `in ${dayjs.duration(limitReset.diff(dayjs())).humanize()}`,
    }
}
|
||||
|
||||
export default rcbServer;
|
||||
|
||||
@@ -1,60 +0,0 @@
|
||||
<html>
|
||||
<%- include('partials/head', {title: 'RCB OAuth Helper'}) %>
|
||||
<body class="">
|
||||
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
|
||||
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
|
||||
<%- include('partials/title', {title: ' OAuth Helper'}) %>
|
||||
<div class="container mx-auto">
|
||||
<div class="grid">
|
||||
<div class="bg-white dark:bg-gray-500 dark:text-white">
|
||||
<div class="p-6 md:px-10 md:py-6">
|
||||
<div class="text-xl mb-4">Hi! Looks like you're setting up your bot. To get running:</div>
|
||||
<div class="text-lg text-semibold my-3">1. Set your redirect URL</div>
|
||||
<input id="redirectUri" style="min-width:500px;"
|
||||
class="text-black placeholder-gray-500 rounded mt-2 mb-3 p-2" value="<%= redirectUri %>">
|
||||
<div class="my-2">
|
||||
<input type="checkbox" id="wikiedit" name="wikiedit"
|
||||
checked>
|
||||
<label for="wikiedit">Include <span class="font-mono">wikiedit</span> permission for Toolbox
|
||||
User Notes</label>
|
||||
</div>
|
||||
<div class="space-y-3">
|
||||
<div>This is the URL Reddit will redirect you to once you have authorized an account to be used
|
||||
with your application.
|
||||
</div>
|
||||
<div>The input field has been pre-filled with either:
|
||||
<ul class="list-inside list-disc">
|
||||
<li>What you provided to the program as an argument/environment variable or</li>
|
||||
<li>The current URL in your browser that would be used -- if you are using a reverse
|
||||
proxy this may be different so double check
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
<div>Make sure it matches what is found in the <b>redirect uri</b> for your <a target="_blank"
|
||||
href="https://www.reddit.com/prefs/apps">application
|
||||
on Reddit</a> and <b>it must end with "callback"</b></div>
|
||||
</div>
|
||||
<div class="text-lg text-semibold my-3">2. Login to Reddit with the account that will be the bot
|
||||
</div>
|
||||
Protip: Login to Reddit in an Incognito session, then open this URL in a new tab.
|
||||
<div class="text-lg text-semibold my-3">3. <a id="doAuth" href="">Authorize your bot account</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<script>
|
||||
if (document.querySelector('#redirectUri').value === '') {
|
||||
document.querySelector('#redirectUri').value = `${document.location.href}callback`;
|
||||
}
|
||||
|
||||
document.querySelector('#doAuth').addEventListener('click', e => {
|
||||
e.preventDefault()
|
||||
const wikiEdit = document.querySelector('#wikiedit').checked ? 1 : 0;
|
||||
const url = `${document.location.href}auth?redirect=${document.querySelector('#redirectUri').value}&wikiEdit=${wikiEdit}`;
|
||||
window.location.href = url;
|
||||
})
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,86 +0,0 @@
|
||||
<head>
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/tailwindcss/2.0.3/tailwind.min.css"
|
||||
integrity="sha512-wl80ucxCRpLkfaCnbM88y4AxnutbGk327762eM9E/rRTvY/ZGAHWMZrYUq66VQBYMIYDFpDdJAOGSLyIPHZ2IQ=="
|
||||
crossorigin="anonymous"/>
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/tailwindcss/2.0.3/tailwind-dark.min.css"
|
||||
integrity="sha512-WvyKyiVHgInX5UQt67447ExtRRZG/8GUijaq1MpqTNYp8wY4/EJOG5bI80sRp/5crDy4Z6bBUydZI2OFV3Vbtg=="
|
||||
crossorigin="anonymous"/>
|
||||
<script src="https://code.iconify.design/1/1.0.4/iconify.min.js"></script>
|
||||
<link rel="stylesheet" href="public/themeToggle.css">
|
||||
<style>
|
||||
a {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.loading {
|
||||
height: 35px;
|
||||
fill: black;
|
||||
display: none;
|
||||
}
|
||||
|
||||
.connected .loading {
|
||||
display: inline;
|
||||
}
|
||||
|
||||
.dark .loading {
|
||||
fill: white;
|
||||
}
|
||||
|
||||
.sub {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.sub.active {
|
||||
display: inherit;
|
||||
}
|
||||
|
||||
/*https://stackoverflow.com/a/48386400/1469797*/
|
||||
.stats {
|
||||
display: grid;
|
||||
grid-template-columns: max-content auto;
|
||||
grid-gap: 5px;
|
||||
}
|
||||
|
||||
.stats label {
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
.stats label:after {
|
||||
content: ":";
|
||||
}
|
||||
|
||||
.has-tooltip {
|
||||
/*position: relative;*/
|
||||
}
|
||||
|
||||
.tooltip {
|
||||
transition-delay: 0.5s;
|
||||
transition-property: visibility;
|
||||
visibility: hidden;
|
||||
position: absolute;
|
||||
/*right: 0;*/
|
||||
margin-top:-35px;
|
||||
}
|
||||
|
||||
.has-tooltip:hover .tooltip {
|
||||
visibility: visible;
|
||||
transition-delay: 0.2s;
|
||||
transition-property: visibility;
|
||||
z-index: 100;
|
||||
}
|
||||
|
||||
.pointer {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.botStats.hidden {
|
||||
display: none;
|
||||
}
|
||||
</style>
|
||||
<title><%= title !== undefined ? title : `RCB for /u/${botName}`%></title>
|
||||
<!--<title><%# `RCB for /u/${botName}`%></title>-->
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width,initial-scale=1.0">
|
||||
<!--icons from https://heroicons.com -->
|
||||
</head>
|
||||
@@ -1,13 +0,0 @@
|
||||
<div class="space-x-4 p-6 md:px-10 md:py-6 leading-6 font-semibold bg-gray-800 text-white">
|
||||
<div class="container mx-auto">
|
||||
<div class="flex items-center justify-between">
|
||||
<div class="flex items-center flex-grow pr-4">
|
||||
<div class="px-4 width-full relative">
|
||||
<div><a href="https://github.com/FoxxMD/reddit-context-bot">RCB</a> <%= title %></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex items-center flex-end text-sm">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -5,23 +5,26 @@ import {CommentCheck} from "../Check/CommentCheck";
|
||||
import {
|
||||
cacheStats,
|
||||
createRetryHandler,
|
||||
determineNewResults, formatNumber,
|
||||
mergeArr, parseFromJsonOrYamlToObject, pollingInfo, sleep, totalFromMapStats,
|
||||
determineNewResults, findLastIndex, formatNumber,
|
||||
mergeArr, parseFromJsonOrYamlToObject, pollingInfo, resultsSummary, sleep, totalFromMapStats, triggeredIndicator,
|
||||
} from "../util";
|
||||
import {Poll} from "snoostorm";
|
||||
import pEvent from "p-event";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {ConfigBuilder, buildPollingOptions} from "../ConfigBuilder";
|
||||
import {
|
||||
ActionedEvent,
|
||||
ActionResult,
|
||||
DEFAULT_POLLING_INTERVAL,
|
||||
DEFAULT_POLLING_LIMIT, Invokee,
|
||||
ManagerOptions, ManagerStateChangeOption, PAUSED,
|
||||
PollingOptionsStrong, RUNNING, RunState, STOPPED, SYSTEM, USER
|
||||
PollingOptionsStrong, ResourceStats, RUNNING, RunState, STOPPED, SYSTEM, USER
|
||||
} from "../Common/interfaces";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import {activityIsRemoved, itemContentPeek} from "../Utils/SnoowrapUtils";
|
||||
import LoggedError from "../Utils/LoggedError";
|
||||
import ResourceManager, {
|
||||
import {
|
||||
BotResourcesManager,
|
||||
SubredditResourceConfig,
|
||||
SubredditResources,
|
||||
SubredditResourceSetOptions
|
||||
@@ -35,6 +38,7 @@ import {queue, QueueObject} from 'async';
|
||||
import {JSONConfig} from "../JsonConfig";
|
||||
import {CheckStructuredJson} from "../Check";
|
||||
import NotificationManager from "../Notification/NotificationManager";
|
||||
import action from "../Web/Server/routes/authenticated/user/action";
|
||||
|
||||
export interface RunningState {
|
||||
state: RunState,
|
||||
@@ -45,6 +49,7 @@ export interface runCheckOptions {
|
||||
checkNames?: string[],
|
||||
delayUntil?: number,
|
||||
dryRun?: boolean,
|
||||
refresh?: boolean,
|
||||
}
|
||||
|
||||
export interface CheckTask {
|
||||
@@ -55,17 +60,60 @@ export interface CheckTask {
|
||||
|
||||
export interface RuntimeManagerOptions extends ManagerOptions {
|
||||
sharedModqueue?: boolean;
|
||||
wikiLocation?: string;
|
||||
botName: string;
|
||||
maxWorkers: number;
|
||||
}
|
||||
|
||||
export interface ManagerStats {
|
||||
eventsCheckedTotal: number
|
||||
eventsCheckedSinceStartTotal: number
|
||||
eventsAvg: number
|
||||
checksRunTotal: number
|
||||
checksRunSinceStartTotal: number
|
||||
checksTriggered: number
|
||||
checksTriggeredTotal: number
|
||||
checksTriggeredSinceStart: number
|
||||
checksTriggeredSinceStartTotal: number
|
||||
rulesRunTotal: number
|
||||
rulesRunSinceStartTotal: number
|
||||
rulesCachedTotal: number
|
||||
rulesCachedSinceStartTotal: number
|
||||
rulesTriggeredTotal: number
|
||||
rulesTriggeredSinceStartTotal: number
|
||||
rulesAvg: number
|
||||
actionsRun: number
|
||||
actionsRunTotal: number
|
||||
actionsRunSinceStart: number,
|
||||
actionsRunSinceStartTotal: number
|
||||
cache: {
|
||||
provider: string,
|
||||
currentKeyCount: number,
|
||||
isShared: boolean,
|
||||
totalRequests: number,
|
||||
totalMiss: number,
|
||||
missPercent: string,
|
||||
requestRate: number,
|
||||
types: ResourceStats
|
||||
},
|
||||
}
|
||||
|
||||
interface QueuedIdentifier {
|
||||
id: string,
|
||||
shouldRefresh: boolean
|
||||
state: 'queued' | 'processing'
|
||||
}
|
||||
|
||||
export class Manager {
|
||||
subreddit: Subreddit;
|
||||
client: Snoowrap;
|
||||
logger: Logger;
|
||||
botName: string;
|
||||
pollOptions: PollingOptionsStrong[] = [];
|
||||
submissionChecks!: SubmissionCheck[];
|
||||
commentChecks!: CommentCheck[];
|
||||
resources!: SubredditResources;
|
||||
wikiLocation: string = 'botconfig/contextbot';
|
||||
wikiLocation: string;
|
||||
lastWikiRevision?: DayjsObj
|
||||
lastWikiCheck: DayjsObj = dayjs();
|
||||
//wikiUpdateRunning: boolean = false;
|
||||
@@ -75,9 +123,21 @@ export class Manager {
|
||||
modStreamCallbacks: Map<string, any> = new Map();
|
||||
dryRun?: boolean;
|
||||
sharedModqueue: boolean;
|
||||
cacheManager: BotResourcesManager;
|
||||
globalDryRun?: boolean;
|
||||
emitter: EventEmitter = new EventEmitter();
|
||||
queue: QueueObject<CheckTask>;
|
||||
// firehose is used to ensure all activities from different polling streams are unique
|
||||
// that is -- if the same activity is in both modqueue and unmoderated we don't want to process the activity twice or use stale data
|
||||
//
|
||||
// so all activities get queued to firehose, it keeps track of items by id (using queuedItemsMeta)
|
||||
// and ensures that if any activities are ingested while they are ALSO currently queued or working then they are properly handled by either
|
||||
// 1) if queued, do not re-queue but instead tell worker to refresh before processing
|
||||
// 2) if currently processing then re-queue but also refresh before processing
|
||||
firehose: QueueObject<CheckTask>;
|
||||
queuedItemsMeta: QueuedIdentifier[] = [];
|
||||
globalMaxWorkers: number;
|
||||
subMaxWorkers?: number;
|
||||
|
||||
displayLabel: string;
|
||||
currentLabels: string[] = [];
|
||||
@@ -124,8 +184,9 @@ export class Manager {
|
||||
rulesUniqueRollingAvg: number = 0;
|
||||
actionsRun: Map<string, number> = new Map();
|
||||
actionsRunSinceStart: Map<string, number> = new Map();
|
||||
actionedEvents: ActionedEvent[] = [];
|
||||
|
||||
getStats = async () => {
|
||||
getStats = async (): Promise<ManagerStats> => {
|
||||
const data: any = {
|
||||
eventsCheckedTotal: this.eventsCheckedTotal,
|
||||
eventsCheckedSinceStartTotal: this.eventsCheckedSinceStartTotal,
|
||||
@@ -152,13 +213,15 @@ export class Manager {
|
||||
currentKeyCount: 0,
|
||||
isShared: false,
|
||||
totalRequests: 0,
|
||||
totalMiss: 0,
|
||||
missPercent: '0%',
|
||||
requestRate: 0,
|
||||
types: cacheStats()
|
||||
},
|
||||
};
|
||||
|
||||
if (this.resources !== undefined) {
|
||||
const resStats = this.resources.getStats();
|
||||
const resStats = await this.resources.getStats();
|
||||
|
||||
data.cache = resStats.cache;
|
||||
data.cache.currentKeyCount = await this.resources.getCacheKeyCount();
|
||||
@@ -176,8 +239,8 @@ export class Manager {
|
||||
return this.displayLabel;
|
||||
}
|
||||
|
||||
constructor(sub: Subreddit, client: Snoowrap, logger: Logger, opts: RuntimeManagerOptions = {}) {
|
||||
const {dryRun, sharedModqueue = false} = opts;
|
||||
constructor(sub: Subreddit, client: Snoowrap, logger: Logger, cacheManager: BotResourcesManager, opts: RuntimeManagerOptions = {botName: 'ContextMod', maxWorkers: 1}) {
|
||||
const {dryRun, sharedModqueue = false, wikiLocation = 'botconfig/contextbot', botName, maxWorkers} = opts;
|
||||
this.displayLabel = opts.nickname || `${sub.display_name_prefixed}`;
|
||||
const getLabels = this.getCurrentLabels;
|
||||
const getDisplay = this.getDisplay;
|
||||
@@ -192,28 +255,18 @@ export class Manager {
|
||||
}
|
||||
}, mergeArr);
|
||||
this.globalDryRun = dryRun;
|
||||
this.wikiLocation = wikiLocation;
|
||||
this.sharedModqueue = sharedModqueue;
|
||||
this.subreddit = sub;
|
||||
this.client = client;
|
||||
this.notificationManager = new NotificationManager(this.logger, this.subreddit, this.displayLabel);
|
||||
this.botName = botName;
|
||||
this.globalMaxWorkers = maxWorkers;
|
||||
this.notificationManager = new NotificationManager(this.logger, this.subreddit, this.displayLabel, botName);
|
||||
this.cacheManager = cacheManager;
|
||||
|
||||
this.queue = queue(async (task: CheckTask, cb) => {
|
||||
if(this.delayBy !== undefined) {
|
||||
this.logger.debug(`SOFT API LIMIT MODE: Delaying Event run by ${this.delayBy} seconds`);
|
||||
await sleep(this.delayBy * 1000);
|
||||
}
|
||||
await this.runChecks(task.checkType, task.activity, task.options);
|
||||
}
|
||||
// TODO allow concurrency??
|
||||
, 1);
|
||||
this.queue.error((err, task) => {
|
||||
this.logger.error('Encountered unhandled error while processing Activity, processing stopped early');
|
||||
this.logger.error(err);
|
||||
});
|
||||
this.queue.drain(() => {
|
||||
this.logger.debug('All queued activities have been processed.');
|
||||
});
|
||||
this.queue = this.generateQueue(this.getMaxWorkers(this.globalMaxWorkers));
|
||||
this.queue.pause();
|
||||
this.firehose = this.generateFirehose();
|
||||
|
||||
this.eventsSampleInterval = setInterval((function(self) {
|
||||
return function() {
|
||||
@@ -256,6 +309,84 @@ export class Manager {
|
||||
})(this), 10000);
|
||||
}
|
||||
|
||||
protected getMaxWorkers(subMaxWorkers?: number) {
|
||||
let maxWorkers = this.globalMaxWorkers;
|
||||
|
||||
if (subMaxWorkers !== undefined) {
|
||||
if (subMaxWorkers > maxWorkers) {
|
||||
this.logger.warn(`Config specified ${subMaxWorkers} max queue workers but global max is set to ${this.globalMaxWorkers} -- will use global max`);
|
||||
} else {
|
||||
maxWorkers = subMaxWorkers;
|
||||
}
|
||||
}
|
||||
if (maxWorkers < 1) {
|
||||
this.logger.warn(`Max queue workers must be greater than or equal to 1, specified: ${maxWorkers}. Will use 1.`);
|
||||
maxWorkers = 1;
|
||||
}
|
||||
|
||||
return maxWorkers;
|
||||
}
|
||||
|
||||
protected generateFirehose() {
|
||||
return queue(async (task: CheckTask, cb) => {
|
||||
// items in queuedItemsMeta will be processed FIFO so earlier elements (by index) are older
//
// if we insert the same item again because it is currently being processed AND THEN we get the item AGAIN we only want to update the newest meta
// so search the array backwards to get the newest only
|
||||
const queuedItemIndex = findLastIndex(this.queuedItemsMeta, x => x.id === task.activity.id);
|
||||
if(queuedItemIndex !== -1) {
|
||||
const itemMeta = this.queuedItemsMeta[queuedItemIndex];
|
||||
let msg = `Item ${itemMeta.id} is already ${itemMeta.state}.`;
|
||||
if(itemMeta.state === 'queued') {
|
||||
this.logger.debug(`${msg} Flagging to refresh data before processing.`);
|
||||
this.queuedItemsMeta.splice(queuedItemIndex, 1, {...itemMeta, shouldRefresh: true});
|
||||
} else {
|
||||
this.logger.debug(`${msg} Re-queuing item but will also refresh data before processing.`);
|
||||
this.queuedItemsMeta.push({id: task.activity.id, shouldRefresh: true, state: 'queued'});
|
||||
this.queue.push(task);
|
||||
}
|
||||
} else {
|
||||
this.queuedItemsMeta.push({id: task.activity.id, shouldRefresh: false, state: 'queued'});
|
||||
this.queue.push(task);
|
||||
}
|
||||
}
|
||||
, 1);
|
||||
}
|
||||
|
||||
protected generateQueue(maxWorkers: number) {
|
||||
if (maxWorkers > 1) {
|
||||
this.logger.warn(`Setting max queue workers above 1 (specified: ${maxWorkers}) may have detrimental effects to log readability and api usage. Consult the documentation before using this advanced/experimental feature.`);
|
||||
}
|
||||
|
||||
const q = queue(async (task: CheckTask, cb) => {
|
||||
if (this.delayBy !== undefined) {
|
||||
this.logger.debug(`SOFT API LIMIT MODE: Delaying Event run by ${this.delayBy} seconds`);
|
||||
await sleep(this.delayBy * 1000);
|
||||
}
|
||||
|
||||
const queuedItemIndex = this.queuedItemsMeta.findIndex(x => x.id === task.activity.id);
|
||||
try {
|
||||
const itemMeta = this.queuedItemsMeta[queuedItemIndex];
|
||||
this.queuedItemsMeta.splice(queuedItemIndex, 1, {...itemMeta, state: 'processing'});
|
||||
await this.runChecks(task.checkType, task.activity, {...task.options, refresh: itemMeta.shouldRefresh});
|
||||
} finally {
|
||||
// always remove item meta regardless of success or failure since we are done with it meow
|
||||
this.queuedItemsMeta.splice(queuedItemIndex, 1);
|
||||
}
|
||||
}
|
||||
, maxWorkers);
|
||||
q.error((err, task) => {
|
||||
this.logger.error('Encountered unhandled error while processing Activity, processing stopped early');
|
||||
this.logger.error(err);
|
||||
});
|
||||
q.drain(() => {
|
||||
this.logger.debug('All queued activities have been processed.');
|
||||
});
|
||||
|
||||
this.logger.info(`Generated new Queue with ${maxWorkers} max workers`);
|
||||
return q;
|
||||
}
|
||||
|
||||
protected parseConfigurationFromObject(configObj: object) {
|
||||
try {
|
||||
const configBuilder = new ConfigBuilder({logger: this.logger});
|
||||
@@ -268,6 +399,9 @@ export class Manager {
|
||||
footer,
|
||||
nickname,
|
||||
notifications,
|
||||
queue: {
|
||||
maxWorkers = undefined,
|
||||
} = {},
|
||||
} = configManagerOpts || {};
|
||||
this.pollOptions = buildPollingOptions(polling);
|
||||
this.dryRun = this.globalDryRun || dryRun;
|
||||
@@ -278,12 +412,19 @@ export class Manager {
|
||||
this.resources.footer = footer;
|
||||
}
|
||||
|
||||
this.subMaxWorkers = maxWorkers;
|
||||
const realMax = this.getMaxWorkers(this.subMaxWorkers);
|
||||
if(realMax !== this.queue.concurrency) {
|
||||
this.queue = this.generateQueue(realMax);
|
||||
this.queue.pause();
|
||||
}
|
||||
|
||||
this.logger.info(`Dry Run: ${this.dryRun === true}`);
|
||||
for (const p of this.pollOptions) {
|
||||
this.logger.info(`Polling Info => ${pollingInfo(p)}`)
|
||||
}
|
||||
|
||||
this.notificationManager = new NotificationManager(this.logger, this.subreddit, this.displayLabel, notifications);
|
||||
this.notificationManager = new NotificationManager(this.logger, this.subreddit, this.displayLabel, this.botName, notifications);
|
||||
const {events, notifiers} = this.notificationManager.getStats();
|
||||
const notifierContent = notifiers.length === 0 ? 'None' : notifiers.join(', ');
|
||||
const eventContent = events.length === 0 ? 'None' : events.join(', ');
|
||||
@@ -293,9 +434,11 @@ export class Manager {
|
||||
footer,
|
||||
logger: this.logger,
|
||||
subreddit: this.subreddit,
|
||||
caching
|
||||
caching,
|
||||
client: this.client,
|
||||
};
|
||||
this.resources = ResourceManager.set(this.subreddit.display_name, resourceConfig);
|
||||
this.resources = this.cacheManager.set(this.subreddit.display_name, resourceConfig);
|
||||
this.resources.setLogger(this.logger);
|
||||
|
||||
this.logger.info('Subreddit-specific options updated');
|
||||
this.logger.info('Building Checks...');
|
||||
@@ -308,7 +451,9 @@ export class Manager {
|
||||
...jCheck,
|
||||
dryRun: this.dryRun || jCheck.dryRun,
|
||||
logger: this.logger,
|
||||
subredditName: this.subreddit.display_name
|
||||
subredditName: this.subreddit.display_name,
|
||||
resources: this.resources,
|
||||
client: this.client,
|
||||
};
|
||||
if (jCheck.kind === 'comment') {
|
||||
commentChecks.push(new CommentCheck(checkConfig));
|
||||
@@ -423,8 +568,11 @@ export class Manager {
|
||||
checkNames = [],
|
||||
delayUntil,
|
||||
dryRun,
|
||||
refresh = false,
|
||||
} = options || {};
|
||||
|
||||
let wasRefreshed = false;
|
||||
|
||||
if (delayUntil !== undefined) {
|
||||
const created = dayjs.unix(item.created_utc);
|
||||
const diff = dayjs().diff(created, 's');
|
||||
@@ -433,8 +581,16 @@ export class Manager {
|
||||
await sleep(delayUntil - diff);
|
||||
// @ts-ignore
|
||||
item = await activity.refresh();
|
||||
wasRefreshed = true;
|
||||
}
|
||||
}
|
||||
// refresh signal from firehose if activity was ingested multiple times before processing or re-queued while processing
|
||||
// want to make sure we have the most recent data
|
||||
if(!wasRefreshed && refresh === true) {
|
||||
this.logger.verbose('Refreshed data (probably due to signal from firehose)');
|
||||
// @ts-ignore
|
||||
item = await activity.refresh();
|
||||
}
|
||||
|
||||
const startingApiLimit = this.client.ratelimitRemaining;
|
||||
|
||||
@@ -451,24 +607,50 @@ export class Manager {
|
||||
let checksRun = 0;
|
||||
let actionsRun = 0;
|
||||
let totalRulesRun = 0;
|
||||
let runActions: Action[] = [];
|
||||
let runActions: ActionResult[] = [];
|
||||
let actionedEvent: ActionedEvent = {
|
||||
subreddit: this.subreddit.display_name_prefixed,
|
||||
activity: {
|
||||
peek: ePeek,
|
||||
link: item.permalink
|
||||
},
|
||||
author: item.author.name,
|
||||
timestamp: Date.now(),
|
||||
check: '',
|
||||
ruleSummary: '',
|
||||
ruleResults: [],
|
||||
actionResults: [],
|
||||
}
|
||||
let triggered = false;
|
||||
|
||||
try {
|
||||
let triggered = false;
|
||||
for (const check of checks) {
|
||||
if (checkNames.length > 0 && !checkNames.map(x => x.toLowerCase()).some(x => x === check.name.toLowerCase())) {
|
||||
this.logger.warn(`Check ${check.name} not in array of requested checks to run, skipping`);
|
||||
this.logger.warn(`Check ${check.name} not in array of requested checks to run, skipping...`);
|
||||
continue;
|
||||
}
|
||||
if(!check.enabled) {
|
||||
this.logger.info(`Check ${check.name} not run because it is not enabled, skipping...`);
|
||||
continue;
|
||||
}
|
||||
checksRun++;
|
||||
triggered = false;
|
||||
let isFromCache = false;
|
||||
let currentResults: RuleResult[] = [];
|
||||
try {
|
||||
const [checkTriggered, checkResults] = await check.runRules(item, allRuleResults);
|
||||
const [checkTriggered, checkResults, fromCache = false] = await check.runRules(item, allRuleResults);
|
||||
isFromCache = fromCache;
|
||||
if(!fromCache) {
|
||||
await check.setCacheResult(item, {result: checkTriggered, ruleResults: checkResults});
|
||||
}
|
||||
currentResults = checkResults;
|
||||
totalRulesRun += checkResults.length;
|
||||
allRuleResults = allRuleResults.concat(determineNewResults(allRuleResults, checkResults));
|
||||
triggered = checkTriggered;
|
||||
if(triggered && fromCache && !check.cacheUserResult.runActions) {
|
||||
this.logger.info('Check was triggered but cache result options specified NOT to run actions...counting as check NOT triggered');
|
||||
triggered = false;
|
||||
}
|
||||
} catch (e) {
|
||||
if (e.logged !== true) {
|
||||
this.logger.warn(`Running rules for Check ${check.name} failed due to uncaught exception`, e);
|
||||
@@ -476,14 +658,21 @@ export class Manager {
|
||||
}
|
||||
|
||||
if (triggered) {
|
||||
actionedEvent.check = check.name;
|
||||
actionedEvent.ruleResults = currentResults;
|
||||
if(isFromCache) {
|
||||
actionedEvent.ruleSummary = `Check result was found in cache: ${triggeredIndicator(true)}`;
|
||||
} else {
|
||||
actionedEvent.ruleSummary = resultsSummary(currentResults, check.condition);
|
||||
}
|
||||
this.checksTriggered.set(check.name, (this.checksTriggered.get(check.name) || 0) + 1);
|
||||
this.checksTriggeredSinceStart.set(check.name, (this.checksTriggeredSinceStart.get(check.name) || 0) + 1);
|
||||
runActions = await check.runActions(item, currentResults.filter(x => x.triggered), dryRun);
|
||||
actionsRun = runActions.length;
|
||||
|
||||
if(check.notifyOnTrigger) {
|
||||
const ar = runActions.map(x => x.getActionUniqueName()).join(', ');
|
||||
this.notificationManager.handle('eventActioned', 'Check Triggered', `Check "${check.name}" was triggered on Event: \n ${ePeek} \n\n with the following actions run: ${ar}`);
|
||||
const ar = runActions.map(x => x.name).join(', ');
|
||||
this.notificationManager.handle('eventActioned', 'Check Triggered', `Check "${check.name}" was triggered on Event: \n\n ${ePeek} \n\n with the following actions run: ${ar}`);
|
||||
}
|
||||
break;
|
||||
}
|
||||
@@ -512,9 +701,13 @@ export class Manager {
|
||||
this.rulesTriggeredSinceStartTotal += triggeredRulesTotal;
|
||||
|
||||
for (const a of runActions) {
|
||||
const name = a.getActionUniqueName();
|
||||
const name = a.name;
|
||||
this.actionsRun.set(name, (this.actionsRun.get(name) || 0) + 1);
|
||||
this.actionsRunSinceStart.set(name, (this.actionsRunSinceStart.get(name) || 0) + 1)
|
||||
this.actionsRunSinceStart.set(name, (this.actionsRunSinceStart.get(name) || 0) + 1);
|
||||
}
|
||||
actionedEvent.actionResults = runActions;
|
||||
if(triggered) {
|
||||
await this.resources.addActionedEvent(actionedEvent);
|
||||
}
|
||||
|
||||
this.logger.verbose(`Run Stats: Checks ${checksRun} | Rules => Total: ${totalRulesRun} Unique: ${allRuleResults.length} Cached: ${totalRulesRun - allRuleResults.length} Rolling Avg: ~${formatNumber(this.rulesUniqueRollingAvg)}/s | Actions ${actionsRun}`);
|
||||
@@ -549,7 +742,7 @@ export class Manager {
|
||||
if (limit === DEFAULT_POLLING_LIMIT && interval === DEFAULT_POLLING_INTERVAL && this.sharedModqueue) {
|
||||
modStreamType = 'unmoderated';
|
||||
// use default mod stream from resources
|
||||
stream = ResourceManager.modStreams.get('unmoderated') as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
|
||||
stream = this.cacheManager.modStreams.get('unmoderated') as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
|
||||
} else {
|
||||
stream = new UnmoderatedStream(this.client, {
|
||||
subreddit: this.subreddit.display_name,
|
||||
@@ -562,7 +755,7 @@ export class Manager {
|
||||
if (limit === DEFAULT_POLLING_LIMIT && interval === DEFAULT_POLLING_INTERVAL) {
|
||||
modStreamType = 'modqueue';
|
||||
// use default mod stream from resources
|
||||
stream = ResourceManager.modStreams.get('modqueue') as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
|
||||
stream = this.cacheManager.modStreams.get('modqueue') as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
|
||||
} else {
|
||||
stream = new ModQueueStream(this.client, {
|
||||
subreddit: this.subreddit.display_name,
|
||||
@@ -616,7 +809,7 @@ export class Manager {
|
||||
checkType = 'Comment';
|
||||
}
|
||||
if (checkType !== undefined) {
|
||||
this.queue.push({checkType, activity: item, options: {delayUntil}})
|
||||
this.firehose.push({checkType, activity: item, options: {delayUntil}})
|
||||
}
|
||||
};
|
||||
|
||||
@@ -642,29 +835,6 @@ export class Manager {
|
||||
}
|
||||
}
|
||||
|
||||
async handle(): Promise<void> {
|
||||
if (this.submissionChecks.length === 0 && this.commentChecks.length === 0) {
|
||||
this.logger.warn('No submission or comment checks to run! Bot will not run.');
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
for (const s of this.streams) {
|
||||
s.startInterval();
|
||||
}
|
||||
this.startedAt = dayjs();
|
||||
this.running = true;
|
||||
this.manuallyStopped = false;
|
||||
this.logger.info('Bot Running');
|
||||
|
||||
await pEvent(this.emitter, 'end');
|
||||
} catch (err) {
|
||||
this.logger.error('Too many request errors occurred or an unhandled error was encountered, manager is stopping');
|
||||
} finally {
|
||||
this.stop();
|
||||
}
|
||||
}
|
||||
|
||||
startQueue(causedBy: Invokee = 'system', options?: ManagerStateChangeOption) {
|
||||
const {reason, suppressNotification = false} = options || {};
|
||||
if(this.queueState.state === RUNNING) {
|
||||
@@ -672,14 +842,19 @@ export class Manager {
|
||||
} else if (!this.validConfigLoaded) {
|
||||
this.logger.warn('Cannot start activity processing queue while manager has an invalid configuration');
|
||||
} else {
|
||||
if(this.queueState.state === STOPPED) {
|
||||
// extra precaution to make sure queue meta is cleared before starting queue
|
||||
this.queuedItemsMeta = [];
|
||||
}
|
||||
this.queue.resume();
|
||||
this.firehose.resume();
|
||||
this.logger.info(`Activity processing queue started RUNNING with ${this.queue.length()} queued activities`);
|
||||
this.queueState = {
|
||||
state: RUNNING,
|
||||
causedBy
|
||||
}
|
||||
if(!suppressNotification) {
|
||||
this.notificationManager.handle('runStateChanged', 'Queue Started', reason, causedBy)
|
||||
this.notificationManager.handle('runStateChanged', 'Queue Started', reason, causedBy);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -741,7 +916,9 @@ export class Manager {
|
||||
this.logger.verbose(`Activity processing queue is stopping...waiting for ${this.queue.running()} activities to finish processing`);
|
||||
}
|
||||
this.logger.info(`Activity processing queue stopped by ${causedBy} and ${this.queue.length()} queued activities cleared (waited ${dayjs().diff(pauseWaitStart, 's')} seconds while activity processing finished)`);
|
||||
this.firehose.kill();
|
||||
this.queue.kill();
|
||||
this.queuedItemsMeta = [];
|
||||
}
|
||||
|
||||
this.queueState = {
|
||||
@@ -821,7 +998,7 @@ export class Manager {
|
||||
}
|
||||
this.streams = [];
|
||||
for (const [k, v] of this.modStreamCallbacks) {
|
||||
const stream = ResourceManager.modStreams.get(k) as Poll<Snoowrap.Submission | Snoowrap.Comment>;
|
||||
const stream = this.cacheManager.modStreams.get(k) as Poll<Snoowrap.Submission | Snoowrap.Comment>;
|
||||
stream.removeListener('item', v);
|
||||
}
|
||||
this.startedAt = undefined;
|
||||
|
||||
@@ -1,27 +1,30 @@
|
||||
import Snoowrap, {RedditUser, Comment, Submission} from "snoowrap";
|
||||
import Snoowrap, {RedditUser, Subreddit} from "snoowrap";
|
||||
import objectHash from 'object-hash';
|
||||
import {
|
||||
activityIsDeleted, activityIsFiltered,
|
||||
activityIsRemoved,
|
||||
AuthorActivitiesOptions,
|
||||
AuthorTypedActivitiesOptions, BOT_LINK,
|
||||
getAuthorActivities,
|
||||
testAuthorCriteria
|
||||
} from "../Utils/SnoowrapUtils";
|
||||
import Subreddit from 'snoowrap/dist/objects/Subreddit';
|
||||
import winston, {Logger} from "winston";
|
||||
import fetch from 'node-fetch';
|
||||
import {
|
||||
buildCacheOptionsFromProvider,
|
||||
cacheStats, createCacheManager,
|
||||
formatNumber,
|
||||
asSubmission,
|
||||
buildCacheOptionsFromProvider, buildCachePrefix,
|
||||
cacheStats, comparisonTextOp, createCacheManager,
|
||||
formatNumber, getActivityAuthorName, getActivitySubredditName, isStrongSubredditState,
|
||||
mergeArr,
|
||||
parseExternalUrl,
|
||||
parseWikiContext
|
||||
parseExternalUrl, parseGenericValueComparison,
|
||||
parseWikiContext, toStrongSubredditState
|
||||
} from "../util";
|
||||
import LoggedError from "../Utils/LoggedError";
|
||||
import {
|
||||
CacheOptions,
|
||||
Footer, OperatorConfig, ResourceStats,
|
||||
SubredditCacheConfig, TTLConfig
|
||||
BotInstanceConfig,
|
||||
CacheOptions, CommentState,
|
||||
Footer, OperatorConfig, ResourceStats, StrongCache, SubmissionState,
|
||||
CacheConfig, TTLConfig, TypedActivityStates, UserResultCache, ActionedEvent, SubredditState, StrongSubredditState
|
||||
} from "../Common/interfaces";
|
||||
import UserNotes from "./UserNotes";
|
||||
import Mustache from "mustache";
|
||||
@@ -29,40 +32,55 @@ import he from "he";
|
||||
import {AuthorCriteria} from "../Author/Author";
|
||||
import {SPoll} from "./Streams";
|
||||
import {Cache} from 'cache-manager';
|
||||
import {Submission, Comment} from "snoowrap/dist/objects";
|
||||
import {cacheTTLDefaults} from "../Common/defaults";
|
||||
import {check} from "tcp-port-used";
|
||||
|
||||
export const DEFAULT_FOOTER = '\r\n*****\r\nThis action was performed by [a bot.]({{botLink}}) Mention a moderator or [send a modmail]({{modmailLink}}) if you have any ideas, questions, or concerns about this action.';
|
||||
|
||||
export interface SubredditResourceConfig extends Footer {
|
||||
caching?: SubredditCacheConfig,
|
||||
caching?: CacheConfig,
|
||||
subreddit: Subreddit,
|
||||
logger: Logger;
|
||||
client: Snoowrap
|
||||
}
|
||||
|
||||
interface SubredditResourceOptions extends Footer {
|
||||
ttl: Required<TTLConfig>
|
||||
cache?: Cache
|
||||
cache: Cache
|
||||
cacheType: string;
|
||||
cacheSettingsHash: string
|
||||
subreddit: Subreddit,
|
||||
logger: Logger;
|
||||
client: Snoowrap;
|
||||
prefix?: string;
|
||||
actionedEventsMax: number;
|
||||
}
|
||||
|
||||
export interface SubredditResourceSetOptions extends SubredditCacheConfig, Footer {
|
||||
export interface SubredditResourceSetOptions extends CacheConfig, Footer {
|
||||
}
|
||||
|
||||
export class SubredditResources {
|
||||
//enabled!: boolean;
|
||||
protected authorTTL!: number;
|
||||
protected useSubredditAuthorCache!: boolean;
|
||||
protected wikiTTL!: number;
|
||||
protected authorTTL: number | false = cacheTTLDefaults.authorTTL;
|
||||
protected subredditTTL: number | false = cacheTTLDefaults.subredditTTL;
|
||||
protected wikiTTL: number | false = cacheTTLDefaults.wikiTTL;
|
||||
protected submissionTTL: number | false = cacheTTLDefaults.submissionTTL;
|
||||
protected commentTTL: number | false = cacheTTLDefaults.commentTTL;
|
||||
protected filterCriteriaTTL: number | false = cacheTTLDefaults.filterCriteriaTTL;
|
||||
name: string;
|
||||
protected logger: Logger;
|
||||
userNotes: UserNotes;
|
||||
footer: false | string = DEFAULT_FOOTER;
|
||||
subreddit: Subreddit
|
||||
cache?: Cache
|
||||
client: Snoowrap
|
||||
cache: Cache
|
||||
cacheType: string
|
||||
cacheSettingsHash?: string;
|
||||
pruneInterval?: any;
|
||||
prefix?: string
|
||||
actionedEventsMax: number;
|
||||
|
||||
stats: { cache: ResourceStats };
|
||||
|
||||
@@ -74,21 +92,35 @@ export class SubredditResources {
|
||||
userNotesTTL,
authorTTL,
wikiTTL,
filterCriteriaTTL,
submissionTTL,
commentTTL,
subredditTTL,
},
cache,
prefix,
cacheType,
actionedEventsMax,
cacheSettingsHash,
client,
} = options || {};

this.cacheSettingsHash = cacheSettingsHash;
this.cache = cache;
this.prefix = prefix;
this.client = client;
this.cacheType = cacheType;
this.authorTTL = authorTTL;
this.wikiTTL = wikiTTL;
this.actionedEventsMax = actionedEventsMax;
this.authorTTL = authorTTL === true ? 0 : authorTTL;
this.submissionTTL = submissionTTL === true ? 0 : submissionTTL;
this.commentTTL = commentTTL === true ? 0 : commentTTL;
this.subredditTTL = subredditTTL === true ? 0 : subredditTTL;
this.wikiTTL = wikiTTL === true ? 0 : wikiTTL;
this.filterCriteriaTTL = filterCriteriaTTL === true ? 0 : filterCriteriaTTL;
this.subreddit = subreddit;
this.name = name;
if (logger === undefined) {
const alogger = winston.loggers.get('default')
const alogger = winston.loggers.get('app')
this.logger = alogger.child({labels: [this.name, 'Resource Cache']}, mergeArr);
} else {
this.logger = logger.child({labels: ['Resource Cache']}, mergeArr);
@@ -99,56 +131,213 @@ export class SubredditResources {
};

const cacheUseCB = (miss: boolean) => {
this.stats.cache.userNotes.requestTimestamps.push(Date.now());
this.stats.cache.userNotes.requests++;
this.stats.cache.userNotes.miss += miss ? 1 : 0;
}
this.userNotes = new UserNotes(userNotesTTL, this.subreddit, this.logger, this.cache, cacheUseCB)

if(this.cacheType === 'memory' && this.cacheSettingsHash !== 'default') {
const min = Math.min(...([this.wikiTTL, this.authorTTL, this.submissionTTL, this.commentTTL, this.filterCriteriaTTL].filter(x => typeof x === 'number' && x !== 0) as number[]));
if(min > 0) {
// set default prune interval
this.pruneInterval = setInterval(() => {
// @ts-ignore
this.defaultCache?.store.prune();
this.logger.debug('Pruned cache');
// prune interval should be twice the smallest TTL
},min * 1000 * 2)
}
}
}

async getCacheKeyCount() {
if (this.cache !== undefined && this.cache.store.keys !== undefined) {
if (this.cache.store.keys !== undefined) {
if(this.cacheType === 'redis') {
return (await this.cache.store.keys(`${this.prefix}*`)).length;
}
return (await this.cache.store.keys()).length;
}
return 0;
}

getStats() {
async getStats() {
const totals = Object.values(this.stats.cache).reduce((acc, curr) => ({
miss: acc.miss + curr.miss,
req: acc.req + curr.requests,
}), {miss: 0, req: 0});
const cacheKeys = Object.keys(this.stats.cache);
return {
cache: {
// TODO could probably combine these two
totalRequests: Object.values(this.stats.cache).reduce((acc, curr) => acc + curr.requests, 0),
types: Object.keys(this.stats.cache).reduce((acc, curr) => {
totalRequests: totals.req,
totalMiss: totals.miss,
missPercent: `${formatNumber(totals.miss === 0 || totals.req === 0 ? 0 :(totals.miss/totals.req) * 100, {toFixed: 0})}%`,
types: await cacheKeys.reduce(async (accProm, curr) => {
const acc = await accProm;
// calculate miss percent

const per = acc[curr].miss === 0 ? 0 : formatNumber(acc[curr].miss / acc[curr].requests) * 100;
// @ts-ignore
acc[curr].missPercent = `${formatNumber(per, {toFixed: 0})}%`;

// calculate average identifier hits

const idCache = acc[curr].identifierRequestCount;
// @ts-expect-error
const idKeys = await idCache.store.keys() as string[];
if(idKeys.length > 0) {
let hits = 0;
for (const k of idKeys) {
hits += await idCache.get(k) as number;
}
acc[curr].identifierAverageHit = formatNumber(hits/idKeys.length);
}

if(acc[curr].requestTimestamps.length > 1) {
// calculate average time between request
const diffData = acc[curr].requestTimestamps.reduce((acc, curr: number) => {
if(acc.last === 0) {
acc.last = curr;
return acc;
}
acc.diffs.push(curr - acc.last);
acc.last = curr;
return acc;
},{last: 0, diffs: [] as number[]});
const avgDiff = diffData.diffs.reduce((acc, curr) => acc + curr, 0) / diffData.diffs.length;

acc[curr].averageTimeBetweenHits = formatNumber(avgDiff/1000);
}

return acc;
}, this.stats.cache)
}, Promise.resolve(this.stats.cache))
}
}
}

async getAuthorActivities(user: RedditUser, options: AuthorTypedActivitiesOptions): Promise<Array<Submission | Comment>> {
if (this.cache !== undefined && this.authorTTL > 0) {
const userName = user.name;
const hashObj: any = {...options, userName};
if (this.useSubredditAuthorCache) {
hashObj.subreddit = this.name;
setLogger(logger: Logger) {
this.logger = logger.child({labels: ['Resource Cache']}, mergeArr);
}

async getActionedEvents(): Promise<ActionedEvent[]> {
return await this.cache.wrap(`actionedEvents-${this.subreddit.display_name}`, () => []);
}

async addActionedEvent(ae: ActionedEvent) {
const events = await this.cache.wrap(`actionedEvents-${this.subreddit.display_name}`, () => []) as ActionedEvent[];
events.unshift(ae);
await this.cache.set(`actionedEvents-${this.subreddit.display_name}`, events.slice(0, this.actionedEventsMax), {ttl: 0});
}

async getActivity(item: Submission | Comment) {
try {
let hash = '';
if (this.submissionTTL !== false && asSubmission(item)) {
hash = `sub-${item.name}`;
await this.stats.cache.submission.identifierRequestCount.set(hash, (await this.stats.cache.submission.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.submission.requestTimestamps.push(Date.now());
this.stats.cache.submission.requests++;
const cachedSubmission = await this.cache.get(hash);
if (cachedSubmission !== undefined && cachedSubmission !== null) {
this.logger.debug(`Cache Hit: Submission ${item.name}`);
return cachedSubmission;
}
// @ts-ignore
const submission = await item.fetch();
this.stats.cache.submission.miss++;
await this.cache.set(hash, submission, {ttl: this.submissionTTL});
return submission;
} else if (this.commentTTL !== false) {
hash = `comm-${item.name}`;
await this.stats.cache.comment.identifierRequestCount.set(hash, (await this.stats.cache.comment.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.comment.requestTimestamps.push(Date.now());
this.stats.cache.comment.requests++;
const cachedComment = await this.cache.get(hash);
if (cachedComment !== undefined && cachedComment !== null) {
this.logger.debug(`Cache Hit: Comment ${item.name}`);
return cachedComment;
}
// @ts-ignore
const comment = await item.fetch();
this.stats.cache.comment.miss++;
await this.cache.set(hash, comment, {ttl: this.commentTTL});
return comment;
} else {
// @ts-ignore
return await item.fetch();
}
const hash = objectHash.sha1({...options, userName});
} catch (err) {
this.logger.error('Error while trying to fetch a cached activity', err);
throw err.logged;
}
}

// @ts-ignore
async getSubreddit(item: Submission | Comment) {
try {
let hash = '';
if (this.subredditTTL !== false) {
hash = `sub-${getActivitySubredditName(item)}`;
await this.stats.cache.subreddit.identifierRequestCount.set(hash, (await this.stats.cache.subreddit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.subreddit.requestTimestamps.push(Date.now());
this.stats.cache.subreddit.requests++;
const cachedSubreddit = await this.cache.get(hash);
if (cachedSubreddit !== undefined && cachedSubreddit !== null) {
this.logger.debug(`Cache Hit: Subreddit ${item.subreddit.display_name}`);
// @ts-ignore
return cachedSubreddit as Subreddit;
}
// @ts-ignore
const subreddit = await this.client.getSubreddit(getActivitySubredditName(item)).fetch() as Subreddit;
this.stats.cache.subreddit.miss++;
// @ts-ignore
await this.cache.set(hash, subreddit, {ttl: this.subredditTTL});
// @ts-ignore
return subreddit as Subreddit;
} else {
// @ts-ignore
let subreddit = await this.client.getSubreddit(getActivitySubredditName(item));

return subreddit as Subreddit;
}
} catch (err) {
this.logger.error('Error while trying to fetch a cached activity', err);
throw err.logged;
}
}

async getAuthorActivities(user: RedditUser, options: AuthorTypedActivitiesOptions): Promise<Array<Submission | Comment>> {
const userName = getActivityAuthorName(user);
if (this.authorTTL !== false) {
const hashObj: any = options;
if (this.useSubredditAuthorCache) {
hashObj.subreddit = this.subreddit;
}
const hash = `authorActivities-${userName}-${options.type || 'overview'}-${objectHash.sha1(hashObj)}`;

this.stats.cache.author.requests++;
await this.stats.cache.author.identifierRequestCount.set(userName, (await this.stats.cache.author.identifierRequestCount.wrap(userName, () => 0) as number) + 1);
this.stats.cache.author.requestTimestamps.push(Date.now());
let miss = false;
const cacheVal = await this.cache.wrap(hash, async () => {
miss = true;
if(typeof user === 'string') {
// @ts-ignore
user = await this.client.getUser(userName);
}
return await getAuthorActivities(user, options);
}, {ttl: this.authorTTL});
if (!miss) {
this.logger.debug(`Cache Hit: ${userName} (${options.type || 'overview'})`);
this.logger.debug(`Cache Hit: ${userName} (Hash ${hash})`);
} else {
this.stats.cache.author.miss++;
}
return cacheVal as Array<Submission | Comment>;
}
if(typeof user === 'string') {
// @ts-ignore
user = await this.client.getUser(userName);
}
return await getAuthorActivities(user, options);
}

@@ -180,12 +369,14 @@ export class SubredditResources {
}

// try to get cached value first
let hash = `${subreddit.display_name}-${cacheKey}`;
if (this.cache !== undefined && this.wikiTTL > 0) {
let hash = `${subreddit.display_name}-content-${cacheKey}`;
if (this.wikiTTL !== false) {
await this.stats.cache.content.identifierRequestCount.set(cacheKey, (await this.stats.cache.content.identifierRequestCount.wrap(cacheKey, () => 0) as number) + 1);
this.stats.cache.content.requestTimestamps.push(Date.now());
this.stats.cache.content.requests++;
const cachedContent = await this.cache.get(hash);
if (cachedContent !== undefined) {
this.logger.debug(`Cache Hit: ${cacheKey}`);
if (cachedContent !== undefined && cachedContent !== null) {
this.logger.debug(`Content Cache Hit: ${cacheKey}`);
return cachedContent as string;
} else {
this.stats.cache.content.miss++;
@@ -200,15 +391,21 @@ export class SubredditResources {
if (wikiContext.subreddit === undefined || wikiContext.subreddit.toLowerCase() === subreddit.display_name) {
sub = subreddit;
} else {
// @ts-ignore
const client = subreddit._r as Snoowrap;
sub = client.getSubreddit(wikiContext.subreddit);
sub = this.client.getSubreddit(wikiContext.subreddit);
}
try {
// @ts-ignore
const wikiPage = sub.getWikiPage(wikiContext.wiki);
wikiContent = await wikiPage.content_md;
} catch (err) {
const msg = `Could not read wiki page. Please ensure the page 'https://reddit.com${sub.display_name_prefixed}wiki/${wikiContext}' exists and is readable`;
let msg = `Could not read wiki page for an unknown reason. Please ensure the page 'https://reddit.com${sub.display_name_prefixed}/wiki/${wikiContext.wiki}' exists and is readable`;
if(err.statusCode !== undefined) {
if(err.statusCode === 404) {
msg = `Could not find a wiki page at https://reddit.com${sub.display_name_prefixed}/wiki/${wikiContext.wiki} -- Reddit returned a 404`;
} else if(err.statusCode === 403 || err.statusCode === 401) {
msg = `Bot either does not have visibility permissions for the wiki page at https://reddit.com${sub.display_name_prefixed}/wiki/${wikiContext.wiki} (due to subreddit restrictions) or the bot does not have oauth permissions to read wiki pages (operator error). Reddit returned a ${err.statusCode}`;
}
}
this.logger.error(msg, err);
throw new LoggedError(msg);
}
@@ -223,25 +420,73 @@ export class SubredditResources {
}
}

if (this.cache !== undefined && this.wikiTTL > 0) {
this.cache.set(hash, wikiContent, this.wikiTTL);
if (this.wikiTTL !== false) {
this.cache.set(hash, wikiContent, {ttl: this.wikiTTL});
}

return wikiContent;
}

async testSubredditCriteria(item: (Comment | Submission), state: SubredditState | StrongSubredditState) {
if(Object.keys(state).length === 0) {
return true;
}
// optimize for name-only criteria checks
// -- we don't need to store cache results for this since we know subreddit name is always available from item (no request required)
const critCount = Object.entries(state).filter(([key, val]) => {
return val !== undefined && !['name','stateDescription'].includes(key);
}).length;
if(critCount === 0) {
const subName = getActivitySubredditName(item);
return await this.isSubreddit({display_name: subName} as Subreddit, state, this.logger);
}

if (this.filterCriteriaTTL !== false) {
try {
const hash = `subredditCrit-${getActivitySubredditName(item)}-${objectHash.sha1(state)}`;
await this.stats.cache.subredditCrit.identifierRequestCount.set(hash, (await this.stats.cache.subredditCrit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.subredditCrit.requestTimestamps.push(Date.now());
this.stats.cache.subredditCrit.requests++;
const cachedItem = await this.cache.get(hash);
if (cachedItem !== undefined && cachedItem !== null) {
this.logger.debug(`Cache Hit: Subreddit Check on ${getActivitySubredditName(item)} (Hash ${hash})`);
return cachedItem as boolean;
}
const itemResult = await this.isSubreddit(await this.getSubreddit(item), state, this.logger);
this.stats.cache.subredditCrit.miss++;
await this.cache.set(hash, itemResult, {ttl: this.filterCriteriaTTL});
return itemResult;
} catch (err) {
if (err.logged !== true) {
this.logger.error('Error occurred while testing subreddit criteria', err);
}
throw err;
}
}

return await this.isSubreddit(await this.getSubreddit(item), state, this.logger);
}

async testAuthorCriteria(item: (Comment | Submission), authorOpts: AuthorCriteria, include = true) {
if (this.cache !== undefined && this.authorTTL > 0) {
const hashObj = {itemId: item.id, ...authorOpts, include};
const hash = `authorCrit-${objectHash.sha1(hashObj)}`;
if (this.filterCriteriaTTL !== false) {
// in the criteria check we only actually use the `item` to get the author flair
// which will be the same for the entire subreddit
//
// so we can create a hash only using subreddit-author-criteria
// and ignore the actual item
const hashObj = {...authorOpts, include};
const userName = getActivityAuthorName(item.author);
const hash = `authorCrit-${this.subreddit.display_name}-${userName}-${objectHash.sha1(hashObj)}`;
await this.stats.cache.authorCrit.identifierRequestCount.set(hash, (await this.stats.cache.authorCrit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.authorCrit.requestTimestamps.push(Date.now());
this.stats.cache.authorCrit.requests++;
let miss = false;
const cachedAuthorTest = await this.cache.wrap(hash, async () => {
miss = true;
return await testAuthorCriteria(item, authorOpts, include, this.userNotes);
}, {ttl: this.authorTTL});
}, {ttl: this.filterCriteriaTTL});
if (!miss) {
this.logger.debug(`Cache Hit: Author Check on ${item.id}`);
this.logger.debug(`Cache Hit: Author Check on ${userName} (Hash ${hash})`);
} else {
this.stats.cache.authorCrit.miss++;
}
@@ -251,6 +496,221 @@ export class SubredditResources {
return await testAuthorCriteria(item, authorOpts, include, this.userNotes);
}
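// --- Illustrative sketch (not part of the original diff) ---
// The comments above explain why the author-criteria cache key ignores the specific activity:
// author flair is the same across a subreddit, so the key only needs subreddit + author + criteria.
// A minimal, hypothetical version of that key-building idea, reusing the objectHash dependency
// already imported by this module (helper name is an assumption, not project code):
//
// const buildAuthorCritKey = (subredditName: string, userName: string, criteria: object, include: boolean): string =>
//     `authorCrit-${subredditName}-${userName}-${objectHash.sha1({...criteria, include})}`;
//
// Any activity by the same author in the same subreddit then resolves to the same cached result.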

async testItemCriteria(i: (Comment | Submission), activityStates: TypedActivityStates) {
if (this.filterCriteriaTTL !== false) {
let item = i;
let states = activityStates;
// optimize for submission only checks on comment item
if (item instanceof Comment && states.length === 1 && Object.keys(states[0]).length === 1 && (states[0] as CommentState).submissionState !== undefined) {
// @ts-ignore
const subProxy = await this.client.getSubmission(await i.link_id);
// @ts-ignore
item = await this.getActivity(subProxy);
states = (states[0] as CommentState).submissionState as SubmissionState[];
}
try {
const hash = `itemCrit-${item.name}-${objectHash.sha1(states)}`;
await this.stats.cache.itemCrit.identifierRequestCount.set(hash, (await this.stats.cache.itemCrit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.itemCrit.requestTimestamps.push(Date.now());
this.stats.cache.itemCrit.requests++;
const cachedItem = await this.cache.get(hash);
if (cachedItem !== undefined && cachedItem !== null) {
this.logger.debug(`Cache Hit: Item Check on ${item.name} (Hash ${hash})`);
return cachedItem as boolean;
}
const itemResult = await this.isItem(item, states, this.logger);
this.stats.cache.itemCrit.miss++;
await this.cache.set(hash, itemResult, {ttl: this.filterCriteriaTTL});
return itemResult;
} catch (err) {
if (err.logged !== true) {
this.logger.error('Error occurred while testing item criteria', err);
}
throw err;
}
}

return await this.isItem(i, activityStates, this.logger);
}

async isSubreddit (subreddit: Subreddit, stateCriteria: SubredditState | StrongSubredditState, logger: Logger) {
delete stateCriteria.stateDescription;

if (Object.keys(stateCriteria).length === 0) {
return true;
}

const crit = isStrongSubredditState(stateCriteria) ? stateCriteria : toStrongSubredditState(stateCriteria, {defaultFlags: 'i'});

const log = logger.child({leaf: 'Subreddit Check'}, mergeArr);

return await (async () => {
for (const k of Object.keys(crit)) {
// @ts-ignore
if (crit[k] !== undefined) {
switch (k) {
case 'name':
const nameReg = crit[k] as RegExp;
if(!nameReg.test(subreddit.display_name)) {
return false;
}
break;
default:
// @ts-ignore
if (crit[k] !== undefined) {
// @ts-ignore
if (crit[k] !== subreddit[k]) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${subreddit[k]}`)
return false
}
} else {
log.warn(`Tried to test for Subreddit property '${k}' but it did not exist`);
}
break;
}
}
}
log.debug(`Passed: ${JSON.stringify(stateCriteria)}`);
return true;
})() as boolean;
}

async isItem (item: Submission | Comment, stateCriteria: TypedActivityStates, logger: Logger) {
if (stateCriteria.length === 0) {
return true;
}

const log = logger.child({leaf: 'Item Check'}, mergeArr);

for (const crit of stateCriteria) {
const pass = await (async () => {
for (const k of Object.keys(crit)) {
// @ts-ignore
if (crit[k] !== undefined) {
switch (k) {
case 'submissionState':
if(!(item instanceof Comment)) {
log.warn('`submissionState` is not allowed in `itemIs` criteria when the main Activity is a Submission');
continue;
}
// get submission
// @ts-ignore
const subProxy = await this.client.getSubmission(await item.link_id);
// @ts-ignore
const sub = await this.getActivity(subProxy);
// @ts-ignore
const res = await this.testItemCriteria(sub, crit[k] as SubmissionState[], logger);
if(!res) {
return false;
}
break;
case 'score':
const scoreCompare = parseGenericValueComparison(crit[k] as string);
if(!comparisonTextOp(item.score, scoreCompare.operator, scoreCompare.value)) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${item.score}`)
return false
}
break;
case 'reports':
const reportCompare = parseGenericValueComparison(crit[k] as string);
if(!comparisonTextOp(item.num_reports, reportCompare.operator, reportCompare.value)) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${item.num_reports}`)
return false
}
break;
case 'removed':
const removed = activityIsRemoved(item);
if (removed !== crit['removed']) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${removed}`)
return false
}
break;
case 'deleted':
const deleted = activityIsDeleted(item);
if (deleted !== crit['deleted']) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${deleted}`)
return false
}
break;
case 'filtered':
const filtered = activityIsFiltered(item);
if (filtered !== crit['filtered']) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${filtered}`)
return false
}
break;
case 'title':
if((item instanceof Comment)) {
log.warn('`title` is not allowed in `itemIs` criteria when the main Activity is a Comment');
continue;
}
// @ts-ignore
const titleReg = crit[k] as string;
try {
if(null === item.title.match(titleReg)) {
// @ts-ignore
log.debug(`Failed to match title as regular expression: ${titleReg}`);
return false;
}
} catch (err) {
log.error(`An error occurred while attempting to match title against string as regular expression: ${titleReg}. Most likely the string is not a valid regular expression.`, err);
return false
}
break;
default:
// @ts-ignore
if (item[k] !== undefined) {
// @ts-ignore
if (item[k] !== crit[k]) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${item[k]}`)
return false
}
} else {
log.warn(`Tried to test for Item property '${k}' but it did not exist`);
}
break;
}
}
}
log.debug(`Passed: ${JSON.stringify(crit)}`);
return true;
})() as boolean;
if (pass) {
return true
}
}
return false
}

async getCommentCheckCacheResult(item: Comment, checkConfig: object): Promise<UserResultCache | undefined> {
const userName = getActivityAuthorName(item.author);
const hash = `commentUserResult-${userName}-${item.link_id}-${objectHash.sha1(checkConfig)}`;
this.stats.cache.commentCheck.requests++;
let result = await this.cache.get(hash) as UserResultCache | undefined | null;
if(result === null) {
result = undefined;
}
if(result === undefined) {
this.stats.cache.commentCheck.miss++;
}
this.logger.debug(`Cache Hit: Comment Check for ${userName} in Submission ${item.link_id} (Hash ${hash})`);
return result;
}

async setCommentCheckCacheResult(item: Comment, checkConfig: object, result: UserResultCache, ttl: number) {
const userName = getActivityAuthorName(item.author);
const hash = `commentUserResult-${userName}-${item.link_id}-${objectHash.sha1(checkConfig)}`
await this.cache.set(hash, result, { ttl });
this.logger.debug(`Cached check result '${result.result}' for User ${userName} on Submission ${item.link_id} for ${ttl} seconds (Hash ${hash})`);
}

async generateFooter(item: Submission | Comment, actionFooter?: false | string) {
let footer = actionFooter !== undefined ? actionFooter : this.footer;
if (footer === false) {
@@ -265,35 +725,63 @@ export class SubredditResources {
}
}

class SubredditResourcesManager {
|
||||
export class BotResourcesManager {
|
||||
resources: Map<string, SubredditResources> = new Map();
|
||||
authorTTL: number = 10000;
|
||||
enabled: boolean = true;
|
||||
modStreams: Map<string, SPoll<Snoowrap.Submission | Snoowrap.Comment>> = new Map();
|
||||
defaultCache?: Cache;
|
||||
defaultCache: Cache;
|
||||
defaultCacheConfig: StrongCache
|
||||
cacheType: string = 'none';
|
||||
ttlDefaults!: Required<TTLConfig>;
|
||||
cacheHash: string;
|
||||
ttlDefaults: Required<TTLConfig>;
|
||||
actionedEventsMaxDefault?: number;
|
||||
actionedEventsDefault: number;
|
||||
pruneInterval: any;
|
||||
|
||||
setDefaultsFromConfig(config: OperatorConfig) {
|
||||
constructor(config: BotInstanceConfig) {
|
||||
const {
|
||||
caching: {
|
||||
authorTTL,
|
||||
userNotesTTL,
|
||||
wikiTTL,
|
||||
commentTTL,
|
||||
submissionTTL,
|
||||
subredditTTL,
|
||||
filterCriteriaTTL,
|
||||
provider,
|
||||
actionedEventsMax,
|
||||
actionedEventsDefault,
|
||||
},
|
||||
name,
|
||||
credentials,
|
||||
caching,
|
||||
} = config;
|
||||
this.setDefaultCache(provider);
|
||||
this.setTTLDefaults({authorTTL, userNotesTTL, wikiTTL});
|
||||
}
|
||||
caching.provider.prefix = buildCachePrefix([caching.provider.prefix, 'SHARED']);
|
||||
const {actionedEventsMax: eMax, actionedEventsDefault: eDef, ...relevantCacheSettings} = caching;
|
||||
this.cacheHash = objectHash.sha1(relevantCacheSettings);
|
||||
this.defaultCacheConfig = caching;
|
||||
this.ttlDefaults = {authorTTL, userNotesTTL, wikiTTL, commentTTL, submissionTTL, filterCriteriaTTL, subredditTTL};
|
||||
|
||||
setDefaultCache(options: CacheOptions) {
|
||||
const options = provider;
|
||||
this.cacheType = options.store;
|
||||
this.actionedEventsMaxDefault = actionedEventsMax;
|
||||
this.actionedEventsDefault = actionedEventsDefault;
|
||||
this.defaultCache = createCacheManager(options);
|
||||
}
|
||||
|
||||
setTTLDefaults(def: Required<TTLConfig>) {
|
||||
this.ttlDefaults = def;
|
||||
if (this.cacheType === 'memory') {
|
||||
const min = Math.min(...([this.ttlDefaults.wikiTTL, this.ttlDefaults.authorTTL, this.ttlDefaults.userNotesTTL].filter(x => typeof x === 'number' && x !== 0) as number[]));
|
||||
if (min > 0) {
|
||||
// set default prune interval
|
||||
this.pruneInterval = setInterval(() => {
|
||||
// @ts-ignore
|
||||
this.defaultCache?.store.prune();
|
||||
// kinda hacky but whatever
|
||||
const logger = winston.loggers.get('app');
|
||||
logger.debug('Pruned Shared Cache');
|
||||
// prune interval should be twice the smallest TTL
|
||||
}, min * 1000 * 2)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
get(subName: string): SubredditResources | undefined {
|
||||
@@ -306,9 +794,19 @@ class SubredditResourcesManager {
|
||||
set(subName: string, initOptions: SubredditResourceConfig): SubredditResources {
|
||||
let hash = 'default';
|
||||
const { caching, ...init } = initOptions;
|
||||
let opts: SubredditResourceOptions;
|
||||
|
||||
let opts: SubredditResourceOptions = {
|
||||
cache: this.defaultCache,
|
||||
cacheType: this.cacheType,
|
||||
cacheSettingsHash: hash,
|
||||
ttl: this.ttlDefaults,
|
||||
prefix: this.defaultCacheConfig.provider.prefix,
|
||||
actionedEventsMax: this.actionedEventsMaxDefault !== undefined ? Math.min(this.actionedEventsDefault, this.actionedEventsMaxDefault) : this.actionedEventsDefault,
|
||||
...init,
|
||||
};
|
||||
|
||||
if(caching !== undefined) {
|
||||
const {provider = 'memory', ...rest} = caching;
|
||||
const {provider = this.defaultCacheConfig.provider, actionedEventsMax = this.actionedEventsDefault, ...rest} = caching;
|
||||
let cacheConfig = {
|
||||
provider: buildCacheOptionsFromProvider(provider),
|
||||
ttl: {
|
||||
@@ -317,21 +815,22 @@ class SubredditResourcesManager {
|
||||
},
|
||||
}
|
||||
hash = objectHash.sha1(cacheConfig);
|
||||
const {provider: trueProvider, ...trueRest} = cacheConfig;
|
||||
opts = {
|
||||
cache: createCacheManager(trueProvider),
|
||||
cacheType: trueProvider.store,
|
||||
cacheSettingsHash: hash,
|
||||
...init,
|
||||
...trueRest,
|
||||
};
|
||||
} else {
|
||||
opts = {
|
||||
cache: this.defaultCache,
|
||||
cacheType: this.cacheType,
|
||||
cacheSettingsHash: hash,
|
||||
ttl: this.ttlDefaults,
|
||||
...init,
|
||||
// only need to create a private cache if its settings are actually different than the default
|
||||
if(hash !== this.cacheHash) {
|
||||
const {provider: trueProvider, ...trueRest} = cacheConfig;
|
||||
const defaultPrefix = trueProvider.prefix;
|
||||
const subPrefix = defaultPrefix === this.defaultCacheConfig.provider.prefix ? buildCachePrefix([(defaultPrefix !== undefined ? defaultPrefix.replace('SHARED', '') : defaultPrefix), subName]) : trueProvider.prefix;
|
||||
trueProvider.prefix = subPrefix;
|
||||
const eventsMax = this.actionedEventsMaxDefault !== undefined ? Math.min(actionedEventsMax, this.actionedEventsMaxDefault) : actionedEventsMax;
|
||||
opts = {
|
||||
cache: createCacheManager(trueProvider),
|
||||
actionedEventsMax: eventsMax,
|
||||
cacheType: trueProvider.store,
|
||||
cacheSettingsHash: hash,
|
||||
prefix: subPrefix,
|
||||
...init,
|
||||
...trueRest,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -356,7 +855,3 @@ class SubredditResourcesManager {
|
||||
return resource;
|
||||
}
|
||||
}
|
||||
|
||||
const manager = new SubredditResourcesManager();
|
||||
|
||||
export default manager;
|
||||
|
||||
@@ -1,6 +1,13 @@
|
||||
import dayjs, {Dayjs} from "dayjs";
|
||||
import {Comment, RedditUser, WikiPage} from "snoowrap";
|
||||
import {COMMENT_URL_ID, deflateUserNotes, inflateUserNotes, parseLinkIdentifier, SUBMISSION_URL_ID} from "../util";
|
||||
import {
|
||||
COMMENT_URL_ID,
|
||||
deflateUserNotes, getActivityAuthorName,
|
||||
inflateUserNotes,
|
||||
isScopeError,
|
||||
parseLinkIdentifier,
|
||||
SUBMISSION_URL_ID
|
||||
} from "../util";
|
||||
import Subreddit from "snoowrap/dist/objects/Subreddit";
|
||||
import {Logger} from "winston";
|
||||
import LoggedError from "../Utils/LoggedError";
|
||||
@@ -48,19 +55,23 @@ export interface RawNote {
|
||||
export type UserNotesConstants = Pick<any, "users" | "warnings">;
|
||||
|
||||
export class UserNotes {
|
||||
notesTTL: number;
|
||||
notesTTL: number | false;
|
||||
subreddit: Subreddit;
|
||||
wiki: WikiPage;
|
||||
moderators?: RedditUser[];
|
||||
logger: Logger;
|
||||
identifier: string;
|
||||
cache?: Cache
|
||||
cache: Cache
|
||||
cacheCB: Function;
|
||||
|
||||
users: Map<string, UserNote[]> = new Map();
|
||||
|
||||
constructor(ttl: number, subreddit: Subreddit, logger: Logger, cache: Cache | undefined, cacheCB: Function) {
|
||||
this.notesTTL = ttl;
|
||||
saveDebounce: any;
|
||||
debounceCB: any;
|
||||
batchCount: number = 0;
|
||||
|
||||
constructor(ttl: number | boolean, subreddit: Subreddit, logger: Logger, cache: Cache, cacheCB: Function) {
|
||||
this.notesTTL = ttl === true ? 0 : ttl;
|
||||
this.subreddit = subreddit;
|
||||
this.logger = logger;
|
||||
this.wiki = subreddit.getWikiPage('usernotes');
|
||||
@@ -70,10 +81,11 @@ export class UserNotes {
|
||||
}
|
||||
|
||||
async getUserNotes(user: RedditUser): Promise<UserNote[]> {
|
||||
const userName = getActivityAuthorName(user);
|
||||
let notes: UserNote[] | undefined = [];
|
||||
|
||||
if (this.users !== undefined) {
|
||||
notes = this.users.get(user.name);
|
||||
notes = this.users.get(userName);
|
||||
if (notes !== undefined) {
|
||||
this.logger.debug('Returned cached notes');
|
||||
return notes;
|
||||
@@ -81,7 +93,7 @@ export class UserNotes {
|
||||
}
|
||||
|
||||
const payload = await this.retrieveData();
|
||||
const rawNotes = payload.blob[user.name];
|
||||
const rawNotes = payload.blob[userName];
|
||||
if (rawNotes !== undefined) {
|
||||
if (this.moderators === undefined) {
|
||||
this.moderators = await this.subreddit.getModerators();
|
||||
@@ -90,7 +102,7 @@ export class UserNotes {
|
||||
// sort in ascending order by time
|
||||
notes.sort((a, b) => a.time.isBefore(b.time) ? -1 : 1);
|
||||
if (this.notesTTL > 0 && this.cache !== undefined) {
|
||||
this.users.set(user.name, notes);
|
||||
this.users.set(userName, notes);
|
||||
}
|
||||
return notes;
|
||||
} else {
|
||||
@@ -101,6 +113,7 @@ export class UserNotes {
|
||||
async addUserNote(item: (Submission|Comment), type: string | number, text: string = ''): Promise<UserNote>
|
||||
{
|
||||
const payload = await this.retrieveData();
|
||||
const userName = getActivityAuthorName(item.author);
|
||||
|
||||
// idgaf
|
||||
// @ts-ignore
|
||||
@@ -116,16 +129,16 @@ export class UserNotes {
|
||||
}
|
||||
const newNote = new UserNote(dayjs(), text, mod, type, `https://reddit.com${item.permalink}`);
|
||||
|
||||
if(payload.blob[item.author.name] === undefined) {
|
||||
payload.blob[item.author.name] = {ns: []};
|
||||
if(payload.blob[userName] === undefined) {
|
||||
payload.blob[userName] = {ns: []};
|
||||
}
|
||||
payload.blob[item.author.name].ns.push(newNote.toRaw(payload.constants));
|
||||
payload.blob[userName].ns.push(newNote.toRaw(payload.constants));
|
||||
|
||||
await this.saveData(payload);
|
||||
if(this.notesTTL > 0 && this.cache !== undefined) {
|
||||
const currNotes = this.users.get(item.author.name) || [];
|
||||
if(this.notesTTL > 0) {
|
||||
const currNotes = this.users.get(userName) || [];
|
||||
currNotes.push(newNote);
|
||||
this.users.set(item.author.name, currNotes);
|
||||
this.users.set(userName, currNotes);
|
||||
}
|
||||
return newNote;
|
||||
}
|
||||
@@ -137,16 +150,27 @@ export class UserNotes {
|
||||
}
|
||||
|
||||
async retrieveData(): Promise<RawUserNotesPayload> {
|
||||
if (this.notesTTL > 0 && this.cache !== undefined) {
|
||||
let cacheMiss;
|
||||
if (this.notesTTL > 0) {
|
||||
const cachedPayload = await this.cache.get(this.identifier);
|
||||
if (cachedPayload !== undefined) {
|
||||
if (cachedPayload !== undefined && cachedPayload !== null) {
|
||||
this.cacheCB(false);
|
||||
return cachedPayload as unknown as RawUserNotesPayload;
|
||||
}
|
||||
this.cacheCB(true);
|
||||
cacheMiss = true;
|
||||
}
|
||||
|
||||
try {
|
||||
// DISABLED for now because I think it's causing issues
|
||||
// if(cacheMiss && this.debounceCB !== undefined) {
|
||||
// // timeout is still delayed. its our wiki data and we want it now! cm cacheworth 877 cache now
|
||||
// this.logger.debug(`Detected missed cache on usernotes retrieval while batch (${this.batchCount}) save is in progress, executing save immediately before retrieving new notes...`);
|
||||
// clearTimeout(this.saveDebounce);
|
||||
// await this.debounceCB();
|
||||
// this.debounceCB = undefined;
|
||||
// this.saveDebounce = undefined;
|
||||
// }
|
||||
// @ts-ignore
|
||||
this.wiki = await this.subreddit.getWikiPage('usernotes').fetch();
|
||||
const wikiContent = this.wiki.content_md;
|
||||
@@ -155,8 +179,8 @@ export class UserNotes {
|
||||
|
||||
userNotes.blob = inflateUserNotes(userNotes.blob);
|
||||
|
||||
if (this.notesTTL > 0 && this.cache !== undefined) {
|
||||
await this.cache.set(`${this.subreddit.display_name}-usernotes`, userNotes, this.notesTTL);
|
||||
if (this.notesTTL !== false) {
|
||||
await this.cache.set(`${this.subreddit.display_name}-usernotes`, userNotes, {ttl: this.notesTTL});
|
||||
this.users = new Map();
|
||||
}
|
||||
|
||||
@@ -171,21 +195,54 @@ export class UserNotes {
|
||||
async saveData(payload: RawUserNotesPayload): Promise<RawUserNotesPayload> {
|
||||
|
||||
const blob = deflateUserNotes(payload.blob);
|
||||
const wikiPayload = {...payload, blob};
|
||||
|
||||
const wikiPayload = {text: JSON.stringify({...payload, blob}), reason: 'ContextBot edited usernotes'};
|
||||
try {
|
||||
// @ts-ignore
|
||||
//this.wiki = await this.wiki.refresh();
|
||||
// @ts-ignore
|
||||
this.wiki = await this.subreddit.getWikiPage('usernotes').edit({text: JSON.stringify(wikiPayload), reason: 'ContextBot edited usernotes'});
|
||||
if (this.notesTTL > 0 && this.cache !== undefined) {
|
||||
await this.cache.set(this.identifier, payload, this.notesTTL);
|
||||
if (this.notesTTL !== false) {
|
||||
// DISABLED for now because if it fails it throws an uncaught rejection
// and we need to figure out how to handle this other than just logging (want to interrupt action flow too?)
|
||||
//
|
||||
// debounce usernote save by 5 seconds -- effectively batch usernote saves
|
||||
//
|
||||
// so that if we are processing a ton of checks that write user notes we aren't calling to save the wiki page on every call
|
||||
// since we also have everything in cache (most likely...)
|
||||
//
|
||||
// TODO might want to increase timeout to 10 seconds
|
||||
// if(this.saveDebounce !== undefined) {
|
||||
// clearTimeout(this.saveDebounce);
|
||||
// }
|
||||
// this.debounceCB = (async function () {
|
||||
// const p = wikiPayload;
|
||||
// // @ts-ignore
|
||||
// const self = this as UserNotes;
|
||||
// // @ts-ignore
|
||||
// self.wiki = await self.subreddit.getWikiPage('usernotes').edit(p);
|
||||
// self.logger.debug(`Batch saved ${self.batchCount} usernotes`);
|
||||
// self.debounceCB = undefined;
|
||||
// self.saveDebounce = undefined;
|
||||
// self.batchCount = 0;
|
||||
// }).bind(this);
|
||||
// this.saveDebounce = setTimeout(this.debounceCB,5000);
|
||||
// this.batchCount++;
|
||||
// this.logger.debug(`Saving Usernotes has been debounced for 5 seconds (${this.batchCount} batched)`)
|
||||
|
||||
// @ts-ignore
|
||||
await this.subreddit.getWikiPage('usernotes').edit(wikiPayload);
|
||||
await this.cache.set(this.identifier, payload, {ttl: this.notesTTL});
|
||||
this.users = new Map();
|
||||
} else {
|
||||
// @ts-ignore
|
||||
this.wiki = await this.subreddit.getWikiPage('usernotes').edit(wikiPayload);
|
||||
}
|
||||
|
||||
return payload as RawUserNotesPayload;
|
||||
} catch (err) {
|
||||
const msg = `Could not edit usernotes. Make sure at least one moderator has used toolbox and usernotes before and that this account has editing permissions`;
|
||||
let msg = 'Could not edit usernotes.';
|
||||
// Make sure at least one moderator has used toolbox and usernotes before and that this account has editing permissions`;
|
||||
if(isScopeError(err)) {
|
||||
msg = `${msg} The bot account did not have sufficient OAUTH scope to perform this action. You must re-authenticate the bot and ensure it has 'wikiedit' permissions.`
|
||||
} else {
|
||||
msg = `${msg} Make sure at least one moderator has used toolbox, created a usernote, and that this account has editing permissions for the wiki page.`;
|
||||
}
|
||||
this.logger.error(msg, err);
|
||||
throw new LoggedError(msg);
|
||||
}
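// --- Illustrative sketch (not part of the original diff) ---
// The disabled block above describes debouncing usernote saves so that a burst of checks
// produces a single wiki edit rather than one edit per note. A minimal, generic version of
// that idea (hypothetical helper, not the project's actual implementation):
//
// function makeDebouncedSaver(saveFn: () => Promise<void>, delayMs = 5000) {
//     let timer: ReturnType<typeof setTimeout> | undefined;
//     let batched = 0;
//     return () => {
//         batched++;
//         if (timer !== undefined) {
//             clearTimeout(timer);
//         }
//         timer = setTimeout(async () => {
//             const count = batched;
//             batched = 0;
//             timer = undefined;
//             try {
//                 await saveFn(); // one wiki edit covering `count` batched saves
//             } catch (err) {
//                 // handle the failure here so it does not become an unhandled rejection
//                 console.error(`Batched save of ${count} usernotes failed`, err);
//             }
//         }, delayMs);
//     };
// }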
25 src/Utils/AbortToken.ts (Normal file)
@@ -0,0 +1,25 @@
//https://gist.github.com/pygy/6290f78b078e22418821b07d8d63f111#gistcomment-3408351
class AbortToken {
private readonly abortSymbol = Symbol('cancelled');
private abortPromise: Promise<any>;
private resolve!: Function; // Works due to promise init

constructor() {
this.abortPromise = new Promise(res => this.resolve = res);
}

public async wrap<T>(p: PromiseLike<T>): Promise<T> {
const result = await Promise.race([p, this.abortPromise]);
if (result === this.abortSymbol) {
throw new Error('aborted');
}

return result;
}

public abort() {
this.resolve(this.abortSymbol);
}
}

export default AbortToken;
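// --- Illustrative usage sketch (not part of the original diff) ---
// AbortToken races an arbitrary promise against an internal "abort" promise; calling abort()
// makes any in-flight wrap() reject with 'aborted'. A minimal example under that assumption:
//
// const token = new AbortToken();
// const slowWork = new Promise<string>(res => setTimeout(() => res('done'), 10_000));
//
// setTimeout(() => token.abort(), 1000); // give up after 1 second
//
// token.wrap(slowWork)
//     .then(result => console.log(result))
//     .catch(err => console.log(err.message)); // logs 'aborted'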
@@ -40,13 +40,13 @@ export const hardLimit = new commander.Option('--hardLimit <limit>', 'When API l
|
||||
.argParser(argParseInt);
|
||||
|
||||
export const dryRun = new commander.Option('--dryRun', 'Set all subreddits in dry run mode, overriding configurations (default: process.env.DRYRUN || false)')
|
||||
.argParser(parseBoolWithDefault(undefined));
|
||||
.default(undefined);
|
||||
|
||||
export const checks = new commander.Option('-h, --checks <checkNames...>', 'An optional list of Checks, by name, that should be run. If none are specified all Checks for the Subreddit the Activity is in will be run');
|
||||
|
||||
export const proxy = new commander.Option('--proxy <proxyEndpoint>', 'Proxy Snoowrap requests through this endpoint (default: process.env.PROXY)');
|
||||
|
||||
export const operator = new commander.Option('--operator <name>', 'Username of the reddit user operating this application, used for displaying OP level info/actions in UI (default: process.env.OPERATOR)');
|
||||
export const operator = new commander.Option('--operator <name...>', 'Username(s) of the reddit user(s) operating this application, used for displaying OP level info/actions in UI (default: process.env.OPERATOR)');
|
||||
|
||||
export const operatorDisplay = new commander.Option('--operatorDisplay <name>', 'An optional name to display who is operating this application in the UI (default: process.env.OPERATOR_DISPLAY || Anonymous)');
|
||||
|
||||
|
||||
@@ -13,10 +13,16 @@ import {
|
||||
TypedActivityStates
|
||||
} from "../Common/interfaces";
|
||||
import {
|
||||
compareDurationValue, comparisonTextOp,
|
||||
compareDurationValue,
|
||||
comparisonTextOp,
|
||||
isActivityWindowCriteria,
|
||||
normalizeName, parseDuration,
|
||||
parseDurationComparison, parseGenericValueComparison, parseGenericValueOrPercentComparison, parseSubredditName,
|
||||
normalizeName,
|
||||
parseDuration,
|
||||
parseDurationComparison,
|
||||
parseGenericValueComparison,
|
||||
parseGenericValueOrPercentComparison,
|
||||
parseRuleResultsToMarkdownSummary,
|
||||
parseSubredditName,
|
||||
truncateStringToLength
|
||||
} from "../util";
|
||||
import UserNotes from "../Subreddit/UserNotes";
|
||||
@@ -24,9 +30,9 @@ import {Logger} from "winston";
|
||||
import InvalidRegexError from "./InvalidRegexError";
|
||||
import SimpleError from "./SimpleError";
|
||||
import {AuthorCriteria} from "../Author/Author";
|
||||
import { URL } from "url";
|
||||
import {URL} from "url";
|
||||
|
||||
export const BOT_LINK = 'https://www.reddit.com/r/ContextModBot/comments/o1dugk/introduction_to_contextmodbot_and_rcb';
|
||||
export const BOT_LINK = 'https://www.reddit.com/r/ContextModBot/comments/otz396/introduction_to_contextmodbot';
|
||||
|
||||
export interface AuthorTypedActivitiesOptions extends AuthorActivitiesOptions {
|
||||
type?: 'comment' | 'submission',
|
||||
@@ -58,7 +64,7 @@ export async function getAuthorActivities(user: RedditUser, options: AuthorTyped
|
||||
let includes: string[] = [];
|
||||
let excludes: string[] = [];
|
||||
|
||||
if(isActivityWindowCriteria(optWindow)) {
|
||||
if (isActivityWindowCriteria(optWindow)) {
|
||||
const {
|
||||
satisfyOn = 'any',
|
||||
count,
|
||||
@@ -72,25 +78,25 @@ export async function getAuthorActivities(user: RedditUser, options: AuthorTyped
|
||||
includes = include.map(x => parseSubredditName(x).toLowerCase());
|
||||
excludes = exclude.map(x => parseSubredditName(x).toLowerCase());
|
||||
|
||||
if(includes.length > 0 && excludes.length > 0) {
|
||||
if (includes.length > 0 && excludes.length > 0) {
|
||||
// TODO add logger so this can be logged...
|
||||
// this.logger.warn('include and exclude both specified, exclude will be ignored');
|
||||
}
|
||||
satisfiedCount = count;
|
||||
durVal = duration;
|
||||
satisfy = satisfyOn
|
||||
} else if(typeof optWindow === 'number') {
|
||||
} else if (typeof optWindow === 'number') {
|
||||
satisfiedCount = optWindow;
|
||||
} else {
|
||||
durVal = optWindow as DurationVal;
|
||||
}
|
||||
|
||||
// if count is less than max limit (100) go ahead and just get that many. may result in faster response time for low numbers
|
||||
if(satisfiedCount !== undefined) {
|
||||
if (satisfiedCount !== undefined) {
|
||||
chunkSize = Math.min(chunkSize, satisfiedCount);
|
||||
}
|
||||
|
||||
if(durVal !== undefined) {
|
||||
if (durVal !== undefined) {
|
||||
const endTime = dayjs();
|
||||
if (typeof durVal === 'object') {
|
||||
duration = dayjs.duration(durVal);
|
||||
@@ -110,9 +116,9 @@ export async function getAuthorActivities(user: RedditUser, options: AuthorTyped
|
||||
satisfiedEndtime = endTime.subtract(duration.asMilliseconds(), 'milliseconds');
|
||||
}
|
||||
|
||||
if(satisfiedCount === undefined && satisfiedEndtime === undefined) {
|
||||
if (satisfiedCount === undefined && satisfiedEndtime === undefined) {
|
||||
throw new Error('window value was not valid');
|
||||
} else if(satisfy === 'all' && !(satisfiedCount !== undefined && satisfiedEndtime !== undefined)) {
|
||||
} else if (satisfy === 'all' && !(satisfiedCount !== undefined && satisfiedEndtime !== undefined)) {
|
||||
// even though 'all' was requested we don't have two criteria so its really 'any' logic
|
||||
satisfy = 'any';
|
||||
}
|
||||
@@ -152,23 +158,16 @@ export async function getAuthorActivities(user: RedditUser, options: AuthorTyped
|
||||
});
|
||||
}
|
||||
|
||||
if(!keepRemoved) {
|
||||
if (!keepRemoved) {
|
||||
// snoowrap typings think 'removed' property does not exist on submission
|
||||
// @ts-ignore
|
||||
listSlice = listSlice.filter(x => !activityIsRemoved(x));
|
||||
}
|
||||
|
||||
if (satisfiedCount !== undefined && items.length + listSlice.length >= satisfiedCount) {
|
||||
// satisfied count
|
||||
if(satisfy === 'any') {
|
||||
items = items.concat(listSlice).slice(0, satisfiedCount);
|
||||
break;
|
||||
}
|
||||
countOk = true;
|
||||
}
|
||||
|
||||
// its more likely the time criteria is going to be hit before the count criteria
|
||||
// so check this first
|
||||
let truncatedItems: Array<Submission | Comment> = [];
|
||||
if(satisfiedEndtime !== undefined) {
|
||||
if (satisfiedEndtime !== undefined) {
|
||||
truncatedItems = listSlice.filter((x) => {
|
||||
const utc = x.created_utc * 1000;
|
||||
const itemDate = dayjs(utc);
|
||||
@@ -177,7 +176,7 @@ export async function getAuthorActivities(user: RedditUser, options: AuthorTyped
|
||||
});
|
||||
|
||||
if (truncatedItems.length !== listSlice.length) {
|
||||
if(satisfy === 'any') {
|
||||
if (satisfy === 'any') {
|
||||
// satisfied duration
|
||||
items = items.concat(truncatedItems);
|
||||
break;
|
||||
@@ -186,9 +185,18 @@ export async function getAuthorActivities(user: RedditUser, options: AuthorTyped
|
||||
}
|
||||
}
|
||||
|
||||
if (satisfiedCount !== undefined && items.length + listSlice.length >= satisfiedCount) {
|
||||
// satisfied count
|
||||
if (satisfy === 'any') {
|
||||
items = items.concat(listSlice).slice(0, satisfiedCount);
|
||||
break;
|
||||
}
|
||||
countOk = true;
|
||||
}
|
||||
|
||||
// if we've satisfied everything take whichever is bigger
|
||||
if(satisfy === 'all' && countOk && timeOk) {
|
||||
if(satisfiedCount as number > items.length + truncatedItems.length) {
|
||||
if (satisfy === 'all' && countOk && timeOk) {
|
||||
if (satisfiedCount as number > items.length + truncatedItems.length) {
|
||||
items = items.concat(listSlice).slice(0, satisfiedCount);
|
||||
} else {
|
||||
items = items.concat(truncatedItems);
|
||||
@@ -256,14 +264,14 @@ export const renderContent = async (template: string, data: (Submission | Commen
|
||||
// ...grouped,
|
||||
// };
|
||||
// },
|
||||
permalink: data.permalink,
|
||||
permalink: `https://reddit.com${data.permalink}`,
|
||||
botLink: BOT_LINK,
|
||||
}
|
||||
if(template.includes('{{item.notes')) {
|
||||
if (template.includes('{{item.notes')) {
|
||||
// we need to get notes
|
||||
const notesData = await usernotes.getUserNotes(data.author);
|
||||
// return usable notes data with some stats
|
||||
const current = notesData.length > 0 ? notesData[notesData.length -1] : undefined;
|
||||
const current = notesData.length > 0 ? notesData[notesData.length - 1] : undefined;
|
||||
// group by type
|
||||
const grouped = notesData.reduce((acc: any, x) => {
|
||||
const {[x.noteType]: nt = []} = acc;
|
||||
@@ -303,7 +311,7 @@ export const renderContent = async (template: string, data: (Submission | Commen
|
||||
};
|
||||
}, {});
|
||||
|
||||
const view = {item: templateData, rules: normalizedRuleResults};
|
||||
const view = {item: templateData, ruleSummary: parseRuleResultsToMarkdownSummary(ruleResults), rules: normalizedRuleResults};
|
||||
const rendered = Mustache.render(template, view) as string;
|
||||
return he.decode(rendered);
|
||||
}
|
||||
@@ -379,10 +387,10 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
|
||||
case 'linkKarma':
|
||||
const lkCompare = parseGenericValueOrPercentComparison(await authorOpts.linkKarma as string);
|
||||
let lkMatch;
|
||||
if(lkCompare.isPercent) {
|
||||
if (lkCompare.isPercent) {
|
||||
// @ts-ignore
|
||||
const tk = author.total_karma as number;
|
||||
lkMatch = comparisonTextOp(author.link_karma / tk, lkCompare.operator, lkCompare.value/100);
|
||||
lkMatch = comparisonTextOp(author.link_karma / tk, lkCompare.operator, lkCompare.value / 100);
|
||||
} else {
|
||||
lkMatch = comparisonTextOp(author.link_karma, lkCompare.operator, lkCompare.value);
|
||||
}
|
||||
@@ -393,10 +401,10 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
|
||||
case 'commentKarma':
|
||||
const ckCompare = parseGenericValueOrPercentComparison(await authorOpts.commentKarma as string);
|
||||
let ckMatch;
|
||||
if(ckCompare.isPercent) {
|
||||
if (ckCompare.isPercent) {
|
||||
// @ts-ignore
|
||||
const ck = author.total_karma as number;
|
||||
ckMatch = comparisonTextOp(author.comment_karma / ck, ckCompare.operator, ckCompare.value/100);
|
||||
ckMatch = comparisonTextOp(author.comment_karma / ck, ckCompare.operator, ckCompare.value / 100);
|
||||
} else {
|
||||
ckMatch = comparisonTextOp(author.comment_karma, ckCompare.operator, ckCompare.value);
|
||||
}
|
||||
@@ -406,7 +414,7 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
|
||||
break;
|
||||
case 'totalKarma':
|
||||
const tkCompare = parseGenericValueComparison(await authorOpts.totalKarma as string);
|
||||
if(tkCompare.isPercent) {
|
||||
if (tkCompare.isPercent) {
|
||||
throw new SimpleError(`'totalKarma' value on AuthorCriteria cannot be a percentage`);
|
||||
}
|
||||
// @ts-ignore
|
||||
@@ -427,7 +435,12 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
|
||||
const notePass = () => {
|
||||
for (const noteCriteria of authorOpts[k] as UserNoteCriteria[]) {
|
||||
const {count = '>= 1', search = 'current', type} = noteCriteria;
|
||||
const {value, operator, isPercent, extra = ''} = parseGenericValueOrPercentComparison(count);
|
||||
const {
|
||||
value,
|
||||
operator,
|
||||
isPercent,
|
||||
extra = ''
|
||||
} = parseGenericValueOrPercentComparison(count);
|
||||
const order = extra.includes('asc') ? 'ascending' : 'descending';
|
||||
switch (search) {
|
||||
case 'current':
|
||||
@@ -448,7 +461,7 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
|
||||
} else {
|
||||
currCount = 0;
|
||||
}
|
||||
if(isPercent) {
|
||||
if (isPercent) {
|
||||
throw new SimpleError(`When comparing UserNotes with 'consecutive' search 'count' cannot be a percentage. Given: ${count}`);
|
||||
}
|
||||
if (comparisonTextOp(currCount, operator, value)) {
|
||||
@@ -457,11 +470,11 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
|
||||
}
|
||||
break;
|
||||
case 'total':
|
||||
if(isPercent) {
|
||||
if(comparisonTextOp(notes.filter(x => x.noteType === type).length / notes.length, operator, value/100)) {
|
||||
if (isPercent) {
|
||||
if (comparisonTextOp(notes.filter(x => x.noteType === type).length / notes.length, operator, value / 100)) {
|
||||
return true;
|
||||
}
|
||||
} else if(comparisonTextOp(notes.filter(x => x.noteType === type).length, operator, value)) {
|
||||
} else if (comparisonTextOp(notes.filter(x => x.noteType === type).length, operator, value)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@@ -497,7 +510,8 @@ export const itemContentPeek = async (item: (Comment | Submission), peekLength =
|
||||
peek = `${truncatePeek(item.title)} by ${author} https://reddit.com${item.permalink}`;
|
||||
|
||||
} else if (item instanceof Comment) {
|
||||
content = truncatePeek(item.body);
|
||||
// replace newlines with spaces to make peek more compact
|
||||
content = truncatePeek(item.body.replaceAll('\n', ' '));
|
||||
peek = `${truncatePeek(content)} by ${author} in https://reddit.com${item.permalink}`;
|
||||
}
|
||||
|
||||
@@ -526,18 +540,18 @@ export const getAttributionIdentifier = (sub: Submission, useParentMediaDomain =
|
||||
description,
|
||||
provider_name,
|
||||
} = sub.secure_media?.oembed;
|
||||
switch(provider_name) {
|
||||
switch (provider_name) {
|
||||
case 'Spotify':
|
||||
if(description !== undefined) {
|
||||
if (description !== undefined) {
|
||||
let match = description.match(SPOTIFY_PODCAST_AUTHOR_REGEX);
|
||||
if(match !== null) {
|
||||
if (match !== null) {
|
||||
const {author} = match.groups as any;
|
||||
displayDomain = author;
|
||||
domainIdents.push(author);
|
||||
mediaType = 'Podcast';
|
||||
} else {
|
||||
match = description.match(SPOTIFY_MUSIC_AUTHOR_REGEX);
|
||||
if(match !== null) {
|
||||
if (match !== null) {
|
||||
const {author, mediaType: mt} = match.groups as any;
|
||||
displayDomain = author;
|
||||
domainIdents.push(author);
|
||||
@@ -545,26 +559,26 @@ export const getAttributionIdentifier = (sub: Submission, useParentMediaDomain =
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
break;
|
||||
case 'Anchor FM Inc.':
|
||||
if(author_name !== undefined) {
|
||||
if (author_name !== undefined) {
|
||||
let match = author_name.match(ANCHOR_AUTHOR_REGEX);
|
||||
if(match !== null) {
|
||||
if (match !== null) {
|
||||
const {author} = match.groups as any;
|
||||
displayDomain = author;
|
||||
domainIdents.push(author);
|
||||
mediaType = 'podcast';
|
||||
}
|
||||
}
|
||||
break;
|
||||
break;
|
||||
case 'YouTube':
|
||||
mediaType = 'Video/Audio';
|
||||
break;
|
||||
default:
|
||||
// nah
|
||||
// nah
|
||||
}
|
||||
// handles yt, vimeo, twitter fine
|
||||
if(displayDomain === '') {
|
||||
if (displayDomain === '') {
|
||||
if (author_name !== undefined) {
|
||||
domainIdents.push(author_name);
|
||||
if (displayDomain === '') {
|
||||
@@ -579,95 +593,35 @@ export const getAttributionIdentifier = (sub: Submission, useParentMediaDomain =
|
||||
}
|
||||
}
|
||||
}
|
||||
if(displayDomain === '') {
|
||||
if (displayDomain === '') {
|
||||
// we have media but could not parse stuff for some reason just use url
|
||||
const u = new URL(sub.url);
|
||||
displayDomain = u.pathname;
|
||||
domainIdents.push(u.pathname);
|
||||
}
|
||||
provider = provider_name;
|
||||
} else if(sub.secure_media?.type !== undefined) {
|
||||
} else if (sub.secure_media?.type !== undefined) {
|
||||
domainIdents.push(sub.secure_media?.type);
|
||||
domain = sub.secure_media?.type;
|
||||
} else {
|
||||
domain = sub.domain;
|
||||
}
|
||||
|
||||
if(domain === '') {
|
||||
if (domain === '') {
|
||||
domain = sub.domain;
|
||||
}
|
||||
if (displayDomain === '') {
|
||||
displayDomain = domain;
|
||||
}
|
||||
if(domainIdents.length === 0 && domain !== '') {
|
||||
domainIdents.push(domain);
|
||||
}
|
||||
|
||||
return {display: displayDomain, domain, aliases: domainIdents, provider, mediaType};
|
||||
}
|
||||
|
||||
export const isItem = (item: Submission | Comment, stateCriteria: TypedActivityStates, logger: Logger): [boolean, SubmissionState|CommentState|undefined] => {
    if (stateCriteria.length === 0) {
        return [true, undefined];
    }

    const log = logger.child({leaf: 'Item Check'});

    for (const crit of stateCriteria) {
        const [pass, passCrit] = (() => {
            for (const k of Object.keys(crit)) {
                // @ts-ignore
                if (crit[k] !== undefined) {
                    switch(k) {
                        case 'removed':
                            const removed = activityIsRemoved(item);
                            if (removed !== crit['removed']) {
                                // @ts-ignore
                                log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${removed}`)
                                return [false, crit];
                            }
                            break;
                        case 'deleted':
                            const deleted = activityIsDeleted(item);
                            if (deleted !== crit['deleted']) {
                                // @ts-ignore
                                log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${deleted}`)
                                return [false, crit];
                            }
                            break;
                        case 'filtered':
                            const filtered = activityIsFiltered(item);
                            if (filtered !== crit['filtered']) {
                                // @ts-ignore
                                log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${filtered}`)
                                return [false, crit];
                            }
                            break;
                        default:
                            // @ts-ignore
                            if (item[k] !== undefined) {
                                // @ts-ignore
                                if (item[k] !== crit[k]) {
                                    // @ts-ignore
                                    log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${item[k]}`)
                                    return [false, crit];
                                }
                            } else {
                                log.warn(`Tried to test for Item property '${k}' but it did not exist`);
                            }
                            break;
                    }
                }
            }
            log.debug(`Passed: ${JSON.stringify(crit)}`);
            return [true, crit];
        })() as [boolean, SubmissionState|CommentState|undefined];
        if (pass) {
            return [true, passCrit];
        }
    }
    return [false, undefined];
}

export const activityIsRemoved = (item: Submission | Comment): boolean => {
    if (item instanceof Submission) {
        // when automod filters a post it gets this category
        return item.banned_at_utc !== null && item.removed_by_category !== 'automod_filtered';
    }
@@ -676,8 +630,8 @@ export const activityIsRemoved = (item: Submission|Comment): boolean => {
    return item.banned_at_utc !== null && item.removed;
}

export const activityIsFiltered = (item: Submission | Comment): boolean => {
    if (item instanceof Submission) {
        // when automod filters a post it gets this category
        return item.banned_at_utc !== null && item.removed_by_category === 'automod_filtered';
    }
@@ -686,8 +640,8 @@ export const activityIsFiltered = (item: Submission|Comment): boolean => {
    return item.banned_at_utc !== null && !item.removed;
}

export const activityIsDeleted = (item: Submission | Comment): boolean => {
    if (item instanceof Submission) {
        return item.removed_by_category === 'deleted';
    }
    return item.author.name === '[deleted]'

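A short sketch of how isItem and the activity-state helpers above might be called. The criteria keys (removed, deleted, filtered) mirror the switch in isItem; the comment instance and the logger are assumed to already exist and are placeholders, not code from this diff.

// Hypothetical usage sketch (not part of the diff above)
const liveOnlyCriteria: TypedActivityStates = [{removed: false, deleted: false, filtered: false}];
const [isLive, matchedCriteria] = isItem(someComment, liveOnlyCriteria, logger);
if (isLive) {
    // matchedCriteria is the specific criteria object that passed
    logger.info(`Comment is still live: ${JSON.stringify(matchedCriteria)}`);
}
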
@@ -1,29 +1,42 @@
import {labelledFormat, logLevels} from "../util";
import winston, {Logger} from "winston";
import {DuplexTransport} from "winston-duplex";

const {transports} = winston;

export const getLogger = (options: any, name = 'app'): Logger => {
    if (!winston.loggers.has(name)) {
        const {
            path,
            level,
            additionalTransports = [],
            defaultLabel = 'App',
        } = options || {};

        const consoleTransport = new transports.Console({
            handleExceptions: true,
            // @ts-expect-error
            handleRejections: true,
        });

        const myTransports = [
            consoleTransport,
            new DuplexTransport({
                stream: {
                    transform(chunk, e, cb) {
                        cb(null, chunk);
                    },
                    objectMode: true,
                },
                name: 'duplex',
                dump: false,
                handleExceptions: true,
                // @ts-expect-error
                handleRejections: true,
            }),
            ...additionalTransports,
        ];

        let errorTransports = [consoleTransport];

        for (const a of additionalTransports) {
            myTransports.push(a);
            errorTransports.push(a);
        }

        if (path !== undefined && path !== '') {
            const rotateTransport = new winston.transports.DailyRotateFile({
                dirname: path,
@@ -31,21 +44,19 @@ export const getLogger = (options: any, name = 'default'): Logger => {
                symlinkName: 'contextBot-current.log',
                filename: 'contextBot-%DATE%.log',
                datePattern: 'YYYY-MM-DD',
                maxSize: '5m',
                handleExceptions: true,
                handleRejections: true,
            });
            // @ts-ignore
            myTransports.push(rotateTransport);
            // @ts-ignore
            errorTransports.push(rotateTransport);
        }

        const loggerOptions = {
            level: level || 'info',
            format: labelledFormat(defaultLabel),
            transports: myTransports,
            levels: logLevels,
            exceptionHandlers: errorTransports,
            rejectionHandlers: errorTransports,
        };

        winston.loggers.add(name, loggerOptions);

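A sketch of calling this factory. The option names (level, path, defaultLabel, additionalTransports) are the ones destructured in the hunk above; the values themselves are placeholders, not code from this diff.

// Hypothetical usage sketch (not part of the diff above)
const logger = getLogger({
    level: 'debug',
    path: './logs',              // presence of a path enables the DailyRotateFile transport
    defaultLabel: 'App',         // passed through to labelledFormat()
    additionalTransports: [],    // any extra winston transports
});
logger.info('logger ready');

// later calls with the same name should reuse the logger already registered with winston.loggers
const sameLogger = getLogger({}, 'app');
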
216 src/Utils/memoryStore.ts Normal file
@@ -0,0 +1,216 @@
/**
 * quick and dirty hack to get access to lru.prune();
 *
 * this is identical to cache-manager/store/memory.js except for prune function
 * */
import Lru from 'lru-cache';
import cloneDeep from 'lodash/cloneDeep';

var isObject = function isObject(value: any) {
    return value instanceof Object && value.constructor === Object;
};

function clone(object: object) {
    if (typeof object === 'object' && object !== null) {
        return cloneDeep(object);
    }
    return object;
}

/**
 * Wrapper for lru-cache.
 * NOTE: If you want to use a memory store, you want to write your own to have full
 * control over its behavior. E.g., you might need the extra Promise overhead or
 * you may want to clone objects a certain way before storing.
 *
 * @param {object} args - Args passed to underlying lru-cache, plus additional optional args:
 * @param {boolean} [args.shouldCloneBeforeSet=true] - Whether to clone the data being stored.
 *   Default: true
 * @param {boolean} [args.usePromises=true] - Whether to enable the use of Promises. Default: true
 */
var memoryStore = function(args: any) {
    args = args || {};
    var self: any = {};
    self.name = 'memory';
    var Promise = args.promiseDependency || global.Promise;
    self.usePromises = !(typeof Promise === 'undefined' || args.noPromises);
    self.shouldCloneBeforeSet = args.shouldCloneBeforeSet !== false; // clone by default

    var ttl = args.ttl;
    var lruOpts = {
        max: args.max || 500,
        maxAge: (ttl || ttl === 0) ? ttl * 1000 : null,
        dispose: args.dispose,
        length: args.length,
        stale: args.stale,
        updateAgeOnGet: args.updateAgeOnGet || false
    };

    // @ts-ignore
    var lruCache = new Lru(lruOpts);

    // @ts-ignore
    var setMultipleKeys = function setMultipleKeys(keysValues, maxAge) {
        var length = keysValues.length;
        var values = [];
        for (var i = 0; i < length; i += 2) {
            lruCache.set(keysValues[i], keysValues[i + 1], maxAge);
            values.push(keysValues[i + 1]);
        }
        return values;
    };

    // @ts-ignore
    self.set = function(key, value, options, cb) {
        if (self.shouldCloneBeforeSet) {
            value = clone(value);
        }

        if (typeof options === 'function') {
            cb = options;
            options = {};
        }
        options = options || {};

        var maxAge = (options.ttl || options.ttl === 0) ? options.ttl * 1000 : lruOpts.maxAge;

        // @ts-ignore
        lruCache.set(key, value, maxAge);
        if (cb) {
            process.nextTick(cb.bind(null, null));
        } else if (self.usePromises) {
            return Promise.resolve(value);
        }
    };

    self.mset = function() {
        var args = Array.prototype.slice.apply(arguments);
        var cb;
        var options = {};

        if (typeof args[args.length - 1] === 'function') {
            cb = args.pop();
        }

        if (args.length % 2 > 0 && isObject(args[args.length - 1])) {
            options = args.pop();
        }

        // @ts-ignore
        var maxAge = (options.ttl || options.ttl === 0) ? options.ttl * 1000 : lruOpts.maxAge;

        var values = setMultipleKeys(args, maxAge);

        if (cb) {
            process.nextTick(cb.bind(null, null));
        } else if (self.usePromises) {
            return Promise.resolve(values);
        }
    };

    // @ts-ignore
    self.get = function(key, options, cb) {
        if (typeof options === 'function') {
            cb = options;
        }
        var value = lruCache.get(key);

        if (cb) {
            process.nextTick(cb.bind(null, null, value));
        } else if (self.usePromises) {
            return Promise.resolve(value);
        } else {
            return value;
        }
    };

    self.mget = function() {
        var args = Array.prototype.slice.apply(arguments);
        var cb;
        var options = {};

        if (typeof args[args.length - 1] === 'function') {
            cb = args.pop();
        }

        if (isObject(args[args.length - 1])) {
            options = args.pop();
        }

        var values = args.map(function(key) {
            return lruCache.get(key);
        });

        if (cb) {
            process.nextTick(cb.bind(null, null, values));
        } else if (self.usePromises) {
            return Promise.resolve(values);
        } else {
            return values;
        }
    };

    self.del = function() {
        var args = Array.prototype.slice.apply(arguments);
        var cb;
        var options = {};

        if (typeof args[args.length - 1] === 'function') {
            cb = args.pop();
        }

        if (isObject(args[args.length - 1])) {
            options = args.pop();
        }

        if (Array.isArray(args[0])) {
            args = args[0];
        }

        args.forEach(function(key) {
            lruCache.del(key);
        });

        if (cb) {
            process.nextTick(cb.bind(null, null));
        } else if (self.usePromises) {
            return Promise.resolve();
        }
    };

    // @ts-ignore
    self.reset = function(cb) {
        lruCache.reset();
        if (cb) {
            process.nextTick(cb.bind(null, null));
        } else if (self.usePromises) {
            return Promise.resolve();
        }
    };

    // @ts-ignore
    self.keys = function(cb) {
        var keys = lruCache.keys();
        if (cb) {
            process.nextTick(cb.bind(null, null, keys));
        } else if (self.usePromises) {
            return Promise.resolve(keys);
        } else {
            return keys;
        }
    };

    // @ts-ignore
    self.prune = function(cb) {
        lruCache.prune();
        if (cb) {
            process.nextTick(cb.bind(null, null));
        } else if (self.usePromises) {
            return Promise.resolve();
        }
    }

    return self;
};

export const create = (args: any) => memoryStore(args);
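Because the module mirrors cache-manager's bundled memory store and exports the same create hook, it can presumably be wired in through cache-manager's store option; the sketch below shows that wiring and the added prune() being reached through the cache's store. The import paths, option values, and the store property access are assumptions, not code from this diff.

// Hypothetical usage sketch (not part of the diff above)
import cacheManager from 'cache-manager';
import * as memoryStore from './memoryStore';

const example = async () => {
    // pass the module itself as the store; cache-manager calls its create() export
    const cache = cacheManager.caching({store: memoryStore, max: 500, ttl: 60});
    await cache.set('someKey', {hello: 'world'});
    // the reason for the copy: the underlying store now exposes prune()
    (cache.store as any).prune();
};
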
1113 src/Web/Client/index.ts Normal file
File diff suppressed because it is too large
108 src/Web/Common/defaults.ts Normal file
@@ -0,0 +1,108 @@
import {BotStats, BotStatusResponse, SubredditDataResponse} from "./interfaces";
import {ManagerStats, RunningState} from "../../Subreddit/Manager";
import {Invokee, RunState} from "../../Common/interfaces";
import {cacheStats} from "../../util";

const managerStats: ManagerStats = {
    actionsRun: 0,
    actionsRunSinceStart: 0,
    actionsRunSinceStartTotal: 0,
    actionsRunTotal: 0,
    cache: {
        currentKeyCount: 0,
        isShared: false,
        missPercent: "-",
        provider: "-",
        requestRate: 0,
        totalMiss: 0,
        totalRequests: 0,
        types: cacheStats()
    },
    checksRunSinceStartTotal: 0,
    checksRunTotal: 0,
    checksTriggered: 0,
    checksTriggeredSinceStart: 0,
    checksTriggeredSinceStartTotal: 0,
    checksTriggeredTotal: 0,
    eventsAvg: 0,
    eventsCheckedSinceStartTotal: 0,
    eventsCheckedTotal: 0,
    rulesAvg: 0,
    rulesCachedSinceStartTotal: 0,
    rulesCachedTotal: 0,
    rulesRunSinceStartTotal: 0,
    rulesRunTotal: 0,
    rulesTriggeredSinceStartTotal: 0,
    rulesTriggeredTotal: 0,
};
const botStats: BotStats = {
    apiAvg: '-',
    apiDepletion: "-",
    apiLimit: 0,
    limitReset: '-',
    limitResetHuman: "-",
    nannyMode: "-",
    nextHeartbeat: "-",
    nextHeartbeatHuman: "-",
    startedAtHuman: "-"
};

const runningState: RunningState = {
    causedBy: '-' as Invokee,
    state: '-' as RunState,
}

const sub: SubredditDataResponse = {
    botState: runningState,
    checks: {comments: 0, submissions: 0},
    delayBy: "-",
    dryRun: false,
    eventsState: runningState,
    globalMaxWorkers: 0,
    hardLimit: 0,
    heartbeat: 0,
    heartbeatHuman: "-",
    indicator: "-",
    logs: [],
    maxWorkers: 0,
    name: "-",
    pollingInfo: [],
    queueState: runningState,
    queuedActivities: 0,
    runningActivities: 0,
    softLimit: 0,
    startedAt: "-",
    startedAtHuman: "-",
    stats: managerStats,
    subMaxWorkers: 0,
    validConfig: false,
    wikiHref: "-",
    wikiLastCheck: "-",
    wikiLastCheckHuman: "-",
    wikiLocation: "-",
    wikiRevision: "-",
    wikiRevisionHuman: "-"
};

export const defaultBotStatus = (subreddits: string[] = []) => {

    const subs: SubredditDataResponse[] = [
        {
            ...sub,
            name: 'All',
        },
        ...subreddits.map(x => ({...sub, name: x}))
    ];

    const data: BotStatusResponse = {
        subreddits: subs,
        system: {
            startedAt: '-',
            running: false,
            account: '-',
            name: '-',
            ...botStats,
        }
    };
    return data;
}
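A quick sketch of the placeholder payload this produces; the subreddit names below are arbitrary examples, not values from this diff.

// Hypothetical usage sketch (not part of the diff above)
const placeholder = defaultBotStatus(['mealtimevideos', 'redditdev']);
// placeholder.subreddits => [{name: 'All', ...}, {name: 'mealtimevideos', ...}, {name: 'redditdev', ...}]
// placeholder.system => every stat zeroed or set to '-', with running: false
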
59 src/Web/Common/interfaces.ts Normal file
@@ -0,0 +1,59 @@
import {ManagerStats, RunningState} from "../../Subreddit/Manager";

export interface BotStats {
    startedAtHuman: string,
    nextHeartbeat: string,
    nextHeartbeatHuman: string,
    apiLimit: number,
    apiAvg: string | number,
    nannyMode: string,
    apiDepletion: string,
    limitReset: string | number,
    limitResetHuman: string
}

export interface SubredditDataResponse {
    name: string
    logs: string[]
    botState: RunningState
    eventsState: RunningState
    queueState: RunningState
    indicator: string
    queuedActivities: number
    runningActivities: number
    maxWorkers: number
    subMaxWorkers: number
    globalMaxWorkers: number
    validConfig: string | boolean
    dryRun: string | boolean
    pollingInfo: string[]
    checks: {
        submissions: number
        comments: number
    }
    wikiLocation: string
    wikiHref: string
    wikiRevisionHuman: string
    wikiRevision: string
    wikiLastCheckHuman: string
    wikiLastCheck: string
    stats: ManagerStats
    startedAt: string
    startedAtHuman: string
    delayBy: string
    softLimit?: number
    hardLimit?: number
    heartbeatHuman?: string
    heartbeat: number
}

export interface BotStatusResponse {
    system: BotStats & {
        startedAt: string,
        name: string,
        running: boolean,
        error?: string,
        account: string,
    }
    subreddits: SubredditDataResponse[]
}
33 src/Web/Common/middleware.ts Normal file
@@ -0,0 +1,33 @@
import {Request, Response} from 'express';

export interface boolOptions {
    name: string,
    defaultVal: any
}

export const booleanMiddle = (boolParams: (string | boolOptions)[] = []) => async (req: Request, res: Response, next: Function) => {
    for (const b of boolParams) {
        const opts = typeof b === 'string' ? {name: b, defaultVal: undefined} : b as boolOptions;

        const bVal = req.query[opts.name] as any;
        if (bVal !== undefined) {
            let truthyVal: boolean;
            if (bVal === 'true' || bVal === true || bVal === 1 || bVal === '1') {
                truthyVal = true;
            } else if (bVal === 'false' || bVal === false || bVal === 0 || bVal === '0') {
                truthyVal = false;
            } else {
                res.status(400);
                return res.send(`Expected query parameter ${opts.name} to be a truthy value. Got "${bVal}" but must be one of these: true/false, 1/0`);
            }
            // @ts-ignore
            req.query[opts.name] = truthyVal;
        } else if (opts.defaultVal !== undefined) {
            req.query[opts.name] = opts.defaultVal;
        } else {
            res.status(400);
            return res.send(`Expected query parameter ${opts.name} to be a truthy value but it was missing. Must be one of these: true/false, 1/0`);
        }
    }
    next();
}
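A sketch of attaching the middleware to an Express route; the route path and parameter names here are illustrative only, not taken from this diff.

// Hypothetical usage sketch (not part of the diff above)
import express from 'express';

const app = express();

// "stream" must be sent as true/false or 1/0; "formatted" falls back to true when omitted
app.get('/logs', booleanMiddle(['stream', {name: 'formatted', defaultVal: true}]), (req, res) => {
    const {stream, formatted} = req.query as any; // both are real booleans after the middleware runs
    res.json({stream, formatted});
});
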
22 src/Web/Common/util.ts Normal file
@@ -0,0 +1,22 @@
import {App} from "../../App";
import {BotStats} from "./interfaces";
import dayjs from "dayjs";
import {formatNumber} from "../../util";
import Bot from "../../Bot";

export const opStats = (bot: Bot): BotStats => {
    const limitReset = dayjs(bot.client.ratelimitExpiration);
    const nextHeartbeat = bot.nextHeartbeat !== undefined ? bot.nextHeartbeat.local().format('MMMM D, YYYY h:mm A Z') : 'N/A';
    const nextHeartbeatHuman = bot.nextHeartbeat !== undefined ? `in ${dayjs.duration(bot.nextHeartbeat.diff(dayjs())).humanize()}` : 'N/A'
    return {
        startedAtHuman: `${dayjs.duration(dayjs().diff(bot.startedAt)).humanize()}`,
        nextHeartbeat,
        nextHeartbeatHuman,
        apiLimit: bot.client !== undefined ? bot.client.ratelimitRemaining : 0,
        apiAvg: formatNumber(bot.apiRollingAvg),
        nannyMode: bot.nannyMode || 'Off',
        apiDepletion: bot.apiEstDepletion === undefined ? 'Not Calculated' : bot.apiEstDepletion.humanize(),
        limitReset: limitReset.format(),
        limitResetHuman: `in ${dayjs.duration(limitReset.diff(dayjs())).humanize()}`,
    }
}
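And a sketch of where a helper like opStats would typically be consumed; the /status route, the surrounding app, and the bot variable are assumptions for illustration, not code from this diff.

// Hypothetical usage sketch (not part of the diff above)
app.get('/status', (req, res) => {
    const stats = opStats(bot); // `bot` is assumed to be an already-running Bot instance
    res.json({system: {...stats, running: true}});
});
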
Some files were not shown because too many files have changed in this diff.