mirror of
https://github.com/FoxxMD/context-mod.git
synced 2026-01-14 07:57:57 -05:00
Compare commits
56 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d9ab81ab8c | ||
|
|
d8003e049c | ||
|
|
b67a933084 | ||
|
|
d684ecc0ff | ||
|
|
9efd4751d8 | ||
|
|
9331c2a3c8 | ||
|
|
d6f7ce2441 | ||
|
|
ffd7033faf | ||
|
|
df5825d8df | ||
|
|
42c6ca7af5 | ||
|
|
1e94835f97 | ||
|
|
6230ef707d | ||
|
|
b290a4696d | ||
|
|
4c965f7215 | ||
|
|
ce990094a1 | ||
|
|
4196d2acb0 | ||
|
|
3150da8b4a | ||
|
|
655c82d5e1 | ||
|
|
98691bd19c | ||
|
|
73302b718e | ||
|
|
bbf91ceac0 | ||
|
|
67b793c2aa | ||
|
|
d635e5a65d | ||
|
|
8dc140a953 | ||
|
|
eaa9f627e2 | ||
|
|
16cb28cb72 | ||
|
|
fa450f9f8f | ||
|
|
1e76ca6c0e | ||
|
|
25b7ea497f | ||
|
|
2f93774346 | ||
|
|
e2f6a92a90 | ||
|
|
0ffddaac9e | ||
|
|
4ddf6ddf26 | ||
|
|
7f61c190ea | ||
|
|
fbf328d90f | ||
|
|
36c6f7f1b8 | ||
|
|
0379ad17b9 | ||
|
|
02ed9f91f9 | ||
|
|
9fcc3db7b2 | ||
|
|
65ea84a69d | ||
|
|
a4b8d3a8ef | ||
|
|
2692a5fecb | ||
|
|
192c1659a0 | ||
|
|
cc241e41f4 | ||
|
|
4bfb57a6cf | ||
|
|
b6dedae7a1 | ||
|
|
f2783bd7a4 | ||
|
|
2efe41eadd | ||
|
|
ebd60b9abe | ||
|
|
2a16df49a4 | ||
|
|
0d6841259b | ||
|
|
8c0755c8c2 | ||
|
|
5e1da5bc5d | ||
|
|
242c6a49b5 | ||
|
|
8123c34463 | ||
|
|
aa399c160e |
@@ -8,6 +8,7 @@ coverage
|
||||
.idea
|
||||
*.bak
|
||||
*.sqlite
|
||||
*.sqlite*
|
||||
*.json
|
||||
*.json5
|
||||
*.yaml
|
||||
|
||||
@@ -120,6 +120,10 @@ ENV NPM_CONFIG_LOGLEVEL debug
|
||||
# can set database to use more performant better-sqlite3 since we control everything
|
||||
ENV DB_DRIVER=better-sqlite3
|
||||
|
||||
# NODE_ARGS are expanded after `node` command in the entrypoint IE "node {NODE_ARGS} src/index.js run"
|
||||
# by default enforce better memory mangement by limiting max long-lived GC space to 512MB
|
||||
ENV NODE_ARGS="--max_old_space_size=512"
|
||||
|
||||
ARG webPort=8085
|
||||
ENV PORT=$webPort
|
||||
EXPOSE $PORT
|
||||
|
||||
@@ -30,6 +30,7 @@ Feature Highlights for **Moderators:**
|
||||
* Event notification via Discord
|
||||
* [**Web interface**](#web-ui-and-screenshots) for monitoring, administration, and oauth bot authentication
|
||||
* [**Placeholders**](/docs/subreddit/actionTemplating.md) (like automoderator) can be configured via a wiki page or raw text and supports [mustache](https://mustache.github.io) templating
|
||||
* [**Partial Configurations**](/docs/subreddit/components/README.md#partial-configurations) -- offload parts of your configuration to shared locations to consolidate logic between multiple subreddits
|
||||
|
||||
Feature highlights for **Developers and Hosting (Operators):**
|
||||
|
||||
@@ -43,6 +44,7 @@ Feature highlights for **Developers and Hosting (Operators):**
|
||||
* Historical statistics
|
||||
* [Docker container support](/docs/operator/installation.md#docker-recommended)
|
||||
* Easy, UI-based [OAuth authentication](/docs/operator/addingBot.md) for adding Bots and moderator dashboard
|
||||
* Integration with [InfluxDB](https://www.influxdata.com) for detailed [time-series metrics](/docs/operator/database.md#influx) and a pre-built [Grafana](https://grafana.com) [dashboard](/docs/operator/database.md#grafana)
|
||||
|
||||
# Table of Contents
|
||||
|
||||
@@ -145,6 +147,13 @@ A built-in editor using [monaco-editor](https://microsoft.github.io/monaco-edito
|
||||
|
||||

|
||||
|
||||
### [Grafana Dashboard](/docs/operator/database.md#grafana)
|
||||
|
||||
* Overall stats (active bots/subreddits, api calls, per second/hour/minute activity ingest)
|
||||
* Over time graphs for events, per subreddit, and for individual rules/check/actions
|
||||
|
||||

|
||||
|
||||
## License
|
||||
|
||||
[MIT](/LICENSE)
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
# used https://github.com/linuxserver/docker-plex as a template
|
||||
|
||||
# NODE_ARGS can be passed by ENV in docker command like "docker run foxxmd/context-mod -e NODE_ARGS=--optimize_for_size"
|
||||
|
||||
exec \
|
||||
s6-setuidgid abc \
|
||||
/usr/local/bin/node /app/src/index.js run
|
||||
/usr/local/bin/node $NODE_ARGS /app/src/index.js run
|
||||
|
||||
BIN
docs/images/grafana.jpg
Normal file
BIN
docs/images/grafana.jpg
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 183 KiB |
@@ -130,3 +130,59 @@ retention: 2000
|
||||
runs:
|
||||
...
|
||||
```
|
||||
|
||||
# Influx
|
||||
|
||||
ContextMod supports writing detailed time-series data to [InfluxDB](https://www.influxdata.com/).
|
||||
|
||||
This data can be used to monitor the overall health, performance, and metrics for a ContextMod server. Currently, this data can **only be used by an Operator** as it requires access to the operator configuration and CM instance.
|
||||
|
||||
CM supports InfluxDB OSS > 2.3 or InfluxDB Cloud.
|
||||
|
||||
**Note:** This is an **advanced feature** and assumes you have enough technical knowledge to follow the documentation provided by each application to deploy and configure them. No support is guaranteed for installation, configuration, or use of Influx and Grafana.
|
||||
|
||||
## Supported Metrics
|
||||
|
||||
TBA
|
||||
|
||||
## Setup
|
||||
|
||||
### InfluxDB OSS
|
||||
|
||||
* Install [InfluxDB](https://docs.influxdata.com/influxdb/v2.3/install/)
|
||||
* [Configure InfluxDB using the UI](https://docs.influxdata.com/influxdb/v2.3/install/#set-up-influxdb-through-the-ui)
|
||||
* You will need **Username**, **Password**, **Organization Name**, and **Bucket Name** later for Grafana setup so make sure to record them somewhere
|
||||
* [Create a Token](https://docs.influxdata.com/influxdb/v2.3/security/tokens/create-token/) with enough permissions to write/read to the bucket you configured
|
||||
* After the token is created **view/copy the token** to clipboard by clicking the token name. You will need this for Grafana setup.
|
||||
|
||||
### ContextMod
|
||||
|
||||
Add the following block to the top-level of your operator configuration:
|
||||
|
||||
```yaml
|
||||
influxConfig:
|
||||
credentials:
|
||||
url: 'http://localhost:8086' # URL to your influx DB instance
|
||||
token: '9RtZ5YZ6bfEXAMPLENJsTSKg==' # token created in the previous step
|
||||
org: MyOrg # organization created in the previous step
|
||||
bucket: contextmod # name of the bucket created in the previous step
|
||||
```
|
||||
|
||||
## Grafana
|
||||
|
||||
A pre-built dashboard for [Grafana](https://grafana.com) can be imported to display overall metrics/stats using InfluxDB data.
|
||||
|
||||

|
||||
|
||||
* Create a new Data Source using **InfluxDB** type
|
||||
* Choose **Flux** for the **Query Language**
|
||||
* Fill in the details for **URL**, **Basic Auth Details** and **InfluxDB Details** using the data you created in the [Influx Setup step](#influxdb-oss)
|
||||
* Set **Min time interval** to `60s`
|
||||
* Click **Save and test**
|
||||
* Import Dashboard
|
||||
* **Browse** the Dashboard pane
|
||||
* Click **Import** and **upload** the [grafana dashboard json file](/docs/operator/grafana.json)
|
||||
* Choose the data source you created from the **InfluxDB CM** dropdown
|
||||
* Click **Import**
|
||||
|
||||
The dashboard can be filtered by **Bots** and **Subreddits** dropdowns at the top of the page to get more specific details.
|
||||
|
||||
3148
docs/operator/grafana.json
Normal file
3148
docs/operator/grafana.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -76,3 +76,21 @@ Be aware that Heroku's [free dyno plan](https://devcenter.heroku.com/articles/fr
|
||||
* The **Worker** dyno **will not** go to sleep but you will NOT be able to access the web interface. You can, however, still see how CM is running by reading the logs for the dyno.
|
||||
|
||||
If you want to use a free dyno it is recommended you perform first-time setup (bot authentication and configuration, testing, etc...) with the **Web** dyno, then SWITCH to a **Worker** dyno so it can run 24/7.
|
||||
|
||||
# Memory Management
|
||||
|
||||
Node exhibits [lazy GC cleanup](https://github.com/FoxxMD/context-mod/issues/90#issuecomment-1190384006) which can result in memory usage for long-running CM instances increasing to unreasonable levels. This problem does not seem to be an issue with CM itself but with Node's GC approach. The increase does not affect CM's performance and, for systems with less memory, the Node *should* limit memory usage based on total available.
|
||||
|
||||
In practice CM uses ~130MB for a single bot, single subreddit setup. Up to ~350MB for many (10+) bots or many (20+) subreddits.
|
||||
|
||||
If you need to rein in CM's memory usage for some reason this can be addressed by setting an upper limit for memory usage with `node` args by using either:
|
||||
|
||||
**--max_old_space_size=**
|
||||
|
||||
Value is megabytes. This sets an explicit limit on GC memory usage.
|
||||
|
||||
This is set by default in the [Docker](#docker-recommended) container using the env `NODE_ARGS` to `--max_old_space_size=512`. It can be disabled by overriding the ENV.
|
||||
|
||||
**--optimize_for_size**
|
||||
|
||||
Tells Node to optimize for (less) memory usage rather than some performance optimizations. This option is not memory size dependent. In practice performance does not seem to be affected and it reduces (but not entirely prevents) memory increases over long periods.
|
||||
|
||||
@@ -2,44 +2,92 @@ Actions that can submit text (Report, Comment, UserNote) will have their `conten
|
||||
|
||||
See here for a [cheatsheet](https://gist.github.com/FoxxMD/d365707cf99fdb526a504b8b833a5b78) and [here](https://www.tsmean.com/articles/mustache/the-ultimate-mustache-tutorial/) for a more thorough tutorial.
|
||||
|
||||
# Template Data
|
||||
|
||||
## Activity Data
|
||||
|
||||
Activity data can be accessed using the `item` variable. Example
|
||||
|
||||
```
|
||||
This activity is a {{item.kind}} with {{item.votes}} votes, created {{item.age}} ago.
|
||||
```
|
||||
Produces:
|
||||
|
||||
> This activity is a submission with 10 votes created 5 minutes ago.
|
||||
|
||||
### Common
|
||||
|
||||
All Actions with `content` have access to this data:
|
||||
|
||||
```json5
|
||||
| Name | Description | Example |
|
||||
|-------------|-----------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------|
|
||||
| `kind` | The Activity type (submission or comment) | submission |
|
||||
| `author` | Name of the Author of the Activity being processed | FoxxMD |
|
||||
| `permalink` | URL to the Activity | https://reddit.com/r/mySubreddit/comments/ab23f/my_post |
|
||||
| `votes` | Number of upvotes | 69 |
|
||||
| `age` | The age of the Activity in a [human friendly format](https://day.js.org/docs/en/durations/humanize) | 5 minutes |
|
||||
| `botLink` | A URL to CM's introduction thread | https://www.reddit.com/r/ContextModBot/comments/otz396/introduction_to_contextmodbot |
|
||||
|
||||
{
|
||||
item: {
|
||||
kind: 'string', // the type of item (comment/submission)
|
||||
author: 'string', // name of the item author (reddit user)
|
||||
permalink: 'string', // a url to the item
|
||||
url: 'string', // if the item is a Submission then its URL (external for link type submission, reddit link for self-posts)
|
||||
title: 'string', // if the item is a Submission, then the title of the Submission,
|
||||
botLink: 'string' // a link to the bot's FAQ
|
||||
},
|
||||
rules: {
|
||||
// contains all rules that were run and are accessible using the name, lowercased, with all spaces/dashes/underscores removed
|
||||
}
|
||||
}
|
||||
### Submissions
|
||||
|
||||
If the **Activity** is a Submission these additional properties are accessible:
|
||||
|
||||
| Name | Description | Example |
|
||||
|---------------|-----------------------------------------------------------------|-------------------------|
|
||||
| `upvoteRatio` | The upvote ratio | 100% |
|
||||
| `nsfw` | If the submission is marked as NSFW | true |
|
||||
| `spoiler` | If the submission is marked as a spoiler | true |
|
||||
| `url` | If the submission was a link then this is the URL for that link | http://example.com |
|
||||
| `title` | The title of the submission | Test post please ignore |
|
||||
|
||||
### Comments
|
||||
|
||||
If the **Activity** is a Comment these additional properties are accessible:
|
||||
|
||||
| Name | Description | Example |
|
||||
|------|--------------------------------------------------------------|---------|
|
||||
| `op` | If the Author is the OP of the Submission this comment is in | true |
|
||||
|
||||
### Moderator
|
||||
|
||||
If the **Activity** occurred in a Subreddit the Bot moderates these properties are accessible:
|
||||
|
||||
| Name | Description | Example |
|
||||
|---------------|-------------------------------------|---------|
|
||||
| `reports` | The number of reports received | 1 |
|
||||
| `modReports` | The number of reports by moderators | 1 |
|
||||
| `userReports` | The number of reports by users | 1 |
|
||||
|
||||
## Rule Data
|
||||
|
||||
### Summary
|
||||
|
||||
A summary of what rules were processed and which were triggered, with results, is available using the `ruleSummary` variable. Example:
|
||||
|
||||
```
|
||||
A summary of rules processed for this activity:
|
||||
|
||||
The properties of `rules` are accessible using the name, lower-cased, with all spaces/dashes/underscores removed. If no name is given `kind` is used as `name`. Example:
|
||||
|
||||
{{ruleSummary}}
|
||||
```
|
||||
|
||||
"rules": [
|
||||
{
|
||||
"name": "My Custom-Recent Activity Rule", // mycustomrecentactivityrule
|
||||
"kind": "recentActivity"
|
||||
},
|
||||
{
|
||||
// name = repeatsubmission
|
||||
"kind": "repeatActivity",
|
||||
}
|
||||
]
|
||||
Would produce:
|
||||
> A summary of rules processed for this activity:
|
||||
>
|
||||
> * namedRegexRule - ✘
|
||||
> * nameAttributionRule - ✓ - 1 Attribution(s) met the threshold of < 20%, with 1 (3%) of 32 Total -- window: 6 months
|
||||
> * noXPost ✓ - ✓ 1 of 1 unique items repeated <= 3 times, largest repeat: 1
|
||||
|
||||
|
||||
### Individual
|
||||
|
||||
Individual **Rules** can be accessed using the name of the rule, **lower-cased, with all spaces/dashes/underscores removed.** Example:
|
||||
|
||||
```
|
||||
Submission was repeated {{rules.noxpost.largestRepeat}} times
|
||||
```
|
||||
Produces
|
||||
|
||||
**To see what data is available for individual Rules [consult the schema](#configuration) for each Rule.**
|
||||
> Submission was repeated 7 times
|
||||
|
||||
#### Quick Templating Tutorial
|
||||
|
||||
|
||||
@@ -21,6 +21,7 @@ This list is not exhaustive. [For complete documentation on a subreddit's config
|
||||
* [Author](#author)
|
||||
* [Regex](#regex)
|
||||
* [Repost](#repost)
|
||||
* [Sentiment Analysis](#sentiment-analysis)
|
||||
* [Rule Sets](#rule-sets)
|
||||
* [Actions](#actions)
|
||||
* [Named Actions](#named-actions)
|
||||
@@ -66,6 +67,7 @@ This list is not exhaustive. [For complete documentation on a subreddit's config
|
||||
* [Check Order](#check-order)
|
||||
* [Rule Order](#rule-order)
|
||||
* [Configuration Re-use and Caching](#configuration-re-use-and-caching)
|
||||
* [Partial Configurations](#partial-configurations)
|
||||
* [Subreddit-ready examples](#subreddit-ready-examples)
|
||||
|
||||
# Runs
|
||||
@@ -368,6 +370,12 @@ The **Repost** rule is used to find reposts for both **Submissions** and **Comme
|
||||
|
||||
This rule is for searching **all of Reddit** for reposts, as opposed to just the history of the Author of the Activity being checked. If you only want to check for reposts by the Author of the Activity being checked you should use the [Repeat Activity](/docs/subreddit/components/repeatActivity) rule.
|
||||
|
||||
### Sentiment Analysis
|
||||
|
||||
[**Full Documentation**](/docs/subreddit/components/sentiment)
|
||||
|
||||
The **Sentiment Rule** is used to determine the overall emotional intent (negative, neutral, positive) of a Submission or Comment by analyzing the actual text content of the Activity.
|
||||
|
||||
# Rule Sets
|
||||
|
||||
The `rules` list on a `Check` can contain both `Rule` objects and `RuleSet` objects.
|
||||
@@ -656,6 +664,8 @@ actions:
|
||||
|
||||
### Mod Note
|
||||
|
||||
[**Full Documentation**](/docs/subreddit/components/modActions/README.md#mod-note-action)
|
||||
|
||||
Add a [Mod Note](https://www.reddit.com/r/modnews/comments/t8vafc/announcing_mod_notes/) for the Author of the Activity.
|
||||
|
||||
* `type` must be one of the [valid note labels](https://www.reddit.com/dev/api#POST_api_mod_notes):
|
||||
@@ -1064,7 +1074,7 @@ If the Check is using `AND` condition for its rules (default) then if either Rul
|
||||
|
||||
**It is therefore advantageous to list your lightweight Rules first in each Check.**
|
||||
|
||||
### Configuration Re-use and Caching
|
||||
## Configuration Re-use and Caching
|
||||
|
||||
ContextMod implements caching functionality for:
|
||||
|
||||
@@ -1088,6 +1098,116 @@ PROTIP: You can monitor the re-use of cache in the `Cache` section of your subre
|
||||
|
||||
[Learn more about how Caching works](/docs/operator/caching.md)
|
||||
|
||||
## Partial Configurations
|
||||
|
||||
ContextMod supports fetching parts of a configuration (a **Fragment**) from an external source. Fragments are an advanced feature and should only be used by users who are familiar with CM's configuration syntax and understand the risks/downsides associated with fragmenting a configuration.
|
||||
|
||||
**Fragments** are supported for:
|
||||
|
||||
* [Runs](#runs)
|
||||
* [Checks](#checks)
|
||||
* [Rules](#rules)
|
||||
* [Actions](#actions)
|
||||
|
||||
### Should You Use Partial Configurations?
|
||||
|
||||
* **PROS**
|
||||
* Consolidate shared configuration for many subreddits into one location
|
||||
* Shared configuration can be updated independently of subreddits
|
||||
* Allows sharing access to configuration outside of moderators of a specific subreddit or even reddit
|
||||
* **CONS**
|
||||
* Editor does not currently support viewing, editing, or updating Fragments. Only the Fragment URL is visible in a Subreddit's configuration
|
||||
* No editor support for viewing obscures "complete view" of configuration and makes editor less useful for validation
|
||||
* Currently, editor cannot validate individual Fragments. They must be copy-pasted "in place" within a normal configuration.
|
||||
* Using external (non-wiki) sources means **you** are responsible for the security/access to the fragment
|
||||
|
||||
In general, Fragments should only be used to offload small, well-tested pieces of a configuration that can be shared between many subreddits. Examples:
|
||||
|
||||
* A regex Rule for spam links
|
||||
* A Recent Activity Rule for reporting users from freekarma subreddits
|
||||
|
||||
### Usage
|
||||
|
||||
A Fragment may be either a special string or a Fragment object. The fetched Fragment can be either an object or an array of objects of the type of Fragment being replaced.
|
||||
|
||||
**String**
|
||||
|
||||
If the value starts with `wiki:` then the value following it will be used to get a wiki page from the current subreddit
|
||||
|
||||
* EX `wiki:botconfig/myFragment` tries to get `https://reddit.com/r/currentSubreddit/wiki/botconfig/myFragment`
|
||||
|
||||
If the value starts with `wiki:` and ends with `|someValue` then `someValue` will be used as the base subreddit for the wiki page
|
||||
|
||||
* EX `wiki:myFragment/test|ContextModBot` tries to get `https://reddit.com/r/ContextModBot/wiki/myFragment/test`
|
||||
|
||||
If the value starts with `url:` then the value is fetched as an external url and expects raw text returned
|
||||
|
||||
* EX `url:https://pastebin.com/raw/38qfL7mL` tries to get the text response of `https://pastebin.com/raw/38qfL7mL`
|
||||
|
||||
**Object**
|
||||
|
||||
The object contains:
|
||||
|
||||
* `path` -- REQUIRED string following rules above
|
||||
* `ttl` -- OPTIONAL, number of seconds to cache the URL result. Defaults to `WikiTTL`
|
||||
|
||||
#### Examples
|
||||
|
||||
**Replacing A Rule with a URL Fragment**
|
||||
|
||||
```yaml
|
||||
runs:
|
||||
- checks:
|
||||
- name: Free Karma Alert
|
||||
description: Check if author has posted in 'freekarma' subreddits
|
||||
kind: submission
|
||||
rules:
|
||||
- 'url:https://gist.githubusercontent.com/FoxxMD/0e1ee1ab950ff4d1f0cd26172bae7f8f/raw/0ebfaca903e4a651827effac5775c8718fb6e1f2/fragmentRule.yaml'
|
||||
- name: badSub
|
||||
kind: recentActivity
|
||||
useSubmissionAsReference: false
|
||||
thresholds:
|
||||
# if the number of activities (sub/comment) found CUMULATIVELY in the subreddits listed is
|
||||
# equal to or greater than 1 then the rule is triggered
|
||||
- threshold: '>= 1'
|
||||
subreddits:
|
||||
- MyBadSubreddit
|
||||
window: 7 days
|
||||
actions:
|
||||
- kind: report
|
||||
content: 'uses freekarma subreddits and bad subreddits'
|
||||
```
|
||||
|
||||
**Replacing A Rule with a URL Fragment (Multiple)**
|
||||
|
||||
```yaml
|
||||
runs:
|
||||
- checks:
|
||||
- name: Free Karma Alert
|
||||
description: Check if author has posted in 'freekarma' subreddits
|
||||
kind: submission
|
||||
rules:
|
||||
- 'url:https://gist.githubusercontent.com/FoxxMD/0e1ee1ab950ff4d1f0cd26172bae7f8f/raw/0ebfaca903e4a651827effac5775c8718fb6e1f2/fragmentRuleArray.yaml'
|
||||
actions:
|
||||
- kind: report
|
||||
content: 'uses freekarma subreddits and bad subreddits'
|
||||
```
|
||||
|
||||
**Replacing A Rule with a Wiki Fragment**
|
||||
|
||||
```yaml
|
||||
runs:
|
||||
- checks:
|
||||
- name: Free Karma Alert
|
||||
description: Check if author has posted in 'freekarma' subreddits
|
||||
kind: submission
|
||||
rules:
|
||||
- 'wiki:freeKarmaFrag'
|
||||
actions:
|
||||
- kind: report
|
||||
content: 'uses freekarma subreddits'
|
||||
```
|
||||
|
||||
# Subreddit-Ready Examples
|
||||
|
||||
Refer to the [Subreddit-Ready Examples](/docs/subreddit/components/subredditReady) section to find ready-to-use configurations for common scenarios (spam, freekarma blocking, etc...). This is also a good place to familiarize yourself with what complete configurations look like.
|
||||
|
||||
183
docs/subreddit/components/sentiment/README.md
Normal file
183
docs/subreddit/components/sentiment/README.md
Normal file
@@ -0,0 +1,183 @@
|
||||
# Table of Contents
|
||||
|
||||
* [Overview](#overview)
|
||||
* [Pros And Cons](#pros-and-cons)
|
||||
* [Technical Overview](#technical-overview)
|
||||
* [Sentiment Values](#sentiment-values)
|
||||
* [Usage](#usage)
|
||||
* [Testing Sentiment Value](#testing-sentiment-value)
|
||||
* [Numerical](#numerical)
|
||||
* [Text](#text)
|
||||
* [Sentiment Rule](#sentiment-rule)
|
||||
* [Historical](#historical)
|
||||
* [Examples](#examples)
|
||||
|
||||
# Overview
|
||||
|
||||
[Sentiment Analysis](https://monkeylearn.com/sentiment-analysis/) (SA) is a form of [Natural Language Processing](https://monkeylearn.com/natural-language-processing/) (NLP) used to extract the overall [sentiment](https://www.merriam-webster.com/dictionary/sentiment) (emotional intent) from a piece of text. Simply, SA is used to determine how positive or negative the emotion of a sentence is.
|
||||
|
||||
Examples:
|
||||
|
||||
* "I love how curly your hair is" -- very positive
|
||||
* "The United States is over 200 years old" -- neutral
|
||||
* "Frankly, your face is disgusting and I would hate to meet you" -- very negative
|
||||
|
||||
SA can be a powerful signal for determining the intent of a user's comment/submission. However, it should not be the **only** tool as it comes with both strengths and weaknesses.
|
||||
|
||||
## Pros and Cons
|
||||
|
||||
Pros
|
||||
|
||||
* In terms of Reddit API usage, SA is **free**. It requires no API calls and is computationally trivial.
|
||||
* Extremely powerful signal for intent since it analyzes the actual text content of an activity
|
||||
* Requires almost no setup to use
|
||||
* Can be used as a substitute for regex/keyword matching when looking for hateful/toxic comments
|
||||
* English language comprehension is very thorough
|
||||
* Uses 3 independent algorithms to evaluate sentiment
|
||||
* Understands common english slang, internet slang, and emojis
|
||||
|
||||
Cons
|
||||
|
||||
* Language limited -- only supported for English (most thorough), French, German, and Spanish
|
||||
* Less accurate for small word count content (less than 4 words)
|
||||
* Does not understand sarcasm/jokes
|
||||
* Accuracy depends on use of common words
|
||||
* Accuracy depends on clear intent
|
||||
* Heavy nuance, obscure word choice, and hidden meanings are not understood
|
||||
|
||||
## Technical Overview
|
||||
|
||||
ContextMod attempts to identify the language of the content it is processing. Based on its confidence of the language it will use up to three different NLP libraries to extract sentiment:
|
||||
|
||||
* [NLP.js](https://github.com/axa-group/nlp.js/blob/master/docs/v3/sentiment-analysis.md) (english, french, german, and spanish)
|
||||
* [vaderSentiment-js](https://github.com/vaderSentiment/vaderSentiment-js/) (english only)
|
||||
* [wink-sentiment](https://github.com/winkjs/wink-sentiment) (english only)
|
||||
|
||||
The above libraries make use of these Sentiment Analysis algorithms:
|
||||
|
||||
* VADER https://github.com/cjhutto/vaderSentiment
|
||||
* AFINN http://corpustext.com/reference/sentiment_afinn.html
|
||||
* Senticon https://ieeexplore.ieee.org/document/8721408
|
||||
* Pattern https://github.com/clips/pattern
|
||||
* wink https://github.com/winkjs/wink-sentiment (modified AFINN with emojis)
|
||||
|
||||
Each library produces a normalized score: the sum of all the valence values for each recognized token in its lexicon, divided by the number of words/tokens.
|
||||
|
||||
ContextMod takes each normalized score and adjusts it to be between -1 and +1. It then finds the average of all normalized scores to produce a final sentiment between -1 and +1.
|
||||
|
||||
# Sentiment Values
|
||||
|
||||
Each piece of content ContextMod analyses produces a score from -1 to +1 to represent the sentiment of that content
|
||||
|
||||
| Score | Sentiment |
|
||||
|-------|--------------------|
|
||||
| -1 | |
|
||||
| -0.6 | Extremely Negative |
|
||||
| -0.3 | Very Negative |
|
||||
| -0.1 | Negative |
|
||||
| 0 | Neutral |
|
||||
| 0.1 | Positive |
|
||||
| 0.3 | Very Positive |
|
||||
| 0.6 | Extremely Positive |
|
||||
| 1 | |
|
||||
|
||||
# Usage
|
||||
|
||||
## Testing Sentiment Value
|
||||
|
||||
Testing for sentiment in the Sentiment Rule is done using either a **text** or **numerical** comparison.
|
||||
|
||||
### Numerical
|
||||
|
||||
Similar to other numerical comparisons in CM -- use an equality operator and the number to test for:
|
||||
|
||||
* `> 0.1` -- sentiment is at least positive
|
||||
* `<= -0.1` -- sentiment is not negative
|
||||
|
||||
Testing for *only* neutral sentiment should be done use a text comparison (below).
|
||||
|
||||
### Text
|
||||
|
||||
Use any of the **Sentiment** text values from the above table to form a test:
|
||||
|
||||
* `is very positive`
|
||||
* `is neutral`
|
||||
* `is extremely negative`
|
||||
|
||||
You may also use the `not` operator:
|
||||
|
||||
* `is not negative`
|
||||
* `is not very negative`
|
||||
* `is not neutral`
|
||||
|
||||
## Sentiment Rule
|
||||
|
||||
An example rule that tests the current comment/submission to see if it has negative sentiment:
|
||||
|
||||
```yaml
|
||||
sentiment: 'is negative'
|
||||
```
|
||||
|
||||
It's very simple :)
|
||||
|
||||
### Historical
|
||||
|
||||
You may also test the Sentiment of Activities from the user's history. (Note: this may use an API call to get history)
|
||||
|
||||
```yaml
|
||||
sentiment: 'is negative'
|
||||
historical:
|
||||
window:
|
||||
count: 50
|
||||
mustMatchCurrent: true # optional, the initial activity being tested must test true ("is positive" must be true) before historical tests are run
|
||||
sentimentVal: 'is very negative' # optional, if the sentiment test to use for historical content is different than the initial test
|
||||
totalMatching: '> 3' # optional, a comparison for how many historical activities must match sentimentVal
|
||||
```
|
||||
|
||||
# Examples
|
||||
|
||||
#### Check with Rules for recent problem subreddit activity and negative sentiment in comment
|
||||
|
||||
```yaml
|
||||
name: Probably Toxic Comment
|
||||
kind: comment
|
||||
rules:
|
||||
- kind: recentActivity
|
||||
thresholds:
|
||||
- aProblemSubreddit
|
||||
- kind: sentiment
|
||||
name: negsentiment
|
||||
sentiment: 'is very negative'
|
||||
actions:
|
||||
- kind: report
|
||||
content: 'Sentiment of {{rules.negsentiment.averageScore}} {{rules.negsentiment.sentimentTest}}'
|
||||
```
|
||||
|
||||
#### Check with Rules for recent problem subreddit activity and negative sentiment in comment history from problem subreddits
|
||||
|
||||
```yaml
|
||||
name: Toxic Comment With History
|
||||
kind: comment
|
||||
rules:
|
||||
- kind: recentActivity
|
||||
thresholds:
|
||||
- aProblemSubreddit
|
||||
- aSecondProblemSubreddit
|
||||
- kind: sentiment
|
||||
sentiment: 'is very negative'
|
||||
historical:
|
||||
sentimentVal: 'is negative'
|
||||
mustMatchCurrent: true
|
||||
totalMatching: '> 1'
|
||||
window:
|
||||
count: 100
|
||||
filterOn:
|
||||
post:
|
||||
subreddits:
|
||||
include:
|
||||
- name:
|
||||
- aProblemSubreddit
|
||||
- aSecondProblemSubreddit
|
||||
actions:
|
||||
- kind: remove
|
||||
```
|
||||
@@ -6,6 +6,16 @@ Context Mod supports reading and writing [User Notes](https://www.reddit.com/r/t
|
||||
|
||||
[Click here for the Toolbox Quickstart Guide](https://www.reddit.com/r/toolbox/wiki/docs/quick_start)
|
||||
|
||||
Valid Note Types:
|
||||
|
||||
* `gooduser`
|
||||
* `spamwatch`
|
||||
* `spamwarn`
|
||||
* `abusewarn`
|
||||
* `ban`
|
||||
* `permban`
|
||||
* `botban`
|
||||
|
||||
## Filter
|
||||
|
||||
User Notes are an additional criteria on [AuthorCriteria](https://json-schema.app/view/%23%2Fdefinitions%2FAuthorCriteria?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json) that can be used alongside other Author properties for both [filtering rules and in the AuthorRule.](/docs/subreddit/components/author/)
|
||||
|
||||
67
package-lock.json
generated
67
package-lock.json
generated
@@ -1,17 +1,19 @@
|
||||
{
|
||||
"name": "redditcontextbot",
|
||||
"version": "0.5.1",
|
||||
"version": "0.11.4",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "redditcontextbot",
|
||||
"version": "0.5.1",
|
||||
"version": "0.11.4",
|
||||
"hasInstallScript": true,
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@awaitjs/express": "^0.8.0",
|
||||
"@googleapis/youtube": "^2.0.0",
|
||||
"@influxdata/influxdb-client": "^1.27.0",
|
||||
"@influxdata/influxdb-client-apis": "^1.27.0",
|
||||
"@nlpjs/core": "^4.23.4",
|
||||
"@nlpjs/lang-de": "^4.23.4",
|
||||
"@nlpjs/lang-en": "^4.23.4",
|
||||
@@ -29,7 +31,7 @@
|
||||
"cache-manager-redis-store": "^2.0.0",
|
||||
"commander": "^8.0.0",
|
||||
"comment-json": "^4.1.1",
|
||||
"connect-typeorm": "github:FoxxMD/connect-typeorm#typeormBump",
|
||||
"connect-typeorm": "^2.0.0",
|
||||
"cookie-parser": "^1.3.5",
|
||||
"dayjs": "^1.10.5",
|
||||
"deepmerge": "^4.2.2",
|
||||
@@ -73,7 +75,7 @@
|
||||
"string-similarity": "^4.0.4",
|
||||
"tcp-port-used": "^1.0.2",
|
||||
"triple-beam": "^1.3.0",
|
||||
"typeorm": "^0.3.4",
|
||||
"typeorm": "^0.3.7",
|
||||
"typeorm-logger-adaptor": "^1.1.0",
|
||||
"vader-sentiment": "^1.1.3",
|
||||
"webhook-discord": "^3.7.7",
|
||||
@@ -664,6 +666,19 @@
|
||||
"node": ">=10.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@influxdata/influxdb-client": {
|
||||
"version": "1.27.0",
|
||||
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client/-/influxdb-client-1.27.0.tgz",
|
||||
"integrity": "sha512-hOBi+ApIurDd8jFWo+eYjMWWsDRp3wih/U/NOVRoHaTOE8ihSQthi9wfMD4YeVqt4pCN6ygIwo7lEKFXwNuwcA=="
|
||||
},
|
||||
"node_modules/@influxdata/influxdb-client-apis": {
|
||||
"version": "1.27.0",
|
||||
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client-apis/-/influxdb-client-apis-1.27.0.tgz",
|
||||
"integrity": "sha512-a4gd7CwNRXSsSVt9tm8GzGxuPXngEmQucMdoTZ0YYeWSbKUXz3B/3u9/EqMGEbtq5MdbbB2OKA611hu205UiNg==",
|
||||
"peerDependencies": {
|
||||
"@influxdata/influxdb-client": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@istanbuljs/load-nyc-config": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz",
|
||||
@@ -2786,9 +2801,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/connect-typeorm": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "git+ssh://git@github.com/FoxxMD/connect-typeorm.git#a2f0a7225a1c218db0b919142076ce0774c8caa6",
|
||||
"license": "MIT",
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/connect-typeorm/-/connect-typeorm-2.0.0.tgz",
|
||||
"integrity": "sha512-0OcbHJkNMTJjSrbcKGljr4PKgRq13Dds7zQq3+8oaf4syQTgGvGv9OgnXo2qg+Bljkh4aJNzIvW74QOVLn8zrw==",
|
||||
"dependencies": {
|
||||
"@types/debug": "0.0.31",
|
||||
"@types/express-session": "^1.15.5",
|
||||
@@ -2796,7 +2811,7 @@
|
||||
"express-session": "^1.15.6"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typeorm": "^0.3.4"
|
||||
"typeorm": "^0.3.0"
|
||||
}
|
||||
},
|
||||
"node_modules/connect-typeorm/node_modules/debug": {
|
||||
@@ -9300,9 +9315,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/typeorm": {
|
||||
"version": "0.3.6",
|
||||
"resolved": "https://registry.npmjs.org/typeorm/-/typeorm-0.3.6.tgz",
|
||||
"integrity": "sha512-DRqgfqcelMiGgWSMbBmVoJNFN2nPNA3EeY2gC324ndr2DZoGRTb9ILtp2oGVGnlA+cu5zgQ6it5oqKFNkte7Aw==",
|
||||
"version": "0.3.7",
|
||||
"resolved": "https://registry.npmjs.org/typeorm/-/typeorm-0.3.7.tgz",
|
||||
"integrity": "sha512-MsPJeP6Zuwfe64c++l80+VRqpGEGxf0CkztIEnehQ+CMmQPSHjOnFbFxwBuZ2jiLqZTjLk2ZqQdVF0RmvxNF3Q==",
|
||||
"dependencies": {
|
||||
"@sqltools/formatter": "^1.2.2",
|
||||
"app-root-path": "^3.0.0",
|
||||
@@ -9335,12 +9350,12 @@
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@google-cloud/spanner": "^5.18.0",
|
||||
"@sap/hana-client": "^2.11.14",
|
||||
"@sap/hana-client": "^2.12.25",
|
||||
"better-sqlite3": "^7.1.2",
|
||||
"hdb-pool": "^0.1.6",
|
||||
"ioredis": "^4.28.3",
|
||||
"ioredis": "^5.0.4",
|
||||
"mongodb": "^3.6.0",
|
||||
"mssql": "^6.3.1",
|
||||
"mssql": "^7.3.0",
|
||||
"mysql2": "^2.2.5",
|
||||
"oracledb": "^5.1.0",
|
||||
"pg": "^8.5.1",
|
||||
@@ -9348,7 +9363,7 @@
|
||||
"pg-query-stream": "^4.0.0",
|
||||
"redis": "^3.1.1 || ^4.0.0",
|
||||
"sql.js": "^1.4.0",
|
||||
"sqlite3": "^5.0.2",
|
||||
"sqlite3": "^5.0.3",
|
||||
"ts-node": "^10.7.0",
|
||||
"typeorm-aurora-data-api-driver": "^2.0.0"
|
||||
},
|
||||
@@ -10796,6 +10811,17 @@
|
||||
"googleapis-common": "^5.0.1"
|
||||
}
|
||||
},
|
||||
"@influxdata/influxdb-client": {
|
||||
"version": "1.27.0",
|
||||
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client/-/influxdb-client-1.27.0.tgz",
|
||||
"integrity": "sha512-hOBi+ApIurDd8jFWo+eYjMWWsDRp3wih/U/NOVRoHaTOE8ihSQthi9wfMD4YeVqt4pCN6ygIwo7lEKFXwNuwcA=="
|
||||
},
|
||||
"@influxdata/influxdb-client-apis": {
|
||||
"version": "1.27.0",
|
||||
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client-apis/-/influxdb-client-apis-1.27.0.tgz",
|
||||
"integrity": "sha512-a4gd7CwNRXSsSVt9tm8GzGxuPXngEmQucMdoTZ0YYeWSbKUXz3B/3u9/EqMGEbtq5MdbbB2OKA611hu205UiNg==",
|
||||
"requires": {}
|
||||
},
|
||||
"@istanbuljs/load-nyc-config": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz",
|
||||
@@ -12549,8 +12575,9 @@
|
||||
}
|
||||
},
|
||||
"connect-typeorm": {
|
||||
"version": "git+ssh://git@github.com/FoxxMD/connect-typeorm.git#a2f0a7225a1c218db0b919142076ce0774c8caa6",
|
||||
"from": "connect-typeorm@github:FoxxMD/connect-typeorm#typeormBump",
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/connect-typeorm/-/connect-typeorm-2.0.0.tgz",
|
||||
"integrity": "sha512-0OcbHJkNMTJjSrbcKGljr4PKgRq13Dds7zQq3+8oaf4syQTgGvGv9OgnXo2qg+Bljkh4aJNzIvW74QOVLn8zrw==",
|
||||
"requires": {
|
||||
"@types/debug": "0.0.31",
|
||||
"@types/express-session": "^1.15.5",
|
||||
@@ -17557,9 +17584,9 @@
|
||||
}
|
||||
},
|
||||
"typeorm": {
|
||||
"version": "0.3.6",
|
||||
"resolved": "https://registry.npmjs.org/typeorm/-/typeorm-0.3.6.tgz",
|
||||
"integrity": "sha512-DRqgfqcelMiGgWSMbBmVoJNFN2nPNA3EeY2gC324ndr2DZoGRTb9ILtp2oGVGnlA+cu5zgQ6it5oqKFNkte7Aw==",
|
||||
"version": "0.3.7",
|
||||
"resolved": "https://registry.npmjs.org/typeorm/-/typeorm-0.3.7.tgz",
|
||||
"integrity": "sha512-MsPJeP6Zuwfe64c++l80+VRqpGEGxf0CkztIEnehQ+CMmQPSHjOnFbFxwBuZ2jiLqZTjLk2ZqQdVF0RmvxNF3Q==",
|
||||
"requires": {
|
||||
"@sqltools/formatter": "^1.2.2",
|
||||
"app-root-path": "^3.0.0",
|
||||
|
||||
22
package.json
22
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "redditcontextbot",
|
||||
"version": "0.5.1",
|
||||
"version": "0.11.4",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
@@ -8,13 +8,15 @@
|
||||
"build": "tsc && npm run bundle-front",
|
||||
"bundle-front": "browserify src/Web/assets/browser.js | terser --compress --mangle > src/Web/assets/public/browserBundle.js",
|
||||
"start": "node src/index.js run",
|
||||
"schema": "npm run -s schema-app & npm run -s schema-ruleset & npm run -s schema-rule & npm run -s schema-action & npm run -s schema-config",
|
||||
"schema-app": "typescript-json-schema tsconfig.json JSONConfig --out src/Schema/App.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
|
||||
"schema-ruleset": "typescript-json-schema tsconfig.json RuleSetJson --out src/Schema/RuleSet.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
|
||||
"schema-rule": "typescript-json-schema tsconfig.json RuleJson --out src/Schema/Rule.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
|
||||
"schema-action": "typescript-json-schema tsconfig.json ActionJson --out src/Schema/Action.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
|
||||
"schema": "npm run -s schema-app & npm run -s schema-ruleset & npm run -s schema-rule & npm run -s schema-action & npm run -s schema-check & npm run -s schema-run & npm run -s schema-config",
|
||||
"schema-app": "typescript-json-schema tsconfig.json SubredditConfigData --out src/Schema/App.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
|
||||
"schema-ruleset": "typescript-json-schema tsconfig.json RuleSetConfigData --out src/Schema/RuleSet.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
|
||||
"schema-rule": "typescript-json-schema tsconfig.json RuleConfigData --out src/Schema/Rule.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
|
||||
"schema-check": "typescript-json-schema tsconfig.json ActivityCheckConfigValue --out src/Schema/Check.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
|
||||
"schema-run": "typescript-json-schema tsconfig.json RunConfigValue --out src/Schema/Run.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
|
||||
"schema-action": "typescript-json-schema tsconfig.json ActionConfigData --out src/Schema/Action.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
|
||||
"schema-config": "typescript-json-schema tsconfig.json OperatorJsonConfig --out src/Schema/OperatorConfig.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
|
||||
"schemaNotWorking": "./node_modules/.bin/ts-json-schema-generator -f tsconfig.json -p src/JsonConfig.ts -t JSONConfig --out src/Schema/vegaSchema.json",
|
||||
"schemaNotWorking": "./node_modules/.bin/ts-json-schema-generator -f tsconfig.json -p src/SubredditConfigData.ts -t JSONConfig --out src/Schema/vegaSchema.json",
|
||||
"circular": "madge --circular --extensions ts src/index.ts",
|
||||
"circular-graph": "madge --image graph.svg --circular --extensions ts src/index.ts",
|
||||
"postinstall": "patch-package",
|
||||
@@ -30,6 +32,8 @@
|
||||
"dependencies": {
|
||||
"@awaitjs/express": "^0.8.0",
|
||||
"@googleapis/youtube": "^2.0.0",
|
||||
"@influxdata/influxdb-client": "^1.27.0",
|
||||
"@influxdata/influxdb-client-apis": "^1.27.0",
|
||||
"@nlpjs/core": "^4.23.4",
|
||||
"@nlpjs/lang-de": "^4.23.4",
|
||||
"@nlpjs/lang-en": "^4.23.4",
|
||||
@@ -47,7 +51,7 @@
|
||||
"cache-manager-redis-store": "^2.0.0",
|
||||
"commander": "^8.0.0",
|
||||
"comment-json": "^4.1.1",
|
||||
"connect-typeorm": "github:FoxxMD/connect-typeorm#typeormBump",
|
||||
"connect-typeorm": "^2.0.0",
|
||||
"cookie-parser": "^1.3.5",
|
||||
"dayjs": "^1.10.5",
|
||||
"deepmerge": "^4.2.2",
|
||||
@@ -91,7 +95,7 @@
|
||||
"string-similarity": "^4.0.4",
|
||||
"tcp-port-used": "^1.0.2",
|
||||
"triple-beam": "^1.3.0",
|
||||
"typeorm": "^0.3.4",
|
||||
"typeorm": "^0.3.7",
|
||||
"typeorm-logger-adaptor": "^1.1.0",
|
||||
"vader-sentiment": "^1.1.3",
|
||||
"webhook-discord": "^3.7.7",
|
||||
|
||||
@@ -6,6 +6,10 @@ import {ActionProcessResult, Footer, RuleResult} from "../Common/interfaces";
|
||||
import {RuleResultEntity} from "../Common/Entities/RuleResultEntity";
|
||||
import {runCheckOptions} from "../Subreddit/Manager";
|
||||
import {ActionTypes} from "../Common/Infrastructure/Atomic";
|
||||
import {truncateStringToLength} from "../util";
|
||||
|
||||
const truncate = truncateStringToLength(100);
|
||||
const truncateLongMessage = truncateStringToLength(200);
|
||||
|
||||
export class BanAction extends Action {
|
||||
|
||||
@@ -37,15 +41,17 @@ export class BanAction extends Action {
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResultEntity[], options: runCheckOptions): Promise<ActionProcessResult> {
|
||||
const dryRun = this.getRuntimeAwareDryrun(options);
|
||||
const content = this.message === undefined ? undefined : await this.resources.getContent(this.message, item.subreddit);
|
||||
const renderedBody = content === undefined ? undefined : await renderContent(content, item, ruleResults, this.resources.userNotes);
|
||||
const renderedBody = this.message === undefined ? undefined : await this.resources.renderContent(this.message, item, ruleResults);
|
||||
const renderedContent = renderedBody === undefined ? undefined : `${renderedBody}${await this.resources.generateFooter(item, this.footer)}`;
|
||||
|
||||
const renderedReason = this.reason === undefined ? undefined : truncate(await this.resources.renderContent(this.reason, item, ruleResults));
|
||||
const renderedNote = this.note === undefined ? undefined : truncate(await this.resources.renderContent(this.note, item, ruleResults));
|
||||
|
||||
const touchedEntities = [];
|
||||
let banPieces = [];
|
||||
banPieces.push(`Message: ${renderedContent === undefined ? 'None' : `${renderedContent.length > 100 ? `\r\n${renderedContent}` : renderedContent}`}`);
|
||||
banPieces.push(`Reason: ${this.reason || 'None'}`);
|
||||
banPieces.push(`Note: ${this.note || 'None'}`);
|
||||
banPieces.push(`Message: ${renderedContent === undefined ? 'None' : `${renderedContent.length > 100 ? `\r\n${truncateLongMessage(renderedContent)}` : renderedContent}`}`);
|
||||
banPieces.push(`Reason: ${renderedReason || 'None'}`);
|
||||
banPieces.push(`Note: ${renderedNote || 'None'}`);
|
||||
const durText = this.duration === undefined ? 'permanently' : `for ${this.duration} days`;
|
||||
this.logger.info(`Banning ${item.author.name} ${durText}${this.reason !== undefined ? ` (${this.reason})` : ''}`);
|
||||
this.logger.verbose(`\r\n${banPieces.join('\r\n')}`);
|
||||
@@ -56,8 +62,8 @@ export class BanAction extends Action {
|
||||
const bannedUser = await fetchedSub.banUser({
|
||||
name: fetchedName,
|
||||
banMessage: renderedContent === undefined ? undefined : renderedContent,
|
||||
banReason: this.reason,
|
||||
banNote: this.note,
|
||||
banReason: renderedReason,
|
||||
banNote: renderedNote,
|
||||
duration: this.duration
|
||||
});
|
||||
touchedEntities.push(bannedUser);
|
||||
@@ -65,7 +71,7 @@ export class BanAction extends Action {
|
||||
return {
|
||||
dryRun,
|
||||
success: true,
|
||||
result: `Banned ${item.author.name} ${durText}${this.reason !== undefined ? ` (${this.reason})` : ''}`,
|
||||
result: `Banned ${item.author.name} ${durText}${renderedReason !== undefined ? ` (${renderedReason})` : ''}`,
|
||||
touchedEntities
|
||||
};
|
||||
}
|
||||
@@ -97,8 +103,10 @@ export interface BanActionConfig extends ActionConfig, Footer {
|
||||
* */
|
||||
message?: string
|
||||
/**
|
||||
* Reason for ban.
|
||||
* @maxLength 100
|
||||
* Reason for ban. Can use Templating.
|
||||
*
|
||||
* If the length expands to more than 100 characters it will truncated with "..."
|
||||
*
|
||||
* @examples ["repeat spam"]
|
||||
* */
|
||||
reason?: string
|
||||
@@ -110,8 +118,10 @@ export interface BanActionConfig extends ActionConfig, Footer {
|
||||
* */
|
||||
duration?: number
|
||||
/**
|
||||
* A mod note for this ban
|
||||
* @maxLength 100
|
||||
* A mod note for this ban. Can use Templating.
|
||||
*
|
||||
* If the length expands to more than 100 characters it will truncated with "..."
|
||||
*
|
||||
* @examples ["Sock puppet for u/AnotherUser"]
|
||||
* */
|
||||
note?: string
|
||||
|
||||
@@ -50,8 +50,9 @@ export class MessageAction extends Action {
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResultEntity[], options: runCheckOptions): Promise<ActionProcessResult> {
|
||||
const dryRun = this.getRuntimeAwareDryrun(options);
|
||||
const content = await this.resources.getContent(this.content);
|
||||
const body = await renderContent(content, item, ruleResults, this.resources.userNotes);
|
||||
|
||||
const body = await this.resources.renderContent(this.content, item, ruleResults);
|
||||
const subject = this.title === undefined ? `Concerning your ${isSubmission(item) ? 'Submission' : 'Comment'}` : await this.resources.renderContent(this.title, item, ruleResults);
|
||||
|
||||
const footer = await this.resources.generateFooter(item, this.footer);
|
||||
|
||||
@@ -80,7 +81,7 @@ export class MessageAction extends Action {
|
||||
text: renderedContent,
|
||||
// @ts-ignore
|
||||
fromSubreddit: this.asSubreddit ? await item.subreddit.fetch() : undefined,
|
||||
subject: this.title || `Concerning your ${isSubmission(item) ? 'Submission' : 'Comment'}`,
|
||||
subject: subject,
|
||||
};
|
||||
|
||||
const msgPreview = `\r\n
|
||||
|
||||
@@ -7,12 +7,12 @@ import Submission from "snoowrap/dist/objects/Submission";
|
||||
import {ActionProcessResult, RuleResult} from "../Common/interfaces";
|
||||
import {RuleResultEntity} from "../Common/Entities/RuleResultEntity";
|
||||
import {runCheckOptions} from "../Subreddit/Manager";
|
||||
import {ActionTypes} from "../Common/Infrastructure/Atomic";
|
||||
import {ActionTypes, UserNoteType} from "../Common/Infrastructure/Atomic";
|
||||
|
||||
|
||||
export class UserNoteAction extends Action {
|
||||
content: string;
|
||||
type: string;
|
||||
type: UserNoteType;
|
||||
allowDuplicate: boolean;
|
||||
|
||||
constructor(options: UserNoteActionOptions) {
|
||||
|
||||
@@ -133,7 +133,7 @@ export class App {
|
||||
for (const b of this.bots) {
|
||||
if (b.error === undefined) {
|
||||
try {
|
||||
await b.testClient();
|
||||
await b.init();
|
||||
await b.buildManagers();
|
||||
await sleep(2000);
|
||||
b.runManagers(causedBy).catch((err) => {
|
||||
|
||||
312
src/Bot/index.ts
312
src/Bot/index.ts
@@ -13,12 +13,12 @@ import {
|
||||
USER
|
||||
} from "../Common/interfaces";
|
||||
import {
|
||||
createRetryHandler, difference,
|
||||
createRetryHandler, symmetricalDifference,
|
||||
formatNumber, getExceptionMessage, getUserAgent,
|
||||
mergeArr,
|
||||
parseBool,
|
||||
parseDuration, parseMatchMessage, parseRedditEntity,
|
||||
parseSubredditName, RetryOptions,
|
||||
parseSubredditName, partition, RetryOptions,
|
||||
sleep
|
||||
} from "../util";
|
||||
import {Manager} from "../Subreddit/Manager";
|
||||
@@ -41,6 +41,8 @@ import {ManagerRunState} from "../Common/Entities/EntityRunState/ManagerRunState
|
||||
import {Invokee, PollOn} from "../Common/Infrastructure/Atomic";
|
||||
import {FilterCriteriaDefaults} from "../Common/Infrastructure/Filters/FilterShapes";
|
||||
import {snooLogWrapper} from "../Utils/loggerFactory";
|
||||
import {InfluxClient} from "../Common/Influx/InfluxClient";
|
||||
import {Point} from "@influxdata/influxdb-client";
|
||||
|
||||
class Bot {
|
||||
|
||||
@@ -49,6 +51,7 @@ class Bot {
|
||||
logs: LogInfo[] = [];
|
||||
wikiLocation: string;
|
||||
dryRun?: true | undefined;
|
||||
inited: boolean = false;
|
||||
running: boolean = false;
|
||||
subreddits: string[];
|
||||
excludeSubreddits: string[];
|
||||
@@ -71,6 +74,7 @@ class Bot {
|
||||
botName?: string;
|
||||
botLink?: string;
|
||||
botAccount?: string;
|
||||
botUser?: RedditUser;
|
||||
maxWorkers: number;
|
||||
startedAt: Dayjs = dayjs();
|
||||
sharedStreams: PollOn[] = [];
|
||||
@@ -90,6 +94,8 @@ class Bot {
|
||||
|
||||
config: BotInstanceConfig;
|
||||
|
||||
influxClients: InfluxClient[] = [];
|
||||
|
||||
database: DataSource
|
||||
invokeeRepo: Repository<InvokeeType>;
|
||||
runTypeRepo: Repository<RunStateType>;
|
||||
@@ -177,8 +183,11 @@ class Bot {
|
||||
|
||||
this.logger.stream().on('log', (log: LogInfo) => {
|
||||
if(log.bot !== undefined && log.bot === this.getBotName() && log.subreddit === undefined) {
|
||||
const combinedLogs = [log, ...this.logs];
|
||||
this.logs = combinedLogs.slice(0, 301);
|
||||
this.logs.unshift(log);
|
||||
if(this.logs.length > 300) {
|
||||
// remove all elements starting from the 300th index (301st item)
|
||||
this.logs.splice(300);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
@@ -311,33 +320,22 @@ class Bot {
|
||||
}
|
||||
}
|
||||
|
||||
async testClient(initial = true) {
|
||||
try {
|
||||
// @ts-ignore
|
||||
await this.client.getMe();
|
||||
this.logger.info('Test API call successful');
|
||||
} catch (err: any) {
|
||||
if (initial) {
|
||||
this.logger.error('An error occurred while trying to initialize the Reddit API Client which would prevent the entire application from running.');
|
||||
}
|
||||
const hint = getExceptionMessage(err, {
|
||||
401: 'Likely a credential is missing or incorrect. Check clientId, clientSecret, refreshToken, and accessToken',
|
||||
400: 'Credentials may have been invalidated manually or by reddit due to behavior',
|
||||
});
|
||||
let msg = `Error occurred while testing Reddit API client${hint !== undefined ? `: ${hint}` : ''}`;
|
||||
this.error = msg;
|
||||
const clientError = new CMError(msg, {cause: err});
|
||||
clientError.logged = true;
|
||||
this.logger.error(clientError);
|
||||
throw clientError;
|
||||
}
|
||||
}
|
||||
async init() {
|
||||
|
||||
if(this.inited) {
|
||||
return;
|
||||
}
|
||||
|
||||
let user: RedditUser;
|
||||
try {
|
||||
user = await this.testClient();
|
||||
} catch(err: any) {
|
||||
this.logger.error('An error occurred while trying to initialize the Reddit API Client which would prevent the Bot from running.');
|
||||
throw err;
|
||||
}
|
||||
|
||||
async buildManagers(subreddits: string[] = []) {
|
||||
let availSubs = [];
|
||||
// @ts-ignore
|
||||
const user = await this.client.getMe().fetch();
|
||||
this.cacheManager.botName = user.name;
|
||||
this.botUser = user;
|
||||
this.botLink = `https://reddit.com/user/${user.name}`;
|
||||
this.botAccount = `u/${user.name}`;
|
||||
this.logger.info(`Reddit API Limit Remaining: ${this.client.ratelimitRemaining}`);
|
||||
@@ -364,35 +362,78 @@ class Bot {
|
||||
this.botEntity = b;
|
||||
}
|
||||
|
||||
if(this.config.opInflux !== undefined) {
|
||||
this.influxClients.push(this.config.opInflux.childClient(this.logger, {bot: user.name}));
|
||||
if(this.config.influxConfig !== undefined) {
|
||||
const iClient = new InfluxClient(this.config.influxConfig, this.logger, {bot: user.name});
|
||||
await iClient.isReady();
|
||||
this.influxClients.push(iClient);
|
||||
}
|
||||
}
|
||||
|
||||
this.inited = true;
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
async testClient(initial = true) {
|
||||
try {
|
||||
// @ts-ignore
|
||||
const user = this.client.getMe().fetch();
|
||||
this.logger.info('Test API call successful');
|
||||
return user;
|
||||
} catch (err: any) {
|
||||
if (initial) {
|
||||
this.logger.error('An error occurred while trying to initialize the Reddit API Client which would prevent the entire application from running.');
|
||||
}
|
||||
const hint = getExceptionMessage(err, {
|
||||
401: 'Likely a credential is missing or incorrect. Check clientId, clientSecret, refreshToken, and accessToken',
|
||||
400: 'Credentials may have been invalidated manually or by reddit due to behavior',
|
||||
});
|
||||
let msg = `Error occurred while testing Reddit API client${hint !== undefined ? `: ${hint}` : ''}`;
|
||||
this.error = msg;
|
||||
const clientError = new CMError(msg, {cause: err});
|
||||
clientError.logged = true;
|
||||
this.logger.error(clientError);
|
||||
throw clientError;
|
||||
}
|
||||
}
|
||||
|
||||
async buildManagers(subreddits: string[] = []) {
|
||||
await this.init();
|
||||
|
||||
this.logger.verbose('Syncing subreddits to moderate with managers...');
|
||||
|
||||
let availSubs: Subreddit[] = [];
|
||||
|
||||
let subListing = await this.client.getModeratedSubreddits({count: 100});
|
||||
while(!subListing.isFinished) {
|
||||
subListing = await subListing.fetchMore({amount: 100});
|
||||
}
|
||||
availSubs = subListing.filter(x => x.display_name !== `u_${user.name}`);
|
||||
availSubs = subListing.filter(x => x.display_name !== `u_${this.botUser?.name}`);
|
||||
|
||||
this.logger.info(`u/${user.name} is a moderator of these subreddits: ${availSubs.map(x => x.display_name_prefixed).join(', ')}`);
|
||||
this.logger.verbose(`${this.botAccount} is a moderator of these subreddits: ${availSubs.map(x => x.display_name_prefixed).join(', ')}`);
|
||||
|
||||
let subsToRun: Subreddit[] = [];
|
||||
const subsToUse = subreddits.length > 0 ? subreddits.map(parseSubredditName) : this.subreddits;
|
||||
if (subsToUse.length > 0) {
|
||||
this.logger.info(`Operator-defined subreddit constraints detected (CLI argument or environmental variable), will try to run on: ${subsToUse.join(', ')}`);
|
||||
for (const sub of subsToUse) {
|
||||
this.logger.info(`Operator-specified subreddit constraints detected, will only use these: ${subsToUse.join(', ')}`);
|
||||
const availSubsCI = availSubs.map(x => x.display_name.toLowerCase());
|
||||
const [foundSubs, notFoundSubs] = partition(subsToUse, (aSub) => availSubsCI.includes(aSub.toLowerCase()));
|
||||
if(notFoundSubs.length > 0) {
|
||||
this.logger.warn(`Will not run some operator-specified subreddits because they are not modded by, or do not have appropriate mod permissions for, this bot: ${notFoundSubs.join(', ')}`);
|
||||
}
|
||||
|
||||
for (const sub of foundSubs) {
|
||||
const asub = availSubs.find(x => x.display_name.toLowerCase() === sub.toLowerCase())
|
||||
if (asub === undefined) {
|
||||
this.logger.warn(`Will not run on ${sub} because is not modded by, or does not have appropriate permissions to mod with, for this client.`);
|
||||
} else {
|
||||
// @ts-ignore
|
||||
const fetchedSub = await asub.fetch();
|
||||
subsToRun.push(fetchedSub);
|
||||
}
|
||||
subsToRun.push(asub as Subreddit);
|
||||
}
|
||||
} else {
|
||||
if(this.excludeSubreddits.length > 0) {
|
||||
this.logger.info(`Will run on all moderated subreddits but own profile and user-defined excluded: ${this.excludeSubreddits.join(', ')}`);
|
||||
this.logger.info(`Will run on all moderated subreddits EXCEPT own profile and operator-defined excluded: ${this.excludeSubreddits.join(', ')}`);
|
||||
const normalExcludes = this.excludeSubreddits.map(x => x.toLowerCase());
|
||||
subsToRun = availSubs.filter(x => !normalExcludes.includes(x.display_name.toLowerCase()));
|
||||
} else {
|
||||
this.logger.info(`No user-defined subreddit constraints detected, will run on all moderated subreddits EXCEPT own profile (${this.botAccount})`);
|
||||
this.logger.info(`No operator-defined subreddit constraints detected, will run on all moderated subreddits EXCEPT own profile (${this.botAccount})`);
|
||||
subsToRun = availSubs;
|
||||
}
|
||||
}
|
||||
@@ -415,30 +456,66 @@ class Bot {
|
||||
return acc;
|
||||
}
|
||||
}, []);
|
||||
const notMatched = difference(normalizedOverrideNames, subsToRunNames);
|
||||
const notMatched = symmetricalDifference(normalizedOverrideNames, subsToRunNames);
|
||||
if(notMatched.length > 0) {
|
||||
this.logger.warn(`There are overrides defined for subreddits the bot is not running. Check your spelling! Overrides not matched: ${notMatched.join(', ')}`);
|
||||
}
|
||||
}
|
||||
|
||||
let subManagersChanged = false;
|
||||
|
||||
// get configs for subs we want to run on and build/validate them
|
||||
const subsToRunNames = subsToRun.map(x => x.display_name.toLowerCase());
|
||||
|
||||
// first stop and remove any managers with subreddits not in subsToRun
|
||||
// -- this covers scenario where bot is running and mods of a subreddit de-mod the bot
|
||||
// -- or where the include/exclude subs list changed from operator (not yet implemented)
|
||||
if(this.subManagers.length > 0) {
|
||||
let index = 0;
|
||||
for(const manager of this.subManagers) {
|
||||
if(!subsToRunNames.includes(manager.subreddit.display_name.toLowerCase())) {
|
||||
subManagersChanged = true;
|
||||
// determine if bot was de-modded
|
||||
const deModded = !availSubs.some(x => x.display_name.toLowerCase() === manager.subreddit.display_name.toLowerCase());
|
||||
this.logger.warn(`Stopping and removing manager for ${manager.subreddit.display_name.toLowerCase()} because it is ${deModded ? 'no longer moderated by this bot' : 'not in the list of subreddits to moderate'}`);
|
||||
await manager.destroy('system', {reason: deModded ? 'No longer moderated by this bot' : 'Subreddit is not in moderated list'});
|
||||
this.subManagers.splice(index, 1);
|
||||
}
|
||||
index++;
|
||||
}
|
||||
}
|
||||
|
||||
// then create any managers that don't already exist
|
||||
// -- covers init scenario
|
||||
// -- and in-situ adding subreddits IE bot is modded to a new subreddit while CM is running
|
||||
const subsToInit: string[] = [];
|
||||
for (const sub of subsToRun) {
|
||||
try {
|
||||
this.subManagers.push(await this.createManager(sub));
|
||||
} catch (err: any) {
|
||||
if(!this.subManagers.some(x => x.subreddit.display_name === sub.display_name)) {
|
||||
subManagersChanged = true;
|
||||
this.logger.info(`Manager for ${sub.display_name_prefixed} not found in existing managers. Creating now...`);
|
||||
subsToInit.push(sub.display_name);
|
||||
try {
|
||||
this.subManagers.push(await this.createManager(sub));
|
||||
} catch (err: any) {
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
for(const m of this.subManagers) {
|
||||
for(const subName of subsToInit) {
|
||||
try {
|
||||
await this.initManager(m);
|
||||
const m = this.subManagers.find(x => x.subreddit.display_name === subName);
|
||||
await this.initManager(m as Manager);
|
||||
} catch (err: any) {
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
this.parseSharedStreams();
|
||||
if(!subManagersChanged) {
|
||||
this.logger.verbose('All managers were already synced!');
|
||||
} else {
|
||||
this.parseSharedStreams();
|
||||
}
|
||||
|
||||
return subManagersChanged;
|
||||
}
|
||||
|
||||
parseSharedStreams() {
|
||||
@@ -559,7 +636,7 @@ class Bot {
|
||||
}
|
||||
}
|
||||
|
||||
async createManager(sub: Subreddit): Promise<Manager> {
|
||||
async createManager(subVal: Subreddit): Promise<Manager> {
|
||||
const {
|
||||
flowControlDefaults: {
|
||||
maxGotoDepth: botMaxDefault
|
||||
@@ -570,6 +647,15 @@ class Bot {
|
||||
} = {}
|
||||
} = this.config;
|
||||
|
||||
let sub = subVal;
|
||||
// make sure the subreddit is fully fetched
|
||||
// @ts-ignore
|
||||
if(subVal._hasFetched === false) {
|
||||
// @ts-ignore
|
||||
sub = await subVal.fetch();
|
||||
}
|
||||
|
||||
|
||||
const override = overrides.find(x => {
|
||||
const configName = parseRedditEntity(x.name).name;
|
||||
if(configName !== undefined) {
|
||||
@@ -630,6 +716,7 @@ class Bot {
|
||||
managerEntity: managerEntity as ManagerEntity,
|
||||
statDefaults: (statDefaultsFromOverride ?? databaseStatisticsDefaults) as DatabaseStatisticsOperatorConfig,
|
||||
retention,
|
||||
influxClients: this.influxClients,
|
||||
});
|
||||
// all errors from managers will count towards bot-level retry count
|
||||
manager.on('error', async (err) => await this.panicOnRetries(err));
|
||||
@@ -669,21 +756,6 @@ class Bot {
|
||||
await this.client.getSubreddit(name).acceptModeratorInvite();
|
||||
this.logger.info(`Accepted moderator invite for r/${name}!`);
|
||||
await this.cacheManager.deletePendingSubredditInvite(name);
|
||||
// @ts-ignore
|
||||
const sub = await this.client.getSubreddit(name);
|
||||
this.logger.info(`Attempting to add manager for r/${name}`);
|
||||
try {
|
||||
const manager = await this.createManager(sub);
|
||||
this.logger.info(`Starting manager for r/${name}`);
|
||||
this.subManagers.push(manager);
|
||||
await this.initManager(manager);
|
||||
await manager.start('system', {reason: 'Caused by creation due to moderator invite'});
|
||||
await this.runSharedStreams();
|
||||
} catch (err: any) {
|
||||
if (!(err instanceof LoggedError)) {
|
||||
this.logger.error(err);
|
||||
}
|
||||
}
|
||||
} catch (err: any) {
|
||||
if (err.message.includes('NO_INVITE_FOUND')) {
|
||||
this.logger.warn(`No pending moderation invite for r/${name} was found`);
|
||||
@@ -742,9 +814,14 @@ class Bot {
|
||||
async healthLoop() {
|
||||
while (this.running) {
|
||||
await sleep(5000);
|
||||
const time = dayjs().valueOf()
|
||||
await this.apiHealthCheck(time);
|
||||
if (!this.running) {
|
||||
break;
|
||||
}
|
||||
for(const m of this.subManagers) {
|
||||
await m.writeHealthMetrics(time);
|
||||
}
|
||||
const now = dayjs();
|
||||
if (now.isSameOrAfter(this.nextNannyCheck)) {
|
||||
try {
|
||||
@@ -757,8 +834,17 @@ class Bot {
|
||||
}
|
||||
if(now.isSameOrAfter(this.nextHeartbeat)) {
|
||||
try {
|
||||
await this.heartbeat();
|
||||
|
||||
// run sanity check to see if there is a service issue
|
||||
try {
|
||||
await this.testClient(false);
|
||||
} catch (err: any) {
|
||||
throw new SimpleError(`Something isn't right! This could be a Reddit API issue (service is down? buggy??) or an issue with the Bot account. Will not run heartbeat operations and will wait until next heartbeat (${dayjs.duration(this.nextHeartbeat.diff(dayjs())).humanize()}) to try again`);
|
||||
}
|
||||
|
||||
await this.checkModInvites();
|
||||
await this.buildManagers();
|
||||
await this.heartbeat();
|
||||
} catch (err: any) {
|
||||
this.logger.error(`Error occurred during heartbeat check: ${err.message}`);
|
||||
}
|
||||
@@ -770,6 +856,60 @@ class Bot {
|
||||
this.emitter.emit('healthStopped');
|
||||
}
|
||||
|
||||
getApiUsageSummary() {
|
||||
const depletion = this.apiEstDepletion === undefined ? 'Not Calculated' : this.apiEstDepletion.humanize();
|
||||
return`API Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${depletion} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`;
|
||||
}
|
||||
|
||||
async apiHealthCheck(time?: number) {
|
||||
|
||||
const rollingSample = this.apiSample.slice(0, 7)
|
||||
rollingSample.unshift(this.client.ratelimitRemaining);
|
||||
this.apiSample = rollingSample;
|
||||
const diff = this.apiSample.reduceRight((acc: number[], curr, index) => {
|
||||
if (this.apiSample[index + 1] !== undefined) {
|
||||
const d = Math.abs(curr - this.apiSample[index + 1]);
|
||||
if (d === 0) {
|
||||
return [...acc, 0];
|
||||
}
|
||||
return [...acc, d / 10];
|
||||
}
|
||||
return acc;
|
||||
}, []);
|
||||
const diffTotal = diff.reduce((acc, curr) => acc + curr, 0);
|
||||
if(diffTotal === 0 || diff.length === 0) {
|
||||
this.apiRollingAvg = 0;
|
||||
} else {
|
||||
this.apiRollingAvg = diffTotal / diff.length; // api requests per second
|
||||
}
|
||||
this.depletedInSecs = this.apiRollingAvg === 0 ? Number.POSITIVE_INFINITY : this.client.ratelimitRemaining / this.apiRollingAvg; // number of seconds until current remaining limit is 0
|
||||
// if depletion/api usage is 0 we need a sane value to use here for both displaying in logs as well as for api nanny. 10 years seems reasonable
|
||||
this.apiEstDepletion = dayjs.duration((this.depletedInSecs === Number.POSITIVE_INFINITY ? {years: 10} : {seconds: this.depletedInSecs}));
|
||||
|
||||
if(this.influxClients.length > 0) {
|
||||
const apiMeasure = new Point('apiHealth')
|
||||
.intField('remaining', this.client.ratelimitRemaining)
|
||||
.stringField('nannyMod', this.nannyMode ?? 'none');
|
||||
|
||||
if(time !== undefined) {
|
||||
apiMeasure.timestamp(time);
|
||||
}
|
||||
|
||||
if(this.apiSample.length > 1) {
|
||||
const curr = this.apiSample[0];
|
||||
const last = this.apiSample[1];
|
||||
if(curr <= last) {
|
||||
apiMeasure.intField('used', last - curr);
|
||||
}
|
||||
}
|
||||
|
||||
for(const iclient of this.influxClients) {
|
||||
await iclient.writePoint(apiMeasure);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
async retentionCleanup() {
|
||||
const now = dayjs();
|
||||
if(now.isSameOrAfter(this.nextRetentionCheck)) {
|
||||
@@ -783,15 +923,8 @@ class Bot {
|
||||
}
|
||||
|
||||
async heartbeat() {
|
||||
const heartbeat = `HEARTBEAT -- API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ~${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion === undefined ? 'N/A' : this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`
|
||||
this.logger.info(heartbeat);
|
||||
this.logger.info(`HEARTBEAT -- ${this.getApiUsageSummary()}`);
|
||||
|
||||
// run sanity check to see if there is a service issue
|
||||
try {
|
||||
await this.testClient(false);
|
||||
} catch (err: any) {
|
||||
throw new SimpleError(`Something isn't right! This could be a Reddit API issue (service is down? buggy??) or an issue with the Bot account. Will not run heartbeat operations and will wait until next heartbeat (${dayjs.duration(this.nextHeartbeat.diff(dayjs())).humanize()}) to try again`);
|
||||
}
|
||||
let startedAny = false;
|
||||
|
||||
for (const s of this.subManagers) {
|
||||
@@ -844,6 +977,7 @@ class Bot {
|
||||
|
||||
async runApiNanny() {
|
||||
try {
|
||||
this.logger.debug(this.getApiUsageSummary());
|
||||
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
|
||||
const nowish = dayjs().add(10, 'second');
|
||||
if (nowish.isAfter(this.nextExpiration)) {
|
||||
@@ -867,30 +1001,12 @@ class Bot {
|
||||
}
|
||||
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
|
||||
}
|
||||
const rollingSample = this.apiSample.slice(0, 7)
|
||||
rollingSample.unshift(this.client.ratelimitRemaining);
|
||||
this.apiSample = rollingSample;
|
||||
const diff = this.apiSample.reduceRight((acc: number[], curr, index) => {
|
||||
if (this.apiSample[index + 1] !== undefined) {
|
||||
const d = Math.abs(curr - this.apiSample[index + 1]);
|
||||
if (d === 0) {
|
||||
return [...acc, 0];
|
||||
}
|
||||
return [...acc, d / 10];
|
||||
}
|
||||
return acc;
|
||||
}, []);
|
||||
this.apiRollingAvg = diff.reduce((acc, curr) => acc + curr, 0) / diff.length; // api requests per second
|
||||
this.depletedInSecs = this.client.ratelimitRemaining / this.apiRollingAvg; // number of seconds until current remaining limit is 0
|
||||
this.apiEstDepletion = dayjs.duration({seconds: this.depletedInSecs});
|
||||
this.logger.debug(`API Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`);
|
||||
|
||||
|
||||
let hardLimitHit = false;
|
||||
if (typeof this.hardLimit === 'string') {
|
||||
if (typeof this.hardLimit === 'string' && this.apiEstDepletion !== undefined) {
|
||||
const hardDur = parseDuration(this.hardLimit);
|
||||
hardLimitHit = hardDur.asSeconds() > this.apiEstDepletion.asSeconds();
|
||||
} else {
|
||||
} else if(typeof this.hardLimit === 'number') {
|
||||
hardLimitHit = this.hardLimit > this.client.ratelimitRemaining;
|
||||
}
|
||||
|
||||
@@ -899,7 +1015,6 @@ class Bot {
|
||||
return;
|
||||
}
|
||||
this.logger.info(`Detected HARD LIMIT of ${this.hardLimit} remaining`, {leaf: 'Api Nanny'});
|
||||
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${this.apiRollingAvg}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
|
||||
this.logger.info(`All subreddit event polling has been paused`, {leaf: 'Api Nanny'});
|
||||
|
||||
for (const m of this.subManagers) {
|
||||
@@ -916,10 +1031,10 @@ class Bot {
|
||||
}
|
||||
|
||||
let softLimitHit = false;
|
||||
if (typeof this.softLimit === 'string') {
|
||||
if (typeof this.softLimit === 'string' && this.apiEstDepletion !== undefined) {
|
||||
const softDur = parseDuration(this.softLimit);
|
||||
softLimitHit = softDur.asSeconds() > this.apiEstDepletion.asSeconds();
|
||||
} else {
|
||||
} else if(typeof this.softLimit === 'number') {
|
||||
softLimitHit = this.softLimit > this.client.ratelimitRemaining;
|
||||
}
|
||||
|
||||
@@ -928,7 +1043,6 @@ class Bot {
|
||||
return;
|
||||
}
|
||||
this.logger.info(`Detected SOFT LIMIT of ${this.softLimit} remaining`, {leaf: 'Api Nanny'});
|
||||
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
|
||||
this.logger.info('Trying to detect heavy usage subreddits...', {leaf: 'Api Nanny'});
|
||||
let threshold = 0.5;
|
||||
let offenders = this.subManagers.filter(x => {
|
||||
|
||||
@@ -1,19 +1,18 @@
|
||||
import {RuleSet, IRuleSet, RuleSetJson, RuleSetObjectJson, isRuleSetJSON} from "../Rule/RuleSet";
|
||||
import {IRule, Rule, RuleJSONConfig} from "../Rule";
|
||||
import Action, {ActionConfig, ActionJson, StructuredActionJson} from "../Action";
|
||||
import {RuleSet, RuleSetConfigData, RuleSetConfigHydratedData, RuleSetConfigObject} from "../Rule/RuleSet";
|
||||
import {Rule} from "../Rule";
|
||||
import Action, {ActionConfig} from "../Action";
|
||||
import {Logger} from "winston";
|
||||
import Snoowrap, {Comment, Submission} from "snoowrap";
|
||||
import {Comment, Submission} from "snoowrap";
|
||||
import {actionFactory} from "../Action/ActionFactory";
|
||||
import {ruleFactory} from "../Rule/RuleFactory";
|
||||
import {
|
||||
asPostBehaviorOptionConfig,
|
||||
boolToString,
|
||||
createAjvFactory, determineNewResults,
|
||||
FAIL, isRuleSetResult,
|
||||
createAjvFactory,
|
||||
FAIL,
|
||||
isRuleSetResult,
|
||||
mergeArr,
|
||||
PASS,
|
||||
resultsSummary,
|
||||
ruleNamesFromResults,
|
||||
truncateStringToLength
|
||||
} from "../util";
|
||||
import {
|
||||
@@ -22,19 +21,17 @@ import {
|
||||
CheckSummary,
|
||||
JoinCondition,
|
||||
NotificationEventPayload,
|
||||
PostBehavior, PostBehaviorOptionConfig, PostBehaviorOptionConfigStrong, PostBehaviorStrong,
|
||||
RuleResult,
|
||||
RuleSetResult, UserResultCache
|
||||
PostBehavior,
|
||||
PostBehaviorOptionConfigStrong,
|
||||
PostBehaviorStrong,
|
||||
RuleSetResult
|
||||
} from "../Common/interfaces";
|
||||
import * as RuleSchema from '../Schema/Rule.json';
|
||||
import * as RuleSetSchema from '../Schema/RuleSet.json';
|
||||
import * as ActionSchema from '../Schema/Action.json';
|
||||
import {
|
||||
ActionJson as ActionTypeJson
|
||||
} from "../Common/types";
|
||||
import {SubredditResources} from "../Subreddit/SubredditResources";
|
||||
import {SubredditResources} from "../Subreddit/SubredditResources";
|
||||
import {ExtendedSnoowrap} from '../Utils/SnoowrapClients';
|
||||
import {ActionProcessingError, CheckProcessingError, isRateLimitError} from "../Utils/Errors";
|
||||
import {ActionProcessingError, CheckProcessingError} from "../Utils/Errors";
|
||||
import {ErrorWithCause, stackWithCauses} from "pony-cause";
|
||||
import {runCheckOptions} from "../Subreddit/Manager";
|
||||
import EventEmitter from "events";
|
||||
@@ -47,24 +44,28 @@ import {RunnableBase} from "../Common/RunnableBase";
|
||||
import {ActionResultEntity} from "../Common/Entities/ActionResultEntity";
|
||||
import {RuleSetResultEntity} from "../Common/Entities/RuleSetResultEntity";
|
||||
import {CheckToRuleResultEntity} from "../Common/Entities/RunnableAssociation/CheckToRuleResultEntity";
|
||||
import {
|
||||
JoinOperands,
|
||||
PostBehaviorType,
|
||||
RecordOutputType,
|
||||
recordOutputTypes
|
||||
} from "../Common/Infrastructure/Atomic";
|
||||
import {
|
||||
MinimalOrFullFilter,
|
||||
MinimalOrFullFilterJson
|
||||
} from "../Common/Infrastructure/Filters/FilterShapes";
|
||||
import {
|
||||
CommentState,
|
||||
SubmissionState,
|
||||
} from "../Common/Infrastructure/Filters/FilterCriteria";
|
||||
import {JoinOperands, PostBehaviorType, RecordOutputType, recordOutputTypes} from "../Common/Infrastructure/Atomic";
|
||||
import {CommentState, SubmissionState,} from "../Common/Infrastructure/Filters/FilterCriteria";
|
||||
import {ActivityType} from "../Common/Infrastructure/Reddit";
|
||||
import {RunnableBaseJson, RunnableBaseOptions, StructuredRunnableBase} from "../Common/Infrastructure/Runnable";
|
||||
import {RuleJson, StructuredRuleObjectJson, StructuredRuleSetObjectJson} from "../Common/Infrastructure/RuleShapes";
|
||||
import {ActionObjectJson, StructuredActionObjectJson} from "../Common/Infrastructure/ActionShapes";
|
||||
import {
|
||||
RunnableBaseJson,
|
||||
RunnableBaseOptions,
|
||||
StructuredRunnableBase,
|
||||
TypedRunnableBaseData, TypedStructuredRunnableBase
|
||||
} from "../Common/Infrastructure/Runnable";
|
||||
import {
|
||||
RuleConfigData, RuleConfigHydratedData,
|
||||
RuleConfigObject,
|
||||
StructuredRuleConfigObject,
|
||||
StructuredRuleSetConfigObject
|
||||
} from "../Common/Infrastructure/RuleShapes";
|
||||
import {
|
||||
ActionConfigData,
|
||||
ActionConfigHydratedData,
|
||||
ActionConfigObject,
|
||||
StructuredActionObjectJson
|
||||
} from "../Common/Infrastructure/ActionShapes";
|
||||
import {IncludesData} from "../Common/Infrastructure/Includes";
|
||||
|
||||
const checkLogName = truncateStringToLength(25);
|
||||
|
||||
@@ -155,7 +156,7 @@ export abstract class Check extends RunnableBase implements Omit<ICheck, 'postTr
|
||||
if(asPostBehaviorOptionConfig(postFail)) {
|
||||
const {
|
||||
behavior = 'next',
|
||||
recordTo = false
|
||||
recordTo = ['influx']
|
||||
} = postFail;
|
||||
let recordStrong: RecordOutputType[] = [];
|
||||
if(typeof recordTo === 'boolean') {
|
||||
@@ -174,7 +175,7 @@ export abstract class Check extends RunnableBase implements Omit<ICheck, 'postTr
|
||||
} else {
|
||||
this.postFail = {
|
||||
behavior: postFail,
|
||||
recordTo: []
|
||||
recordTo: ['influx']
|
||||
}
|
||||
}
|
||||
|
||||
@@ -192,12 +193,12 @@ export abstract class Check extends RunnableBase implements Omit<ICheck, 'postTr
|
||||
let ruleErrors: any = [];
|
||||
if (valid) {
|
||||
const ruleConfig = r;
|
||||
this.rules.push(new RuleSet({...ruleConfig as StructuredRuleSetObjectJson, logger: this.logger, subredditName, resources: this.resources, client: this.client}));
|
||||
this.rules.push(new RuleSet({...ruleConfig as StructuredRuleSetConfigObject, logger: this.logger, subredditName, resources: this.resources, client: this.client}));
|
||||
} else {
|
||||
setErrors = ajv.errors;
|
||||
valid = ajv.validate(RuleSchema, r);
|
||||
if (valid) {
|
||||
this.rules.push(ruleFactory(r as StructuredRuleObjectJson, this.logger, subredditName, this.resources, this.client));
|
||||
this.rules.push(ruleFactory(r as StructuredRuleConfigObject, this.logger, subredditName, this.resources, this.client));
|
||||
} else {
|
||||
ruleErrors = ajv.errors;
|
||||
const leastErrorType = setErrors.length < ruleErrors ? 'RuleSet' : 'Rule';
|
||||
@@ -605,7 +606,7 @@ export interface ICheck extends JoinCondition, PostBehavior, RunnableBaseJson {
|
||||
}
|
||||
|
||||
export interface CheckOptions extends Omit<ICheck, 'authorIs' | 'itemIs'>, RunnableBaseOptions {
|
||||
rules: Array<StructuredRuleSetObjectJson | StructuredRuleObjectJson>;
|
||||
rules: Array<RuleConfigObject | RuleSetConfigObject>;
|
||||
actions: ActionConfig[];
|
||||
logger: Logger;
|
||||
subredditName: string;
|
||||
@@ -616,7 +617,15 @@ export interface CheckOptions extends Omit<ICheck, 'authorIs' | 'itemIs'>, Runna
|
||||
emitter: EventEmitter
|
||||
}
|
||||
|
||||
export interface CheckJson extends ICheck {
|
||||
/*
|
||||
* Can contain actions/rules as:
|
||||
* - full objects
|
||||
* - string to hydrate IE "url:fsdfd"
|
||||
* - named string IE "namedRule"
|
||||
*
|
||||
* Also can contain itemIs/authorIs as full object or named filter
|
||||
* */
|
||||
export interface CheckConfigData extends ICheck, RunnableBaseJson {
|
||||
/**
|
||||
* The type of event (new submission or new comment) this check should be run against
|
||||
* @examples ["submission", "comment"]
|
||||
@@ -631,7 +640,7 @@ export interface CheckJson extends ICheck {
|
||||
*
|
||||
* **If `rules` is an empty array or not present then `actions` are performed immediately.**
|
||||
* */
|
||||
rules?: Array<RuleSetJson | RuleJson>
|
||||
rules?: (RuleSetConfigData | RuleConfigData | string | IncludesData)[]
|
||||
/**
|
||||
* The `Actions` to run after the check is successfully triggered. ALL `Actions` will run in the order they are listed
|
||||
*
|
||||
@@ -639,7 +648,7 @@ export interface CheckJson extends ICheck {
|
||||
*
|
||||
* @examples [[{"kind": "comment", "content": "this is the content of the comment", "distinguish": true}, {"kind": "lock"}]]
|
||||
* */
|
||||
actions?: Array<ActionTypeJson>
|
||||
actions?: ActionConfigData[]
|
||||
|
||||
/**
|
||||
* If notifications are configured and this is `true` then an `eventActioned` event will be sent when this check is triggered.
|
||||
@@ -651,9 +660,49 @@ export interface CheckJson extends ICheck {
|
||||
cacheUserResult?: UserResultCacheOptions;
|
||||
}
|
||||
|
||||
export interface SubmissionCheckJson extends CheckJson {
|
||||
export interface SubmissionCheckConfigData extends CheckConfigData, TypedRunnableBaseData<SubmissionState> {
|
||||
kind: 'submission'
|
||||
itemIs?: MinimalOrFullFilterJson<SubmissionState>
|
||||
}
|
||||
|
||||
export interface CommentCheckConfigData extends CheckConfigData, TypedRunnableBaseData<CommentState> {
|
||||
kind: 'comment'
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* Can contain actions/rules as:
|
||||
* - full objects
|
||||
* - named string IE "namedRule"
|
||||
*
|
||||
* Also can contain itemIs/authorIs as full object or named filter
|
||||
* */
|
||||
export interface CheckConfigHydratedData extends CheckConfigData {
|
||||
rules?: (RuleSetConfigHydratedData | RuleConfigHydratedData)[]
|
||||
actions?: ActionConfigHydratedData[]
|
||||
}
|
||||
|
||||
export interface SubmissionCheckConfigHydratedData extends CheckConfigHydratedData, TypedRunnableBaseData<SubmissionState> {
|
||||
kind: 'submission'
|
||||
}
|
||||
|
||||
export interface CommentCheckConfigHydratedData extends CheckConfigHydratedData, TypedRunnableBaseData<CommentState> {
|
||||
kind: 'comment'
|
||||
}
|
||||
|
||||
/*
|
||||
* All actions/rules/filters should now be full objects
|
||||
* */
|
||||
export interface CheckConfigObject extends Omit<CheckConfigHydratedData, 'itemIs' | 'authorIs'>, StructuredRunnableBase {
|
||||
rules: Array<RuleSetConfigObject | RuleConfigObject>
|
||||
actions: Array<ActionConfigObject>
|
||||
}
|
||||
|
||||
export interface SubmissionCheckConfigObject extends Omit<CheckConfigObject, 'itemIs' | 'author'>, TypedStructuredRunnableBase<SubmissionState> {
|
||||
kind: 'submission'
|
||||
}
|
||||
|
||||
export interface CommentCheckConfigObject extends Omit<CheckConfigObject, 'itemIs' | 'author'>, TypedStructuredRunnableBase<CommentState> {
|
||||
kind: 'comment'
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -691,33 +740,18 @@ export const userResultCacheDefault: Required<UserResultCacheOptions> = {
|
||||
runActions: true,
|
||||
}
|
||||
|
||||
export interface CommentCheckJson extends CheckJson {
|
||||
kind: 'comment'
|
||||
itemIs?: MinimalOrFullFilterJson<CommentState>
|
||||
}
|
||||
|
||||
export const asStructuredCommentCheckJson = (val: any): val is CommentCheckStructuredJson => {
|
||||
export const asStructuredCommentCheckJson = (val: any): val is CommentCheckConfigObject => {
|
||||
return val.kind === 'comment';
|
||||
}
|
||||
|
||||
export const asStructuredSubmissionCheckJson = (val: any): val is SubmissionCheckStructuredJson => {
|
||||
export const asStructuredSubmissionCheckJson = (val: any): val is SubmissionCheckConfigObject => {
|
||||
return val.kind === 'submission';
|
||||
}
|
||||
|
||||
export type CheckStructuredJson = SubmissionCheckStructuredJson | CommentCheckStructuredJson;
|
||||
// export interface CheckStructuredJson extends CheckJson {
|
||||
// rules: Array<RuleSetObjectJson | RuleObjectJson>
|
||||
// actions: Array<ActionObjectJson>
|
||||
// }
|
||||
export type ActivityCheckConfigValue = string | IncludesData | SubmissionCheckConfigData | CommentCheckConfigData;
|
||||
|
||||
export interface SubmissionCheckStructuredJson extends Omit<SubmissionCheckJson, 'authorIs' | 'itemIs' | 'rules'>, StructuredRunnableBase {
|
||||
rules: Array<StructuredRuleSetObjectJson | StructuredRuleObjectJson>
|
||||
actions: Array<ActionObjectJson>
|
||||
itemIs?: MinimalOrFullFilter<SubmissionState>
|
||||
}
|
||||
export type ActivityCheckConfigData = Exclude<ActivityCheckConfigValue, IncludesData>;
|
||||
|
||||
export interface CommentCheckStructuredJson extends Omit<CommentCheckJson, 'authorIs' | 'itemIs' | 'rules'>, StructuredRunnableBase {
|
||||
rules: Array<StructuredRuleSetObjectJson | StructuredRuleObjectJson>
|
||||
actions: Array<ActionObjectJson>
|
||||
itemIs?: MinimalOrFullFilter<CommentState>
|
||||
}
|
||||
export type ActivityCheckConfigHydratedData = SubmissionCheckConfigHydratedData | CommentCheckConfigHydratedData;
|
||||
|
||||
export type ActivityCheckObject = SubmissionCheckConfigObject | CommentCheckConfigObject;
|
||||
|
||||
@@ -6,4 +6,5 @@ export interface ConfigToObjectOptions {
|
||||
location?: string,
|
||||
jsonDocFunc?: (content: string, location?: string) => AbstractConfigDocument<OperatorJsonConfig>,
|
||||
yamlDocFunc?: (content: string, location?: string) => AbstractConfigDocument<YamlDocument>
|
||||
allowArrays?: boolean
|
||||
}
|
||||
|
||||
@@ -19,6 +19,7 @@ export const parseFromJsonOrYamlToObject = (content: string, options?: ConfigToO
|
||||
location,
|
||||
jsonDocFunc = (content: string, location?: string) => new JsonConfigDocument(content, location),
|
||||
yamlDocFunc = (content: string, location?: string) => new YamlConfigDocument(content, location),
|
||||
allowArrays = false,
|
||||
} = options || {};
|
||||
|
||||
try {
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import {Entity, Column, PrimaryGeneratedColumn, ManyToOne, PrimaryColumn, OneToMany, OneToOne, Index} from "typeorm";
|
||||
import {Entity, Column, ManyToOne, PrimaryColumn, OneToMany, Index} from "typeorm";
|
||||
import {AuthorEntity} from "./AuthorEntity";
|
||||
import {Subreddit} from "./Subreddit";
|
||||
import {CMEvent} from "./CMEvent";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import {Comment} from "snoowrap";
|
||||
import {asComment, getActivityAuthorName, parseRedditFullname, redditThingTypeToPrefix} from "../../util";
|
||||
import {ActivityType} from "../Infrastructure/Reddit";
|
||||
import {activityReports, ActivityType, Report, SnoowrapActivity} from "../Infrastructure/Reddit";
|
||||
import {ActivityReport} from "./ActivityReport";
|
||||
import dayjs, {Dayjs} from "dayjs";
|
||||
|
||||
export interface ActivityEntityOptions {
|
||||
id: string
|
||||
@@ -15,6 +15,7 @@ export interface ActivityEntityOptions {
|
||||
permalink: string
|
||||
author: AuthorEntity
|
||||
submission?: Activity
|
||||
reports?: ActivityReport[]
|
||||
}
|
||||
|
||||
@Entity()
|
||||
@@ -69,6 +70,9 @@ export class Activity {
|
||||
@OneToMany(type => Activity, obj => obj.submission, {nullable: true})
|
||||
comments!: Activity[];
|
||||
|
||||
@OneToMany(type => ActivityReport, act => act.activity, {cascade: ['insert'], eager: true})
|
||||
reports: ActivityReport[] | undefined
|
||||
|
||||
constructor(data?: ActivityEntityOptions) {
|
||||
if(data !== undefined) {
|
||||
this.type = data.type;
|
||||
@@ -78,10 +82,76 @@ export class Activity {
|
||||
this.permalink = data.permalink;
|
||||
this.author = data.author;
|
||||
this.submission = data.submission;
|
||||
this.reports = data.reports !== undefined ? data.reports : undefined;
|
||||
}
|
||||
}
|
||||
|
||||
static fromSnoowrapActivity(subreddit: Subreddit, activity: (Submission | Comment)) {
|
||||
/**
|
||||
* @param {SnoowrapActivity} activity
|
||||
* @param {Dayjs|undefined} lastKnownStateTimestamp Override the last good state (useful when tracked through polling)
|
||||
* */
|
||||
syncReports(activity: SnoowrapActivity, lastKnownStateTimestamp?: Dayjs) {
|
||||
if(activity.num_reports > 0 && (this.reports === undefined || activity.num_reports !== this.reports.length)) {
|
||||
if(this.reports === undefined) {
|
||||
this.reports = [];
|
||||
}
|
||||
const reports = activityReports(activity);
|
||||
// match up existing reports
|
||||
const usedReportEntities: string[] = [];
|
||||
const unsyncedReports: Report[] = [];
|
||||
for(const r of reports) {
|
||||
const matchedEntity = this.reports.find(x => !usedReportEntities.includes(x.id) && x.matchReport(r));
|
||||
if(matchedEntity !== undefined) {
|
||||
usedReportEntities.push(matchedEntity.id);
|
||||
} else {
|
||||
// found an unsynced report
|
||||
unsyncedReports.push(r);
|
||||
}
|
||||
}
|
||||
|
||||
// ideally we only have one report but it's possible (probable) there are more
|
||||
//
|
||||
// to simplify tracking over time we will spread out the "create time" for each report to be between NOW
|
||||
// and the last recorded report, or if no reports then the create time of the activity
|
||||
|
||||
// -- the assumptions about tracking should be good enough for most users because:
|
||||
// * default poll interval is 30 seconds so even if there are more than one reports in that time the resolution is high enough for accurate usage (most mods will use "> 1 report in 1 minute" or larger timescales)
|
||||
// * for populating existing reports (CM has not been tracking since activity creation) we don't want to bunch up all reports at the timestamp which could create false positives,
|
||||
// it's more likely that reports would be spread out than all occurring at the same time.
|
||||
|
||||
// TODO additionally, will allow users to specify minimum required granularity to use when filtering by reports over time
|
||||
|
||||
let lastRecordedTime = lastKnownStateTimestamp;
|
||||
if(lastKnownStateTimestamp === undefined) {
|
||||
lastRecordedTime = this.reports.length > 0 ?
|
||||
// get the latest create date for existing reports
|
||||
this.reports.reduce((acc, curr) => curr.createdAt.isAfter(acc) ? curr.createdAt : acc, dayjs('2000-1-1'))
|
||||
// if no reports then use activity create date
|
||||
: dayjs(activity.created_utc * 1000);
|
||||
}
|
||||
|
||||
// find the amount of time between now and last good timestamp
|
||||
const missingTimespan = dayjs.duration(dayjs().diff(lastRecordedTime));
|
||||
const granularity = Math.floor(missingTimespan.asSeconds());
|
||||
|
||||
// each report will have its create date spaced out (mostly) equally between now and the last good timestamp
|
||||
//
|
||||
// if only one report stick it in exact middle
|
||||
// if more than one than decrease span by 1/4 so that we don't end up having reports dead-on the last timestamp
|
||||
const increment = Math.floor(unsyncedReports.length === 1 ? (granularity / 2) : ((granularity / 1.25) / unsyncedReports.length));
|
||||
|
||||
for(let i = 0; i < unsyncedReports.length; i++) {
|
||||
const r = new ActivityReport({...unsyncedReports[i], activity: this, granularity});
|
||||
r.createdAt = dayjs().subtract(increment * (i + 1), 'seconds');
|
||||
this.reports.push(r);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
static fromSnoowrapActivity(subreddit: Subreddit, activity: SnoowrapActivity, lastKnownStateTimestamp?: dayjs.Dayjs | undefined) {
|
||||
let submission: Activity | undefined;
|
||||
let type: ActivityType = 'submission';
|
||||
let content: string;
|
||||
@@ -99,7 +169,7 @@ export class Activity {
|
||||
const author = new AuthorEntity();
|
||||
author.name = getActivityAuthorName(activity.author);
|
||||
|
||||
return new Activity({
|
||||
const entity = new Activity({
|
||||
id: activity.name,
|
||||
subreddit,
|
||||
type,
|
||||
@@ -107,6 +177,10 @@ export class Activity {
|
||||
permalink: activity.permalink,
|
||||
author,
|
||||
submission
|
||||
})
|
||||
});
|
||||
|
||||
entity.syncReports(activity, lastKnownStateTimestamp);
|
||||
|
||||
return entity;
|
||||
}
|
||||
}
|
||||
|
||||
57
src/Common/Entities/ActivityReport.ts
Normal file
57
src/Common/Entities/ActivityReport.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
import {
|
||||
Entity,
|
||||
Column,
|
||||
ManyToOne, JoinColumn, AfterLoad,
|
||||
} from "typeorm";
|
||||
import {Activity} from "./Activity";
|
||||
import {ManagerEntity} from "./ManagerEntity";
|
||||
import {TimeAwareRandomBaseEntity} from "./Base/TimeAwareRandomBaseEntity";
|
||||
import {Report, ReportType} from "../Infrastructure/Reddit";
|
||||
|
||||
@Entity()
|
||||
export class ActivityReport extends TimeAwareRandomBaseEntity {
|
||||
|
||||
@Column({nullable: false, length: 500})
|
||||
reason!: string
|
||||
|
||||
@Column({nullable: false, length: 20})
|
||||
type!: ReportType
|
||||
|
||||
@Column({nullable: true, length: 100})
|
||||
author?: string
|
||||
|
||||
@Column("int", {nullable: false})
|
||||
granularity: number = 0;
|
||||
|
||||
@ManyToOne(type => Activity, act => act.reports, {cascade: ['update']})
|
||||
@JoinColumn({name: 'activityId'})
|
||||
activity!: Activity;
|
||||
|
||||
@Column({nullable: false, name: 'activityId'})
|
||||
activityId!: string
|
||||
|
||||
constructor(data?: Report & { activity: Activity, granularity: number }) {
|
||||
super();
|
||||
if (data !== undefined) {
|
||||
this.reason = data.reason;
|
||||
this.type = data.type;
|
||||
this.author = data.author;
|
||||
this.activity = data.activity;
|
||||
this.activityId = data.activity.id;
|
||||
this.granularity = data.granularity
|
||||
}
|
||||
}
|
||||
|
||||
matchReport(report: Report): boolean {
|
||||
return this.reason === report.reason
|
||||
&& this.type === report.type
|
||||
&& this.author === report.author;
|
||||
}
|
||||
|
||||
@AfterLoad()
|
||||
convertPrimitives() {
|
||||
if(this.author === null) {
|
||||
this.author = undefined;
|
||||
}
|
||||
}
|
||||
}
|
||||
115
src/Common/Influx/InfluxClient.ts
Normal file
115
src/Common/Influx/InfluxClient.ts
Normal file
@@ -0,0 +1,115 @@
|
||||
import {InfluxConfig} from "./interfaces";
|
||||
import {InfluxDB, Point, WriteApi, setLogger} from "@influxdata/influxdb-client";
|
||||
import {HealthAPI} from "@influxdata/influxdb-client-apis";
|
||||
import dayjs, {Dayjs} from "dayjs";
|
||||
import {Logger} from "winston";
|
||||
import {mergeArr} from "../../util";
|
||||
import {CMError} from "../../Utils/Errors";
|
||||
|
||||
export interface InfluxClientConfig extends InfluxConfig {
|
||||
client?: InfluxDB
|
||||
ready?: boolean
|
||||
}
|
||||
|
||||
export class InfluxClient {
|
||||
config: InfluxConfig;
|
||||
client: InfluxDB;
|
||||
write: WriteApi;
|
||||
health: HealthAPI;
|
||||
|
||||
tags: Record<string, string>;
|
||||
|
||||
logger: Logger;
|
||||
|
||||
ready: boolean;
|
||||
lastReadyAttempt: Dayjs | undefined;
|
||||
|
||||
constructor(config: InfluxClientConfig, logger: Logger, tags: Record<string, string> = {}) {
|
||||
|
||||
const {client, ready = false, ...rest} = config;
|
||||
|
||||
this.logger = logger.child({
|
||||
labels: ['Influx']
|
||||
}, mergeArr);
|
||||
|
||||
this.config = rest;
|
||||
this.ready = ready;
|
||||
if(client !== undefined) {
|
||||
this.client = client;
|
||||
} else {
|
||||
this.client = InfluxClient.createClient(this.config);
|
||||
setLogger(this.logger);
|
||||
}
|
||||
this.write = this.client.getWriteApi(config.credentials.org, config.credentials.bucket, 'ms');
|
||||
this.tags = tags;
|
||||
this.write.useDefaultTags(tags);
|
||||
this.health = new HealthAPI(this.client);
|
||||
}
|
||||
|
||||
async isReady() {
|
||||
if (this.ready) {
|
||||
return true;
|
||||
}
|
||||
if (this.lastReadyAttempt === undefined || dayjs().diff(this.lastReadyAttempt, 's') >= 10) {
|
||||
if (!(await this.testConnection())) {
|
||||
this.logger.warn('Influx endpoint is not ready');
|
||||
} else {
|
||||
this.ready = true;
|
||||
}
|
||||
} else {
|
||||
this.logger.debug(`Influx endpoint testing throttled. Waiting another ${10 - dayjs().diff(this.lastReadyAttempt, 's')} seconds`);
|
||||
}
|
||||
return this.ready;
|
||||
}
|
||||
|
||||
async testConnection() {
|
||||
try {
|
||||
const result = await this.health.getHealth();
|
||||
if (result.status === 'fail') {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
this.logger.error(new CMError(`Testing health of Influx endpoint failed`, {cause: e, isSerious: false}));
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async writePoint(data: Point | Point[]) {
|
||||
if (await this.isReady()) {
|
||||
if (Array.isArray(data)) {
|
||||
this.write.writePoints(data);
|
||||
} else {
|
||||
this.write.writePoint(data);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async flush() {
|
||||
if (await this.isReady()) {
|
||||
try {
|
||||
await this.write.flush(true);
|
||||
} catch (e: any) {
|
||||
this.logger.error(new CMError('Failed to flush data to Influx', {cause: e}));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static createClient(config: InfluxConfig): InfluxDB {
|
||||
return new InfluxDB({
|
||||
url: config.credentials.url,
|
||||
token: config.credentials.token,
|
||||
writeOptions: {
|
||||
defaultTags: config.defaultTags
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
childClient(logger: Logger, tags: Record<string, string> = {}) {
|
||||
return new InfluxClient({
|
||||
...this.config,
|
||||
client: this.client,
|
||||
ready: this.ready
|
||||
}, logger, {...this.tags, ...tags});
|
||||
}
|
||||
}
|
||||
13
src/Common/Influx/interfaces.ts
Normal file
13
src/Common/Influx/interfaces.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import {InfluxDB, WriteApi} from "@influxdata/influxdb-client/dist";
|
||||
|
||||
export interface InfluxConfig {
|
||||
credentials: InfluxCredentials
|
||||
defaultTags?: Record<string, string>
|
||||
}
|
||||
|
||||
export interface InfluxCredentials {
|
||||
url: string
|
||||
token: string
|
||||
org: string
|
||||
bucket: string
|
||||
}
|
||||
@@ -1,5 +1,8 @@
|
||||
import {StructuredRunnableBase} from "./Runnable";
|
||||
import {ActionJson} from "../types";
|
||||
import {IncludesData} from "./Includes";
|
||||
|
||||
export type ActionObjectJson = Exclude<ActionJson, string>;
|
||||
export type StructuredActionObjectJson = Omit<ActionObjectJson, 'authorIs' | 'itemIs'> & StructuredRunnableBase
|
||||
export type ActionConfigData = ActionJson;
|
||||
export type ActionConfigHydratedData = Exclude<ActionConfigData, IncludesData>;
|
||||
export type ActionConfigObject = Exclude<ActionConfigHydratedData, string>;
|
||||
export type StructuredActionObjectJson = Omit<ActionConfigObject, 'authorIs' | 'itemIs'> & StructuredRunnableBase
|
||||
|
||||
@@ -249,3 +249,31 @@ export type ModActionType =
|
||||
'REMOVAL' |
|
||||
'SPAM' |
|
||||
'APPROVAL';
|
||||
|
||||
export type UserNoteType =
|
||||
'gooduser' |
|
||||
'spamwatch' |
|
||||
'spamwarn' |
|
||||
'abusewarn' |
|
||||
'ban' |
|
||||
'permban' |
|
||||
'botban' |
|
||||
string;
|
||||
|
||||
export const userNoteTypes = ['gooduser', 'spamwatch', 'spamwarn', 'abusewarn', 'ban', 'permban', 'botban'];
|
||||
|
||||
export type ConfigFragmentValidationFunc = (data: object, fetched: boolean) => boolean;
|
||||
|
||||
export interface WikiContext {
|
||||
wiki: string
|
||||
subreddit?: string
|
||||
}
|
||||
|
||||
export interface ExternalUrlContext {
|
||||
url: string
|
||||
}
|
||||
|
||||
export interface UrlContext {
|
||||
value: string
|
||||
context: WikiContext | ExternalUrlContext
|
||||
}
|
||||
|
||||
@@ -2,8 +2,9 @@ import {StringOperator} from "./Atomic";
|
||||
import {Duration} from "dayjs/plugin/duration";
|
||||
import InvalidRegexError from "../../Utils/InvalidRegexError";
|
||||
import dayjs, {Dayjs, OpUnitType} from "dayjs";
|
||||
import {SimpleError} from "../../Utils/Errors";
|
||||
import { parseDuration } from "../../util";
|
||||
import {CMError, SimpleError} from "../../Utils/Errors";
|
||||
import {escapeRegex, parseDuration, parseDurationFromString, parseStringToRegex} from "../../util";
|
||||
import {ReportType} from "./Reddit";
|
||||
|
||||
export interface DurationComparison {
|
||||
operator: StringOperator,
|
||||
@@ -15,8 +16,10 @@ export interface GenericComparison extends HasDisplayText {
|
||||
value: number,
|
||||
isPercent: boolean,
|
||||
extra?: string,
|
||||
groups?: Record<string, string>
|
||||
displayText: string,
|
||||
duration?: Duration
|
||||
durationText?: string
|
||||
}
|
||||
|
||||
export interface HasDisplayText {
|
||||
@@ -53,18 +56,20 @@ export const parseGenericValueComparison = (val: string, options?: {
|
||||
const groups = matches.groups as any;
|
||||
|
||||
let duration: Duration | undefined;
|
||||
let durationText: string | undefined;
|
||||
|
||||
if(typeof groups.extra === 'string' && groups.extra.trim() !== '') {
|
||||
try {
|
||||
duration = parseDuration(groups.extra, false);
|
||||
} catch (e) {
|
||||
// if it returns an invalid regex just means they didn't
|
||||
if (requireDuration || !(e instanceof InvalidRegexError)) {
|
||||
throw e;
|
||||
}
|
||||
try {
|
||||
const durationResult = parseDurationFromString(val, false);
|
||||
if(durationResult.length > 1) {
|
||||
throw new SimpleError(`Must only have one Duration value, found ${durationResult.length} in: ${val}`);
|
||||
}
|
||||
duration = durationResult[0].duration;
|
||||
durationText = durationResult[0].original;
|
||||
} catch (e) {
|
||||
// if it returns an invalid regex just means they didn't
|
||||
if (requireDuration || !(e instanceof InvalidRegexError)) {
|
||||
throw e;
|
||||
}
|
||||
} else if(requireDuration) {
|
||||
throw new SimpleError(`Comparison must contain a duration value but none was found. Given: ${val}`);
|
||||
}
|
||||
|
||||
const displayParts = [`${groups.opStr} ${groups.value}`];
|
||||
@@ -73,13 +78,33 @@ export const parseGenericValueComparison = (val: string, options?: {
|
||||
displayParts.push('%');
|
||||
}
|
||||
|
||||
const {
|
||||
opStr,
|
||||
value,
|
||||
percent,
|
||||
extra,
|
||||
...rest
|
||||
} = matches.groups || {};
|
||||
|
||||
const extraGroups: Record<string,string> = {};
|
||||
let hasExtraGroups = false;
|
||||
|
||||
for(const [k,v] of Object.entries(rest)) {
|
||||
if(typeof v === 'string' && v.trim() !== '') {
|
||||
extraGroups[k] = v;
|
||||
hasExtraGroups = true;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
operator: groups.opStr as StringOperator,
|
||||
value: Number.parseFloat(groups.value),
|
||||
isPercent: hasPercent,
|
||||
extra: groups.extra,
|
||||
groups: hasExtraGroups ? extraGroups : undefined,
|
||||
displayText: displayParts.join(''),
|
||||
duration
|
||||
duration,
|
||||
durationText,
|
||||
}
|
||||
}
|
||||
const GENERIC_VALUE_PERCENT_COMPARISON = /^\s*(?<opStr>>|>=|<|<=)\s*(?<value>\d+)\s*(?<percent>%)?(?<extra>.*)$/
|
||||
@@ -93,18 +118,16 @@ export const parseGenericValueOrPercentComparison = (val: string, options?: {req
|
||||
const DURATION_COMPARISON_REGEX: RegExp = /^\s*(?<opStr>>|>=|<|<=)\s*(?<time>\d+)\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\s*$/;
|
||||
const DURATION_COMPARISON_REGEX_URL = 'https://regexr.com/609n8';
|
||||
export const parseDurationComparison = (val: string): DurationComparison => {
|
||||
const matches = val.match(DURATION_COMPARISON_REGEX);
|
||||
if (matches === null) {
|
||||
throw new InvalidRegexError(DURATION_COMPARISON_REGEX, val, DURATION_COMPARISON_REGEX_URL)
|
||||
const result = parseGenericValueComparison(val, {requireDuration: true});
|
||||
if(result.isPercent) {
|
||||
throw new InvalidRegexError(DURATION_COMPARISON_REGEX, val, DURATION_COMPARISON_REGEX_URL, 'Duration comparison value cannot be a percentage');
|
||||
}
|
||||
const groups = matches.groups as any;
|
||||
const dur: Duration = dayjs.duration(groups.time, groups.unit);
|
||||
if (!dayjs.isDuration(dur)) {
|
||||
throw new SimpleError(`Parsed value '${val}' did not result in a valid Dayjs Duration`);
|
||||
if(result.value < 0) {
|
||||
throw new InvalidRegexError(DURATION_COMPARISON_REGEX, val, DURATION_COMPARISON_REGEX_URL,'Duration value cannot be negative');
|
||||
}
|
||||
return {
|
||||
operator: groups.opStr as StringOperator,
|
||||
duration: dur
|
||||
operator: result.operator as StringOperator,
|
||||
duration: result.duration as Duration
|
||||
}
|
||||
}
|
||||
export const dateComparisonTextOp = (val1: Dayjs, strOp: StringOperator, val2: Dayjs, granularity?: OpUnitType): boolean => {
|
||||
@@ -139,3 +162,51 @@ export const comparisonTextOp = (val1: number, strOp: string, val2: number): boo
|
||||
throw new Error(`${strOp} was not a recognized operator`);
|
||||
}
|
||||
}
|
||||
|
||||
export interface ReportComparison extends Omit<GenericComparison, 'groups'> {
|
||||
reportType?: ReportType
|
||||
reasonRegex?: RegExp
|
||||
reasonMatch?: string
|
||||
}
|
||||
|
||||
const REPORT_COMPARISON = /^\s*(?<opStr>>|>=|<|<=)\s*(?<value>\d+)(?<percent>\s*%)?(?:\s+(?<reportType>mods?|users?))?(?:\s+(?<reasonMatch>["'].*["']|\/.*\/))?.*(?<time>\d+)?\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\s*$/i
|
||||
const REPORT_REASON_LITERAL = /["'](.*)["']/i
|
||||
export const parseReportComparison = (str: string): ReportComparison => {
|
||||
const generic = parseGenericValueComparison(str, {reg: REPORT_COMPARISON});
|
||||
|
||||
|
||||
const {
|
||||
groups: {
|
||||
reportType,
|
||||
reasonMatch
|
||||
} = {},
|
||||
...rest
|
||||
} = generic;
|
||||
|
||||
const result: ReportComparison = {...rest, reasonMatch};
|
||||
|
||||
if(reportType !== undefined) {
|
||||
if(reportType.toLocaleLowerCase().includes('mod')) {
|
||||
result.reportType = 'mod' as ReportType;
|
||||
} else if (reportType.toLocaleLowerCase().includes('user')) {
|
||||
result.reportType = 'user' as ReportType;
|
||||
}
|
||||
}
|
||||
if(reasonMatch !== undefined) {
|
||||
const literalMatch = reasonMatch.match(REPORT_REASON_LITERAL);
|
||||
if(literalMatch !== null) {
|
||||
const cleanLiteralMatch = `/.*${escapeRegex(literalMatch[1].trim())}.*/`;
|
||||
result.reasonRegex = parseStringToRegex(cleanLiteralMatch, 'i');
|
||||
if(result.reasonRegex === undefined) {
|
||||
throw new CMError(`Could not convert reason match value to Regex: ${cleanLiteralMatch}`, {isSerious: false})
|
||||
}
|
||||
} else {
|
||||
result.reasonRegex = parseStringToRegex(reasonMatch, 'i');
|
||||
if(result.reasonRegex === undefined) {
|
||||
throw new CMError(`Could not convert reason match value to Regex: ${reasonMatch}`, {isSerious: false})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -405,20 +405,40 @@ export interface ActivityState {
|
||||
approved?: boolean | ModeratorNames | ModeratorNames[] | ModeratorNameCriteria
|
||||
score?: CompareValue
|
||||
/**
|
||||
* A string containing a comparison operator and a value to compare against
|
||||
* A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint
|
||||
*
|
||||
* The syntax is `(< OR > OR <= OR >=) <number>`
|
||||
* The syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`
|
||||
*
|
||||
* If only comparison and number is given then defaults to TOTAL reports on an Activity.
|
||||
*
|
||||
* * EX `> 2` => greater than 2 total reports
|
||||
*
|
||||
* Defaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:
|
||||
* Type (optional) determines which type of reports to look at:
|
||||
*
|
||||
* * EX `> 3 mod` => greater than 3 mod reports
|
||||
* * EX `>= 1 user` => greater than 1 user report
|
||||
* * `mod` -- mod reports
|
||||
* * EX `> 3 mod` => greater than 3 mod reports
|
||||
* * `user` -- user reports
|
||||
* * EX `> 3 user` => greater than 3 user reports
|
||||
*
|
||||
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
|
||||
* Report reason qualifiers can be:
|
||||
*
|
||||
* * enclosed double or single quotes -- report reason contains
|
||||
* * EX `> 1 "misinformation" => greater than 1 report with reason containing "misinformation"
|
||||
* * enclosed in backslashes -- match regex
|
||||
* * EX `> 1 \harassment towards .*\` => greater than 1 report with reason matching regex \harassment towards .*\
|
||||
*
|
||||
* Type and reason qualifiers can be used together:
|
||||
*
|
||||
* EX `> 2 user "misinformation" => greater than 2 user reports with reasons containing "misinformation"
|
||||
*
|
||||
* The time constraint filter reports created between NOW and [timeConstraint] in the past:
|
||||
*
|
||||
* * `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago
|
||||
* * `> 2 user "misinformation" in 2 hours` => more than 2 user reports containing "misinformation" created between NOW and 2 hours ago
|
||||
*
|
||||
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)(\s*%)?(\s+(?:mods?|users?))?(\s+(?:["'].*["']|\/.*\/))?.*(\d+)?\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\s*$
|
||||
* */
|
||||
reports?: CompareValue
|
||||
reports?: string
|
||||
age?: DurationComparor
|
||||
/**
|
||||
* Test whether the activity is present in dispatched/delayed activities
|
||||
@@ -487,6 +507,18 @@ export interface SubmissionState extends ActivityState {
|
||||
* Is the submission a reddit-hosted image or video?
|
||||
* */
|
||||
isRedditMediaDomain?: boolean
|
||||
|
||||
/**
|
||||
* Compare the upvote ratio for this Submission, expressed as a whole number
|
||||
*
|
||||
* Can be either a comparison string or a number. If a number then CM assumes upvote ratio must be greater than or equal to this.
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* * `< 90` => less than 90% upvoted
|
||||
* * 45 => greater than or equal to 45% upvoted
|
||||
* */
|
||||
upvoteRatio?: number | CompareValue
|
||||
}
|
||||
|
||||
export const cmActivityProperties = ['submissionState', 'score', 'reports', 'removed', 'deleted', 'filtered', 'age', 'title'];
|
||||
|
||||
45
src/Common/Infrastructure/Includes.ts
Normal file
45
src/Common/Infrastructure/Includes.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import {ConfigFormat} from "./Atomic";
|
||||
|
||||
export interface IncludesData {
|
||||
/**
|
||||
* The special-form path to the config fragment to retrieve.
|
||||
*
|
||||
* If value starts with `wiki:` then the proceeding value will be used to get a wiki page from the current subreddit
|
||||
*
|
||||
* * EX `wiki:botconfig/mybot` tries to get `https://reddit.com/r/currentSubreddit/wiki/botconfig/mybot`
|
||||
*
|
||||
* If the value starts with `wiki:` and ends with `|someValue` then `someValue` will be used as the base subreddit for the wiki page
|
||||
*
|
||||
* * EX `wiki:replytemplates/test|ContextModBot` tries to get `https://reddit.com/r/ContextModBot/wiki/replytemplates/test`
|
||||
*
|
||||
* If the value starts with `url:` then the value is fetched as an external url and expects raw text returned
|
||||
*
|
||||
* * EX `url:https://pastebin.com/raw/38qfL7mL` tries to get the text response of `https://pastebin.com/raw/38qfL7mL`
|
||||
* */
|
||||
path: string
|
||||
/**
|
||||
* An unused hint about the content type. Not implemented yet
|
||||
* */
|
||||
type?: ConfigFormat
|
||||
/**
|
||||
* Control caching for the config fragment.
|
||||
*
|
||||
* If not specified the value for `wikiTTL` will be used
|
||||
*
|
||||
* * If true then value is cached forever
|
||||
* * If false then value is never cached
|
||||
* * If a number then the number of seconds to cache value
|
||||
* * If 'response' then CM will attempt to use Cache-Control or Expires headers from the response to determine how long to cache the value
|
||||
* */
|
||||
ttl?: number | boolean | 'response'
|
||||
}
|
||||
|
||||
export type IncludesUrl = `url:${string}`;
|
||||
export type IncludesWiki = `wiki:${string}`;
|
||||
export type IncludesString = IncludesUrl | IncludesWiki;
|
||||
|
||||
export type IncludesType = string | IncludesData;
|
||||
|
||||
export const asIncludesData = (val: any): val is IncludesData => {
|
||||
return val !== null && typeof val === 'object' && 'path' in val;
|
||||
}
|
||||
@@ -30,3 +30,48 @@ export interface CachedFetchedActivitiesResult {
|
||||
export interface FetchedActivitiesResult extends CachedFetchedActivitiesResult {
|
||||
post: SnoowrapActivity[]
|
||||
}
|
||||
|
||||
export type ReportType = 'mod' | 'user';
|
||||
|
||||
export interface Report {
|
||||
reason: string
|
||||
type: ReportType
|
||||
author?: string
|
||||
snoozed: boolean
|
||||
canSnooze: boolean
|
||||
}
|
||||
|
||||
export type RawRedditUserReport = [
|
||||
string, // reason
|
||||
number, // number of reports with this reason
|
||||
boolean, // is report snoozed
|
||||
boolean // can the reports be snoozed
|
||||
];
|
||||
|
||||
export type RawRedditModReport = [string, string];
|
||||
|
||||
export const activityReports = (activity: SnoowrapActivity): Report[] => {
|
||||
const reports: Report[] = [];
|
||||
for(const r of (activity.user_reports as unknown as RawRedditUserReport[])) {
|
||||
const report = {
|
||||
reason: r[0],
|
||||
type: 'user' as ReportType,
|
||||
snoozed: r[2],
|
||||
canSnooze: r[3]
|
||||
};
|
||||
for(let i = 0; i < r[1]; i++) {
|
||||
reports.push(report);
|
||||
}
|
||||
}
|
||||
|
||||
for(const r of (activity.mod_reports as unknown as RawRedditModReport[])) {
|
||||
reports.push({
|
||||
reason: r[0],
|
||||
type: 'mod' as ReportType,
|
||||
author: r[1],
|
||||
snoozed: false,
|
||||
canSnooze: false
|
||||
})
|
||||
}
|
||||
return reports;
|
||||
}
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
import {StructuredRunnableBase} from "./Runnable";
|
||||
import {RuleSetObjectJson} from "../../Rule/RuleSet";
|
||||
import {RuleSetConfigObject} from "../../Rule/RuleSet";
|
||||
import {RuleObjectJsonTypes} from "../types";
|
||||
import {IncludesData} from "./Includes";
|
||||
|
||||
export type RuleJson = RuleObjectJsonTypes | string;
|
||||
export type RuleObjectJson = Exclude<RuleJson, string>
|
||||
export type StructuredRuleObjectJson = Omit<RuleObjectJson, 'authorIs' | 'itemIs'> & StructuredRunnableBase
|
||||
export type StructuredRuleSetObjectJson = Omit<RuleSetObjectJson, 'rules'> & {
|
||||
rules: StructuredRuleObjectJson[]
|
||||
export type RuleConfigData = RuleObjectJsonTypes | string | IncludesData;
|
||||
export type RuleConfigHydratedData = Exclude<RuleConfigData, IncludesData>
|
||||
export type RuleConfigObject = Exclude<RuleConfigHydratedData, string>
|
||||
export type StructuredRuleConfigObject = Omit<RuleConfigObject, 'authorIs' | 'itemIs'> & StructuredRunnableBase
|
||||
export type StructuredRuleSetConfigObject = Omit<RuleSetConfigObject, 'rules'> & {
|
||||
rules: StructuredRuleConfigObject[]
|
||||
}
|
||||
|
||||
@@ -10,11 +10,16 @@ export interface RunnableBaseOptions extends Omit<RunnableBaseJson, 'itemIs' | '
|
||||
authorIs?: MinimalOrFullFilter<AuthorCriteria>
|
||||
}
|
||||
|
||||
export interface StructuredRunnableBase {
|
||||
export interface StructuredRunnableBase extends RunnableBaseJson {
|
||||
itemIs?: MinimalOrFullFilter<TypedActivityState>
|
||||
authorIs?: MinimalOrFullFilter<AuthorCriteria>
|
||||
}
|
||||
|
||||
export interface TypedStructuredRunnableBase<T> extends TypedRunnableBaseData<T> {
|
||||
itemIs?: MinimalOrFullFilter<T>
|
||||
authorIs?: MinimalOrFullFilter<AuthorCriteria>
|
||||
}
|
||||
|
||||
export interface RunnableBaseJson {
|
||||
/**
|
||||
* A list of criteria to test the state of the `Activity` against before running the check.
|
||||
@@ -31,3 +36,10 @@ export interface RunnableBaseJson {
|
||||
* */
|
||||
authorIs?: MinimalOrFullFilterJson<AuthorCriteria>
|
||||
}
|
||||
|
||||
export interface TypedRunnableBaseData<T extends TypedActivityState> extends RunnableBaseJson {
|
||||
/**
|
||||
* If present then these Author criteria are checked before running the Check. If criteria fails then the Check will fail.
|
||||
* */
|
||||
authorIs?: MinimalOrFullFilterJson<AuthorCriteria>
|
||||
}
|
||||
|
||||
80
src/Common/Migrations/Database/MigrationUtil.ts
Normal file
80
src/Common/Migrations/Database/MigrationUtil.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
import {TableIndex} from "typeorm";
|
||||
|
||||
/**
|
||||
* Boilerplate for creating generic index
|
||||
* */
|
||||
export const index = (prefix: string, columns: string[], unique = true) => new TableIndex({
|
||||
name: `IDX_${unique ? 'UN_' : ''}${prefix}_${columns.join('-')}`,
|
||||
columnNames: columns,
|
||||
isUnique: unique,
|
||||
});
|
||||
/**
|
||||
* Create index on id column
|
||||
* */
|
||||
export const idIndex = (prefix: string, unique: boolean) => index(prefix, ['id'], unique);
|
||||
|
||||
/**
|
||||
* Boilerplate primary key column for random ID
|
||||
* */
|
||||
export const randomIdColumn = () => ({
|
||||
name: 'id',
|
||||
type: 'varchar',
|
||||
length: '20',
|
||||
isPrimary: true,
|
||||
isUnique: true,
|
||||
});
|
||||
|
||||
/**
|
||||
* Create a time data column based on database type
|
||||
* */
|
||||
export const timeAtColumn = (columnName: string, dbType: string, nullable?: boolean) => {
|
||||
const dbSpecifics = dbType === 'postgres' ? {
|
||||
type: 'timestamptz'
|
||||
} : {
|
||||
type: 'datetime',
|
||||
// required to get millisecond precision on mysql/mariadb
|
||||
// https://mariadb.com/kb/en/datetime/
|
||||
// https://dev.mysql.com/doc/refman/8.0/en/fractional-seconds.html
|
||||
length: '3',
|
||||
}
|
||||
return {
|
||||
name: columnName,
|
||||
isNullable: nullable ?? false,
|
||||
...dbSpecifics
|
||||
}
|
||||
}
|
||||
export const createdAtColumn = (type: string) => timeAtColumn('createdAt', type);
|
||||
const updatedAtColumn = (type: string) => timeAtColumn('updatedAt', type);
|
||||
const createdUpdatedAtColumns = (type: string) => [
|
||||
timeAtColumn('createdAt', type),
|
||||
timeAtColumn('updatedAt', type)
|
||||
];
|
||||
export const createdAtIndex = (prefix: string) => index(prefix, ['createdAt'], false);
|
||||
|
||||
const updatedAtIndex = (prefix: string) => index(prefix, ['updatedAt'], false);
|
||||
const createdUpdatedAtIndices = (prefix: string) => {
|
||||
return [
|
||||
createdAtIndex(prefix),
|
||||
updatedAtIndex(prefix)
|
||||
]
|
||||
}
|
||||
/**
|
||||
* Boilerplate for filter (itemIs, authorIs) FK column -- uses FK is filter ID
|
||||
* */
|
||||
const filterColumn = (name: string) => ({
|
||||
name,
|
||||
type: 'varchar',
|
||||
length: '20',
|
||||
isNullable: true
|
||||
});
|
||||
const authorIsColumn = () => filterColumn('authorIs');
|
||||
const itemIsColumn = () => filterColumn('itemIs');
|
||||
export const filterColumns = () => ([authorIsColumn(), itemIsColumn()]);
|
||||
const authorIsIndex = (prefix: string) => index(prefix, ['authorIs']);
|
||||
const itemIsIndex = (prefix: string) => index(prefix, ['itemIs']);
|
||||
export const filterIndices = (prefix: string) => {
|
||||
return [
|
||||
authorIsIndex(prefix),
|
||||
itemIsIndex(prefix)
|
||||
]
|
||||
}
|
||||
@@ -1,86 +1,12 @@
|
||||
import {MigrationInterface, QueryRunner, Table, TableIndex, TableColumn, TableForeignKey} from "typeorm";
|
||||
|
||||
const randomIdColumn = () => ({
|
||||
name: 'id',
|
||||
type: 'varchar',
|
||||
length: '20',
|
||||
isPrimary: true,
|
||||
isUnique: true,
|
||||
});
|
||||
|
||||
const timeAtColumn = (columnName: string, dbType: string, nullable?: boolean) => {
|
||||
const dbSpecifics = dbType === 'postgres' ? {
|
||||
type: 'timestamptz'
|
||||
} : {
|
||||
type: 'datetime',
|
||||
// required to get millisecond precision on mysql/mariadb
|
||||
// https://mariadb.com/kb/en/datetime/
|
||||
// https://dev.mysql.com/doc/refman/8.0/en/fractional-seconds.html
|
||||
length: '3',
|
||||
}
|
||||
return {
|
||||
name: columnName,
|
||||
isNullable: nullable ?? false,
|
||||
...dbSpecifics
|
||||
}
|
||||
}
|
||||
|
||||
const createdAtColumn = (type: string) => timeAtColumn('createdAt', type);
|
||||
const updatedAtColumn = (type: string) => timeAtColumn('updatedAt', type);
|
||||
|
||||
const createdUpdatedAtColumns = (type: string) => [
|
||||
timeAtColumn('createdAt', type),
|
||||
timeAtColumn('updatedAt', type)
|
||||
];
|
||||
|
||||
|
||||
const createdAtIndex = (prefix: string) => new TableIndex({
|
||||
name: `IDX_${prefix}_createdAt`,
|
||||
columnNames: ['createdAt']
|
||||
});
|
||||
|
||||
const updatedAtIndex = (prefix: string) => new TableIndex({
|
||||
name: `IDX_${prefix}_updatedAt`,
|
||||
columnNames: ['updatedAt']
|
||||
})
|
||||
|
||||
const createdUpdatedAtIndices = (prefix: string) => {
|
||||
return [
|
||||
createdAtIndex(prefix),
|
||||
updatedAtIndex(prefix)
|
||||
]
|
||||
}
|
||||
|
||||
const filterColumn = (name: string) => ({
|
||||
name,
|
||||
type: 'varchar',
|
||||
length: '20',
|
||||
isNullable: true
|
||||
});
|
||||
|
||||
const authorIsColumn = () => filterColumn('authorIs');
|
||||
const itemIsColumn = () => filterColumn('itemIs');
|
||||
|
||||
const filterColumns = () => ([authorIsColumn(), itemIsColumn()]);
|
||||
|
||||
const authorIsIndex = (prefix: string) => new TableIndex({
|
||||
name: `IDX_${prefix}_authorIs`,
|
||||
columnNames: ['authorIs'],
|
||||
isUnique: true,
|
||||
});
|
||||
|
||||
const itemIsIndex = (prefix: string) => new TableIndex({
|
||||
name: `IDX_${prefix}_itemIs`,
|
||||
columnNames: ['itemIs'],
|
||||
isUnique: true
|
||||
});
|
||||
|
||||
const filterIndices = (prefix: string) => {
|
||||
return [
|
||||
authorIsIndex(prefix),
|
||||
itemIsIndex(prefix)
|
||||
]
|
||||
}
|
||||
import {MigrationInterface, QueryRunner, Table, TableIndex} from "typeorm";
|
||||
import {
|
||||
createdAtColumn,
|
||||
createdAtIndex,
|
||||
filterColumns,
|
||||
filterIndices,
|
||||
randomIdColumn,
|
||||
timeAtColumn
|
||||
} from "../MigrationUtil";
|
||||
|
||||
export class initApi1642180264563 implements MigrationInterface {
|
||||
name = 'initApi1642180264563'
|
||||
|
||||
@@ -1,12 +1,5 @@
|
||||
import {MigrationInterface, QueryRunner, Table, TableIndex} from "typeorm"
|
||||
|
||||
const index = (prefix: string, columns: string[], unique = true) => new TableIndex({
|
||||
name: `IDX_${unique ? 'UN_' : ''}${prefix}_${columns.join('-')}_MIG`,
|
||||
columnNames: columns,
|
||||
isUnique: unique,
|
||||
});
|
||||
|
||||
const idIndex = (prefix: string, unique: boolean) => index(prefix, ['id'], unique);
|
||||
import {MigrationInterface, QueryRunner, Table} from "typeorm"
|
||||
import {idIndex, index} from "../MigrationUtil";
|
||||
|
||||
export class indexes1653586738904 implements MigrationInterface {
|
||||
|
||||
|
||||
@@ -0,0 +1,62 @@
|
||||
import {MigrationInterface, QueryRunner, Table, TableIndex} from "typeorm"
|
||||
import {createdAtColumn, createdAtIndex, idIndex, index, randomIdColumn} from "../MigrationUtil";
|
||||
|
||||
export class reportTracking1657632517934 implements MigrationInterface {
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
|
||||
const dbType = queryRunner.connection.driver.options.type;
|
||||
|
||||
await queryRunner.createTable(
|
||||
new Table({
|
||||
name: 'ActivityReport',
|
||||
columns: [
|
||||
randomIdColumn(),
|
||||
{
|
||||
name: 'activityId',
|
||||
type: 'varchar',
|
||||
length: '20',
|
||||
isNullable: false
|
||||
},
|
||||
{
|
||||
name: 'reason',
|
||||
type: 'varchar',
|
||||
length: '500',
|
||||
isNullable: false
|
||||
},
|
||||
{
|
||||
name: 'type',
|
||||
type: 'varchar',
|
||||
length: '200',
|
||||
isNullable: false
|
||||
},
|
||||
{
|
||||
name: 'author',
|
||||
type: 'varchar',
|
||||
length: '100',
|
||||
isNullable: true
|
||||
},
|
||||
{
|
||||
name: 'granularity',
|
||||
type: 'int',
|
||||
isNullable: false
|
||||
},
|
||||
createdAtColumn(dbType),
|
||||
],
|
||||
indices: [
|
||||
idIndex('ActivityReport', true),
|
||||
index('ActivityReport', ['activityId'], false),
|
||||
index('ActivityReportReason', ['reason'], false),
|
||||
createdAtIndex('report'),
|
||||
]
|
||||
}),
|
||||
true,
|
||||
true,
|
||||
true
|
||||
);
|
||||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
}
|
||||
|
||||
}
|
||||
@@ -42,4 +42,4 @@ export const filterCriteriaDefault: FilterCriteriaDefaults = {
|
||||
export const defaultDataDir = path.resolve(__dirname, '../..');
|
||||
export const defaultConfigFilenames = ['config.json', 'config.yaml'];
|
||||
|
||||
export const VERSION = '0.11.1';
|
||||
export const VERSION = '0.11.4';
|
||||
|
||||
@@ -6,7 +6,6 @@ import Comment from "snoowrap/dist/objects/Comment";
|
||||
import RedditUser from "snoowrap/dist/objects/RedditUser";
|
||||
import {DataSource} from "typeorm";
|
||||
import {JsonOperatorConfigDocument, YamlOperatorConfigDocument} from "./Config/Operator";
|
||||
import {CommentCheckJson, SubmissionCheckJson} from "../Check";
|
||||
import {SafeDictionary} from "ts-essentials";
|
||||
import {RuleResultEntity} from "./Entities/RuleResultEntity";
|
||||
import {Dayjs} from "dayjs";
|
||||
@@ -43,8 +42,16 @@ import {
|
||||
ItemOptions
|
||||
} from "./Infrastructure/Filters/FilterShapes";
|
||||
import {LoggingOptions, LogLevel, StrongLoggingOptions} from "./Infrastructure/Logging";
|
||||
import {DatabaseConfig, DatabaseDriver, DatabaseDriverConfig, DatabaseDriverType} from "./Infrastructure/Database";
|
||||
import {
|
||||
DatabaseConfig,
|
||||
DatabaseDriver,
|
||||
DatabaseDriverConfig,
|
||||
DatabaseDriverType
|
||||
} from "./Infrastructure/Database";
|
||||
import {ActivityType} from "./Infrastructure/Reddit";
|
||||
import {InfluxDB, WriteApi} from "@influxdata/influxdb-client";
|
||||
import {InfluxConfig} from "./Influx/interfaces";
|
||||
import {InfluxClient} from "./Influx/InfluxClient";
|
||||
|
||||
|
||||
export interface ReferenceSubmission {
|
||||
@@ -725,19 +732,14 @@ export interface SearchAndReplaceRegExp {
|
||||
}
|
||||
|
||||
export interface NamedGroup {
|
||||
[name: string]: string
|
||||
}
|
||||
|
||||
export interface GlobalRegExResult {
|
||||
match: string,
|
||||
groups: string[],
|
||||
named: NamedGroup | undefined
|
||||
[name: string]: any
|
||||
}
|
||||
|
||||
export interface RegExResult {
|
||||
matched: boolean,
|
||||
matches: string[],
|
||||
global: GlobalRegExResult[]
|
||||
match: string,
|
||||
groups: string[],
|
||||
index: number
|
||||
named: NamedGroup
|
||||
}
|
||||
|
||||
export type StrongCache = {
|
||||
@@ -1119,6 +1121,8 @@ export interface BotInstanceJsonConfig {
|
||||
retention?: EventRetentionPolicyRange
|
||||
}
|
||||
|
||||
influxConfig?: InfluxConfig
|
||||
|
||||
/**
|
||||
* Settings related to bot behavior for subreddits it is managing
|
||||
* */
|
||||
@@ -1374,6 +1378,8 @@ export interface OperatorJsonConfig {
|
||||
retention?: EventRetentionPolicyRange
|
||||
}
|
||||
|
||||
influxConfig?: InfluxConfig
|
||||
|
||||
/**
|
||||
* Set global snoowrap options as well as default snoowrap config for all bots that don't specify their own
|
||||
* */
|
||||
@@ -1547,6 +1553,23 @@ export interface OperatorJsonConfig {
|
||||
}
|
||||
|
||||
credentials?: ThirdPartyCredentialsJsonConfig
|
||||
|
||||
dev?: {
|
||||
/**
|
||||
* Invoke `process.memoryUsage()` on an interval and send metrics to Influx
|
||||
*
|
||||
* Only works if Influx config is provided
|
||||
* */
|
||||
monitorMemory?: boolean
|
||||
/**
|
||||
* Interval, in seconds, to invoke `process.memoryUsage()` at
|
||||
*
|
||||
* Defaults to 15 seconds
|
||||
*
|
||||
* @default 15
|
||||
* */
|
||||
monitorMemoryInterval?: number
|
||||
};
|
||||
}
|
||||
|
||||
export interface RequiredOperatorRedditCredentials extends RedditCredentials {
|
||||
@@ -1580,6 +1603,7 @@ export interface BotInstanceConfig extends BotInstanceJsonConfig {
|
||||
database: DataSource
|
||||
snoowrap: SnoowrapOptions
|
||||
databaseStatisticsDefaults: DatabaseStatisticsOperatorConfig
|
||||
opInflux?: InfluxClient,
|
||||
subreddits: {
|
||||
names?: string[],
|
||||
exclude?: string[],
|
||||
@@ -1620,6 +1644,7 @@ export interface OperatorConfig extends OperatorJsonConfig {
|
||||
retention?: EventRetentionPolicyRange
|
||||
}
|
||||
database: DataSource
|
||||
influx?: InfluxClient,
|
||||
web: {
|
||||
database: DataSource,
|
||||
databaseConfig: {
|
||||
@@ -1651,6 +1676,10 @@ export interface OperatorConfig extends OperatorJsonConfig {
|
||||
databaseStatisticsDefaults: DatabaseStatisticsOperatorConfig
|
||||
bots: BotInstanceConfig[]
|
||||
credentials: ThirdPartyCredentialsJsonConfig
|
||||
dev: {
|
||||
monitorMemory: boolean
|
||||
monitorMemoryInterval: number
|
||||
}
|
||||
}
|
||||
|
||||
export interface OperatorFileConfig {
|
||||
@@ -1901,8 +1930,6 @@ export interface TextMatchOptions {
|
||||
caseSensitive?: boolean
|
||||
}
|
||||
|
||||
export type ActivityCheckJson = SubmissionCheckJson | CommentCheckJson;
|
||||
|
||||
export interface PostBehaviorOptionConfig {
|
||||
recordTo?: RecordOutputOption
|
||||
behavior?: PostBehaviorType
|
||||
|
||||
@@ -20,7 +20,8 @@ import {CancelDispatchActionJson} from "../Action/CancelDispatchAction";
|
||||
import {ContributorActionJson} from "../Action/ContributorAction";
|
||||
import {SentimentRuleJSONConfig} from "../Rule/SentimentRule";
|
||||
import {ModNoteActionJson} from "../Action/ModNoteAction";
|
||||
import {IncludesData} from "./Infrastructure/Includes";
|
||||
|
||||
export type RuleObjectJsonTypes = RecentActivityRuleJSONConfig | RepeatActivityJSONConfig | AuthorRuleJSONConfig | AttributionJSONConfig | HistoryJSONConfig | RegexRuleJSONConfig | RepostRuleJSONConfig | SentimentRuleJSONConfig
|
||||
|
||||
export type ActionJson = CommentActionJson | FlairActionJson | ReportActionJson | LockActionJson | RemoveActionJson | ApproveActionJson | BanActionJson | UserNoteActionJson | MessageActionJson | UserFlairActionJson | DispatchActionJson | CancelDispatchActionJson | ContributorActionJson | ModNoteActionJson | string;
|
||||
export type ActionJson = CommentActionJson | FlairActionJson | ReportActionJson | LockActionJson | RemoveActionJson | ApproveActionJson | BanActionJson | UserNoteActionJson | MessageActionJson | UserFlairActionJson | DispatchActionJson | CancelDispatchActionJson | ContributorActionJson | ModNoteActionJson | string | IncludesData;
|
||||
|
||||
@@ -1,24 +1,30 @@
|
||||
import winston, {Logger} from "winston";
|
||||
import {
|
||||
asNamedCriteria,
|
||||
buildCacheOptionsFromProvider, buildCachePrefix, buildFilter, castToBool,
|
||||
asNamedCriteria, asWikiContext,
|
||||
buildCachePrefix, buildFilter, castToBool,
|
||||
createAjvFactory, fileOrDirectoryIsWriteable,
|
||||
mergeArr, mergeFilters,
|
||||
normalizeName,
|
||||
overwriteMerge,
|
||||
parseBool, randomId,
|
||||
readConfigFile, removeFromSourceIfKeysExistsInDestination,
|
||||
parseBool, parseExternalUrl, parseUrlContext, parseWikiContext, randomId,
|
||||
readConfigFile,
|
||||
removeUndefinedKeys, resolvePathFromEnvWithRelative
|
||||
} from "./util";
|
||||
import {CommentCheck} from "./Check/CommentCheck";
|
||||
import {SubmissionCheck} from "./Check/SubmissionCheck";
|
||||
|
||||
import Ajv, {Schema} from 'ajv';
|
||||
import * as appSchema from './Schema/App.json';
|
||||
import * as runSchema from './Schema/Run.json';
|
||||
import * as checkSchema from './Schema/Check.json';
|
||||
import * as operatorSchema from './Schema/OperatorConfig.json';
|
||||
import {JSONConfig} from "./JsonConfig";
|
||||
//import * as rulesetSchema from './Schema/RuleSet.json';
|
||||
import {SubredditConfigHydratedData, SubredditConfigData} from "./SubredditConfigData";
|
||||
import LoggedError from "./Utils/LoggedError";
|
||||
import {CheckStructuredJson} from "./Check";
|
||||
import {
|
||||
ActivityCheckConfigData,
|
||||
ActivityCheckConfigHydratedData,
|
||||
CheckConfigHydratedData,
|
||||
CheckConfigObject
|
||||
} from "./Check";
|
||||
import {
|
||||
DEFAULT_POLLING_INTERVAL,
|
||||
DEFAULT_POLLING_LIMIT,
|
||||
@@ -37,15 +43,11 @@ import {
|
||||
OperatorFileConfig,
|
||||
PostBehavior
|
||||
} from "./Common/interfaces";
|
||||
import {isRuleSetJSON, RuleSetJson, RuleSetObjectJson} from "./Rule/RuleSet";
|
||||
import {isRuleSetJSON, RuleSetConfigData, RuleSetConfigHydratedData, RuleSetConfigObject} from "./Rule/RuleSet";
|
||||
import deepEqual from "fast-deep-equal";
|
||||
import {
|
||||
ActionJson
|
||||
} from "./Common/types";
|
||||
import {isActionJson} from "./Action";
|
||||
import {getLogger, resolveLogDir} from "./Utils/loggerFactory";
|
||||
import {getLogger} from "./Utils/loggerFactory";
|
||||
import {GetEnvVars} from 'env-cmd';
|
||||
import {operatorConfig} from "./Utils/CommandConfig";
|
||||
import merge from 'deepmerge';
|
||||
import * as process from "process";
|
||||
import {
|
||||
@@ -59,57 +61,53 @@ import objectHash from "object-hash";
|
||||
import {
|
||||
createAppDatabaseConnection,
|
||||
createDatabaseConfig,
|
||||
createDatabaseConnection,
|
||||
createWebDatabaseConnection
|
||||
} from "./Utils/databaseUtils";
|
||||
import path from 'path';
|
||||
import {
|
||||
JsonOperatorConfigDocument,
|
||||
OperatorConfigDocumentInterface,
|
||||
YamlOperatorConfigDocument
|
||||
} from "./Common/Config/Operator";
|
||||
import {
|
||||
ConfigDocumentInterface
|
||||
} from "./Common/Config/AbstractConfigDocument";
|
||||
import {Document as YamlDocument} from "yaml";
|
||||
import {SimpleError} from "./Utils/Errors";
|
||||
import {CMError, SimpleError} from "./Utils/Errors";
|
||||
import {ErrorWithCause} from "pony-cause";
|
||||
import {RunJson, RunStructuredJson} from "./Run";
|
||||
import {RunConfigHydratedData, RunConfigData, RunConfigObject} from "./Run";
|
||||
import {AuthorRuleConfig} from "./Rule/AuthorRule";
|
||||
import {
|
||||
CacheProvider, ConfigFormat,
|
||||
CacheProvider, ConfigFormat, ConfigFragmentValidationFunc,
|
||||
PollOn
|
||||
} from "./Common/Infrastructure/Atomic";
|
||||
import {
|
||||
AuthorOptions,
|
||||
FilterCriteriaDefaults,
|
||||
FilterCriteriaDefaultsJson,
|
||||
FilterOptionsJson,
|
||||
MaybeAnonymousCriteria,
|
||||
MaybeAnonymousOrStringCriteria, MinimalOrFullFilter, MinimalOrFullFilterJson, NamedCriteria
|
||||
} from "./Common/Infrastructure/Filters/FilterShapes";
|
||||
import {AuthorCriteria, TypedActivityState, TypedActivityStates} from "./Common/Infrastructure/Filters/FilterCriteria";
|
||||
import {AuthorCriteria, TypedActivityState} from "./Common/Infrastructure/Filters/FilterCriteria";
|
||||
import {StrongLoggingOptions} from "./Common/Infrastructure/Logging";
|
||||
import {DatabaseDriver, DatabaseDriverConfig, DatabaseDriverType} from "./Common/Infrastructure/Database";
|
||||
import {DatabaseDriver, DatabaseDriverType} from "./Common/Infrastructure/Database";
|
||||
import {parseFromJsonOrYamlToObject} from "./Common/Config/ConfigUtil";
|
||||
import {RunnableBaseJson, RunnableBaseOptions, StructuredRunnableBase} from "./Common/Infrastructure/Runnable";
|
||||
import {RunnableBaseJson, StructuredRunnableBase} from "./Common/Infrastructure/Runnable";
|
||||
import {
|
||||
RuleJson,
|
||||
RuleObjectJson,
|
||||
StructuredRuleObjectJson,
|
||||
StructuredRuleSetObjectJson
|
||||
RuleConfigData, RuleConfigHydratedData,
|
||||
RuleConfigObject,
|
||||
} from "./Common/Infrastructure/RuleShapes";
|
||||
import {ActionObjectJson, StructuredActionObjectJson} from "./Common/Infrastructure/ActionShapes";
|
||||
import {
|
||||
ActionConfigHydratedData, ActionConfigObject,
|
||||
} from "./Common/Infrastructure/ActionShapes";
|
||||
import {SubredditResources} from "./Subreddit/SubredditResources";
|
||||
import {asIncludesData, IncludesData, IncludesString} from "./Common/Infrastructure/Includes";
|
||||
import ConfigParseError from "./Utils/ConfigParseError";
|
||||
import {InfluxClient} from "./Common/Influx/InfluxClient";
|
||||
|
||||
export interface ConfigBuilderOptions {
|
||||
logger: Logger,
|
||||
}
|
||||
|
||||
export const validateJson = (config: object, schema: Schema, logger: Logger): any => {
|
||||
export const validateJson = <T>(config: object, schema: Schema, logger: Logger): T => {
|
||||
const ajv = createAjvFactory(logger);
|
||||
const valid = ajv.validate(schema, config);
|
||||
if (valid) {
|
||||
return config;
|
||||
return config as unknown as T;
|
||||
} else {
|
||||
logger.error('Json config was not valid. Please use schema to check validity.', {leaf: 'Config'});
|
||||
if (Array.isArray(ajv.errors)) {
|
||||
@@ -162,30 +160,217 @@ export class ConfigBuilder {
|
||||
|
||||
constructor(options: ConfigBuilderOptions) {
|
||||
|
||||
this.configLogger = options.logger.child({leaf: 'Config'}, mergeArr);
|
||||
this.configLogger = options.logger.child({labels: ['Config']}, mergeArr);
|
||||
this.logger = options.logger;
|
||||
}
|
||||
|
||||
validateJson(config: object): JSONConfig {
|
||||
const validConfig = validateJson(config, appSchema, this.logger);
|
||||
return validConfig as JSONConfig;
|
||||
validateJson(config: object): SubredditConfigData {
|
||||
return validateJson<SubredditConfigData>(config, appSchema, this.logger);
|
||||
}
|
||||
|
||||
parseToStructured(config: JSONConfig, filterCriteriaDefaultsFromBot?: FilterCriteriaDefaults, postCheckBehaviorDefaultsFromBot: PostBehavior = {}): RunStructuredJson[] {
|
||||
let namedRules: Map<string, RuleObjectJson> = new Map();
|
||||
let namedActions: Map<string, ActionObjectJson> = new Map();
|
||||
const {checks = [], runs = [], filterCriteriaDefaults, postCheckBehaviorDefaults} = config;
|
||||
async hydrateConfigFragment<T>(val: IncludesData | string | object, resource: SubredditResources, validateFunc?: ConfigFragmentValidationFunc): Promise<T[]> {
|
||||
let includes: IncludesData | undefined = undefined;
|
||||
if(typeof val === 'string') {
|
||||
const strContextResult = parseUrlContext(val);
|
||||
if(strContextResult !== undefined) {
|
||||
this.configLogger.debug(`Detected ${asWikiContext(strContextResult) !== undefined ? 'REDDIT WIKI' : 'URL'} type Config Fragment from string: ${val}`);
|
||||
includes = {
|
||||
path: val as IncludesString
|
||||
};
|
||||
} else {
|
||||
this.configLogger.debug(`Did not detect Config Fragment as a URL resource: ${val}`);
|
||||
}
|
||||
} else if (asIncludesData(val)) {
|
||||
includes = val;
|
||||
const strContextResult = parseUrlContext(val.path);
|
||||
if(strContextResult === undefined) {
|
||||
throw new ConfigParseError(`Could not detect Config Fragment path as a valid URL Resource. Resource must be prefixed with either 'url:' or 'wiki:' -- ${val.path}`);
|
||||
}
|
||||
}
|
||||
|
||||
if(includes === undefined) {
|
||||
if(Array.isArray(val)) {
|
||||
return val as unknown as T[];
|
||||
} else {
|
||||
return [val as unknown as T];
|
||||
}
|
||||
}
|
||||
|
||||
const resolvedFragment = await resource.getConfigFragment(includes, validateFunc);
|
||||
if(Array.isArray(resolvedFragment)) {
|
||||
return resolvedFragment
|
||||
}
|
||||
return [resolvedFragment as T];
|
||||
}
|
||||
|
||||
async hydrateConfig(config: SubredditConfigData, resource: SubredditResources): Promise<SubredditConfigHydratedData> {
|
||||
const {
|
||||
runs = [],
|
||||
checks = [],
|
||||
...restConfig
|
||||
} = config;
|
||||
|
||||
if(checks.length > 0 && runs.length > 0) {
|
||||
// cannot have both checks and runs at top-level
|
||||
throw new Error(`Subreddit configuration cannot contain both 'checks' and 'runs' at top-level.`);
|
||||
}
|
||||
|
||||
// TODO consolidate this with parseToStructured
|
||||
const realRuns = runs;
|
||||
if(checks.length > 0) {
|
||||
realRuns.push({name: 'Run1', checks: checks});
|
||||
}
|
||||
|
||||
const hydratedRuns: RunConfigHydratedData[] = [];
|
||||
|
||||
let runIndex = 1;
|
||||
for(const r of realRuns) {
|
||||
|
||||
let hydratedRunArr: RunConfigData | RunConfigData[];
|
||||
|
||||
try {
|
||||
hydratedRunArr = await this.hydrateConfigFragment<RunConfigData>(r, resource, <RunConfigData>(data: object, fetched: boolean) => {
|
||||
if (fetched) {
|
||||
if (Array.isArray(data)) {
|
||||
for (const runData of data) {
|
||||
validateJson<RunConfigData>(runData, runSchema, this.logger);
|
||||
}
|
||||
} else {
|
||||
validateJson<RunConfigData>(data, runSchema, this.logger);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
} catch (e: any) {
|
||||
throw new CMError(`Could not fetch or validate Run #${runIndex}`, {cause: e});
|
||||
}
|
||||
|
||||
for(const hydratedRunVal of hydratedRunArr) {
|
||||
if (typeof hydratedRunVal === 'string') {
|
||||
throw new ConfigParseError(`Run Config Fragment #${runIndex} was not in a recognized Config Fragment format. Given: ${hydratedRunVal}`);
|
||||
}
|
||||
|
||||
// validate run with unhydrated checks
|
||||
const preValidatedRun = hydratedRunVal as RunConfigData;
|
||||
|
||||
const {checks, ...rest} = preValidatedRun;
|
||||
|
||||
const hydratedChecks: CheckConfigHydratedData[] = [];
|
||||
let checkIndex = 1;
|
||||
for (const c of preValidatedRun.checks) {
|
||||
let hydratedCheckDataArr: ActivityCheckConfigHydratedData[];
|
||||
|
||||
try {
|
||||
hydratedCheckDataArr = await this.hydrateConfigFragment<ActivityCheckConfigHydratedData>(c, resource, (data: object, fetched: boolean) => {
|
||||
if (fetched) {
|
||||
if (Array.isArray(data)) {
|
||||
for (const checkObj of data) {
|
||||
validateJson<ActivityCheckConfigHydratedData>(checkObj, checkSchema, this.logger);
|
||||
}
|
||||
} else {
|
||||
validateJson<ActivityCheckConfigHydratedData>(data, checkSchema, this.logger);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
} catch (e: any) {
|
||||
throw new CMError(`Could not fetch or validate Check Config Fragment #${checkIndex} in Run #${runIndex}`, {cause: e});
|
||||
}
|
||||
|
||||
for (const hydratedCheckData of hydratedCheckDataArr) {
|
||||
if (typeof hydratedCheckData === 'string') {
|
||||
throw new ConfigParseError(`Check #${checkIndex} in Run #${runIndex} was not in a recognized include format. Given: ${hydratedCheckData}`);
|
||||
}
|
||||
|
||||
const preValidatedCheck = hydratedCheckData as ActivityCheckConfigHydratedData;
|
||||
|
||||
const {rules, actions, ...rest} = preValidatedCheck;
|
||||
const hydratedCheckConfigData: CheckConfigHydratedData = rest;
|
||||
|
||||
if (rules !== undefined) {
|
||||
const hydratedRulesOrSets: (RuleSetConfigHydratedData | RuleConfigHydratedData)[] = [];
|
||||
|
||||
let ruleIndex = 1;
|
||||
for (const r of rules) {
|
||||
let hydratedRuleOrSetArr: (RuleConfigHydratedData | RuleSetConfigHydratedData)[];
|
||||
try {
|
||||
hydratedRuleOrSetArr = await this.hydrateConfigFragment<(RuleSetConfigHydratedData | RuleConfigHydratedData)>(r, resource);
|
||||
} catch (e: any) {
|
||||
throw new CMError(`Rule Config Fragment #${ruleIndex} in Check #${checkIndex} could not be fetched`, {cause: e});
|
||||
}
|
||||
for (const hydratedRuleOrSet of hydratedRuleOrSetArr) {
|
||||
if (typeof hydratedRuleOrSet === 'string') {
|
||||
hydratedRulesOrSets.push(hydratedRuleOrSet);
|
||||
} else if (isRuleSetJSON(hydratedRuleOrSet)) {
|
||||
const hydratedRulesetRules: RuleConfigHydratedData[] = [];
|
||||
for (const rsr of hydratedRuleOrSet.rules) {
|
||||
const hydratedRuleSetRuleArr = await this.hydrateConfigFragment<RuleConfigHydratedData>(rsr, resource);
|
||||
for(const rsrData of hydratedRuleSetRuleArr) {
|
||||
// either a string or rule data at this point
|
||||
// we will validate the whole check again so this rule will be validated eventually
|
||||
hydratedRulesetRules.push(rsrData)
|
||||
}
|
||||
}
|
||||
hydratedRuleOrSet.rules = hydratedRulesetRules;
|
||||
hydratedRulesOrSets.push(hydratedRuleOrSet);
|
||||
} else {
|
||||
hydratedRulesOrSets.push(hydratedRuleOrSet);
|
||||
}
|
||||
ruleIndex++;
|
||||
}
|
||||
}
|
||||
hydratedCheckConfigData.rules = hydratedRulesOrSets;
|
||||
}
|
||||
|
||||
if (actions !== undefined) {
|
||||
const hydratedActions: ActionConfigHydratedData[] = [];
|
||||
|
||||
let actionIndex = 1;
|
||||
for (const a of actions) {
|
||||
let hydratedActionArr: ActionConfigHydratedData[];
|
||||
try {
|
||||
hydratedActionArr = await this.hydrateConfigFragment<ActionConfigHydratedData>(a, resource);
|
||||
} catch (e: any) {
|
||||
throw new CMError(`Action Config Fragment #${actionIndex} in Check #${checkIndex} could not be fetched`, {cause: e});
|
||||
}
|
||||
for (const hydratedAction of hydratedActionArr) {
|
||||
hydratedActions.push(hydratedAction);
|
||||
actionIndex++;
|
||||
}
|
||||
}
|
||||
hydratedCheckConfigData.actions = hydratedActions;
|
||||
}
|
||||
|
||||
hydratedChecks.push(hydratedCheckConfigData);
|
||||
checkIndex++;
|
||||
}
|
||||
}
|
||||
|
||||
const hydratedRun: RunConfigHydratedData = {...rest, checks: hydratedChecks};
|
||||
|
||||
hydratedRuns.push(hydratedRun);
|
||||
runIndex++;
|
||||
}
|
||||
}
|
||||
|
||||
const hydratedConfig: SubredditConfigHydratedData = {...restConfig, runs: hydratedRuns};
|
||||
|
||||
const validatedHydratedConfig = validateJson<SubredditConfigHydratedData>(hydratedConfig, appSchema, this.logger);
|
||||
|
||||
return validatedHydratedConfig;
|
||||
}
|
||||
|
||||
async parseToStructured(config: SubredditConfigData, resource: SubredditResources, filterCriteriaDefaultsFromBot?: FilterCriteriaDefaults, postCheckBehaviorDefaultsFromBot: PostBehavior = {}): Promise<RunConfigObject[]> {
|
||||
let namedRules: Map<string, RuleConfigObject> = new Map();
|
||||
let namedActions: Map<string, ActionConfigObject> = new Map();
|
||||
const {filterCriteriaDefaults, postCheckBehaviorDefaults} = config;
|
||||
|
||||
const hydratedConfig = await this.hydrateConfig(config, resource);
|
||||
|
||||
const {runs: realRuns = []} = hydratedConfig;
|
||||
|
||||
for(const r of realRuns) {
|
||||
for (const c of r.checks) {
|
||||
const {rules = [], actions = []} = c;
|
||||
@@ -194,11 +379,11 @@ export class ConfigBuilder {
|
||||
}
|
||||
}
|
||||
|
||||
const [namedAuthorFilters, namedItemFilters] = extractNamedFilters({...config, runs: realRuns});
|
||||
const [namedAuthorFilters, namedItemFilters] = extractNamedFilters({...hydratedConfig, runs: realRuns});
|
||||
|
||||
const configFilterDefaults = filterCriteriaDefaults === undefined ? undefined : buildDefaultFilterCriteriaFromJson(filterCriteriaDefaults, namedAuthorFilters, namedItemFilters);
|
||||
|
||||
const structuredRuns: RunStructuredJson[] = [];
|
||||
const structuredRuns: RunConfigObject[] = [];
|
||||
|
||||
const namedFilters = insertNameFilters(namedAuthorFilters, namedItemFilters);
|
||||
|
||||
@@ -210,7 +395,7 @@ export class ConfigBuilder {
|
||||
|
||||
const configFilterDefaultsFromRun = filterCriteriaDefaultsFromRun === undefined ? undefined : buildDefaultFilterCriteriaFromJson(filterCriteriaDefaultsFromRun, namedAuthorFilters, namedItemFilters);
|
||||
|
||||
const structuredChecks: CheckStructuredJson[] = [];
|
||||
const structuredChecks: CheckConfigObject[] = [];
|
||||
for (const c of r.checks) {
|
||||
const {rules = [], actions = [], authorIs = {}, itemIs = []} = c;
|
||||
const strongRules = insertNamedRules(rules, namedRules, namedAuthorFilters, namedItemFilters);
|
||||
@@ -227,7 +412,7 @@ export class ConfigBuilder {
|
||||
rules: strongRules,
|
||||
actions: strongActions,
|
||||
...postCheckBehaviors
|
||||
} as CheckStructuredJson;
|
||||
} as CheckConfigObject;
|
||||
structuredChecks.push(strongCheck);
|
||||
}
|
||||
structuredRuns.push({
|
||||
@@ -283,19 +468,19 @@ export const buildDefaultFilterCriteriaFromJson = (val: FilterCriteriaDefaultsJs
|
||||
return def;
|
||||
}
|
||||
|
||||
export const extractNamedRules = (rules: Array<RuleSetJson | RuleJson>, namedRules: Map<string, RuleObjectJson> = new Map()): Map<string, RuleObjectJson> => {
|
||||
export const extractNamedRules = (rules: Array<RuleSetConfigData | RuleConfigData>, namedRules: Map<string, RuleConfigObject> = new Map()): Map<string, RuleConfigObject> => {
|
||||
//const namedRules = new Map();
|
||||
for (const r of rules) {
|
||||
let rulesToAdd: RuleObjectJson[] = [];
|
||||
let rulesToAdd: RuleConfigObject[] = [];
|
||||
if ((typeof r === 'object')) {
|
||||
if ((r as RuleObjectJson).kind !== undefined) {
|
||||
if ((r as RuleConfigObject).kind !== undefined) {
|
||||
// itsa rule
|
||||
const rule = r as RuleObjectJson;
|
||||
const rule = r as RuleConfigObject;
|
||||
if (rule.name !== undefined) {
|
||||
rulesToAdd.push(rule);
|
||||
}
|
||||
} else {
|
||||
const ruleSet = r as RuleSetJson;
|
||||
const ruleSet = r as RuleSetConfigData;
|
||||
const nestedNamed = extractNamedRules(ruleSet.rules);
|
||||
rulesToAdd = [...nestedNamed.values()];
|
||||
}
|
||||
@@ -306,7 +491,7 @@ export const extractNamedRules = (rules: Array<RuleSetJson | RuleJson>, namedRul
|
||||
const ruleNoName = {...rest};
|
||||
|
||||
if (namedRules.has(normalName)) {
|
||||
const {name: nn, ...ruleRest} = namedRules.get(normalName) as RuleObjectJson;
|
||||
const {name: nn, ...ruleRest} = namedRules.get(normalName) as RuleConfigObject;
|
||||
if (!deepEqual(ruleRest, ruleNoName)) {
|
||||
throw new Error(`Rule names must be unique (case-insensitive). Conflicting name: ${name}`);
|
||||
}
|
||||
@@ -352,7 +537,7 @@ const parseFilterJson = <T>(addToFilter: FilterJsonFuncArg<T>) => (val: MinimalO
|
||||
}
|
||||
}
|
||||
|
||||
export const extractNamedFilters = (config: JSONConfig, namedAuthorFilters: Map<string, NamedCriteria<AuthorCriteria>> = new Map(), namedItemFilters: Map<string, NamedCriteria<TypedActivityState>> = new Map()): [Map<string, NamedCriteria<AuthorCriteria>>, Map<string, NamedCriteria<TypedActivityState>>] => {
|
||||
export const extractNamedFilters = (config: SubredditConfigHydratedData, namedAuthorFilters: Map<string, NamedCriteria<AuthorCriteria>> = new Map(), namedItemFilters: Map<string, NamedCriteria<TypedActivityState>> = new Map()): [Map<string, NamedCriteria<AuthorCriteria>>, Map<string, NamedCriteria<TypedActivityState>>] => {
|
||||
const addToAuthors = addToNamedFilter(namedAuthorFilters, 'authorIs');
|
||||
const addToItems = addToNamedFilter(namedItemFilters, 'itemIs');
|
||||
|
||||
@@ -407,7 +592,7 @@ export const extractNamedFilters = (config: JSONConfig, namedAuthorFilters: Map<
|
||||
parseAuthorIs(filterCriteriaDefaults?.authorIs);
|
||||
parseItemIs(filterCriteriaDefaults?.itemIs);
|
||||
|
||||
for (const r of runs as RunJson[]) {
|
||||
for (const r of runs) {
|
||||
|
||||
const {
|
||||
filterCriteriaDefaults: filterCriteriaDefaultsFromRun
|
||||
@@ -508,13 +693,13 @@ export const insertNameFilters = (namedAuthorFilters: Map<string, NamedCriteria<
|
||||
return runnableOpts;
|
||||
}
|
||||
|
||||
export const insertNamedRules = (rules: Array<RuleSetJson | RuleJson>, namedRules: Map<string, RuleObjectJson> = new Map(), namedAuthorFilters: Map<string, NamedCriteria<AuthorCriteria>> = new Map(), namedItemFilters: Map<string, NamedCriteria<TypedActivityState>> = new Map()): (StructuredRuleSetObjectJson | StructuredRuleObjectJson)[] => {
|
||||
export const insertNamedRules = (rules: Array<RuleSetConfigHydratedData | RuleConfigHydratedData>, namedRules: Map<string, RuleConfigObject> = new Map(), namedAuthorFilters: Map<string, NamedCriteria<AuthorCriteria>> = new Map(), namedItemFilters: Map<string, NamedCriteria<TypedActivityState>> = new Map()): (RuleSetConfigObject | RuleConfigObject)[] => {
|
||||
|
||||
const namedFilters = insertNameFilters(namedAuthorFilters, namedItemFilters);
|
||||
|
||||
const strongRules: (StructuredRuleSetObjectJson | StructuredRuleObjectJson)[] = [];
|
||||
const strongRules: (RuleSetConfigObject | RuleConfigObject)[] = [];
|
||||
for (const r of rules) {
|
||||
let rule: StructuredRuleObjectJson | undefined;
|
||||
let rule: RuleConfigObject | undefined;
|
||||
if (typeof r === 'string') {
|
||||
const foundRule = namedRules.get(r.toLowerCase());
|
||||
if (foundRule === undefined) {
|
||||
@@ -523,20 +708,20 @@ export const insertNamedRules = (rules: Array<RuleSetJson | RuleJson>, namedRule
|
||||
rule = {
|
||||
...foundRule,
|
||||
...namedFilters(foundRule)
|
||||
} as StructuredRuleObjectJson
|
||||
} as RuleConfigObject
|
||||
//strongRules.push(foundRule);
|
||||
} else if (isRuleSetJSON(r)) {
|
||||
const {rules: sr, ...rest} = r;
|
||||
const setRules = insertNamedRules(sr, namedRules, namedAuthorFilters, namedItemFilters);
|
||||
const strongSet = {rules: setRules, ...rest} as StructuredRuleSetObjectJson;
|
||||
const strongSet = {rules: setRules, ...rest} as RuleSetConfigObject;
|
||||
strongRules.push(strongSet);
|
||||
} else {
|
||||
rule = {...r, ...namedFilters(r)} as StructuredRuleObjectJson;
|
||||
rule = {...r, ...namedFilters(r)} as RuleConfigObject;
|
||||
}
|
||||
|
||||
if(rule !== undefined) {
|
||||
if(rule.kind === 'author') {
|
||||
const authorRuleConfig = rule as (StructuredRuleObjectJson & AuthorRuleConfig);
|
||||
const authorRuleConfig = rule as (RuleConfigObject & AuthorRuleConfig);
|
||||
const filters = namedFilters({authorIs: {include: authorRuleConfig.include, exclude: authorRuleConfig.exclude}});
|
||||
const builtFilter = buildFilter(filters.authorIs as MinimalOrFullFilter<AuthorCriteria>);
|
||||
rule = {
|
||||
@@ -546,14 +731,14 @@ export const insertNamedRules = (rules: Array<RuleSetJson | RuleJson>, namedRule
|
||||
exclude: builtFilter.exclude
|
||||
}
|
||||
}
|
||||
strongRules.push(rule as StructuredRuleObjectJson);
|
||||
strongRules.push(rule as RuleConfigObject);
|
||||
}
|
||||
}
|
||||
|
||||
return strongRules;
|
||||
}
|
||||
|
||||
export const extractNamedActions = (actions: Array<ActionJson>, namedActions: Map<string, ActionObjectJson> = new Map()): Map<string, ActionObjectJson> => {
|
||||
export const extractNamedActions = (actions: Array<ActionConfigHydratedData>, namedActions: Map<string, ActionConfigObject> = new Map()): Map<string, ActionConfigObject> => {
|
||||
for (const a of actions) {
|
||||
if (!(typeof a === 'string')) {
|
||||
if (isActionJson(a) && a.name !== undefined) {
|
||||
@@ -562,7 +747,7 @@ export const extractNamedActions = (actions: Array<ActionJson>, namedActions: Ma
|
||||
const actionNoName = {...rest};
|
||||
if (namedActions.has(normalName)) {
|
||||
// @ts-ignore
|
||||
const {name: nn, ...aRest} = namedActions.get(normalName) as ActionObjectJson;
|
||||
const {name: nn, ...aRest} = namedActions.get(normalName) as ActionConfigObject;
|
||||
if (!deepEqual(aRest, actionNoName)) {
|
||||
throw new Error(`Actions names must be unique (case-insensitive). Conflicting name: ${a.name}`);
|
||||
}
|
||||
@@ -575,20 +760,20 @@ export const extractNamedActions = (actions: Array<ActionJson>, namedActions: Ma
|
||||
return namedActions;
|
||||
}
|
||||
|
||||
export const insertNamedActions = (actions: Array<ActionJson>, namedActions: Map<string, ActionObjectJson> = new Map(), namedAuthorFilters: Map<string, NamedCriteria<AuthorCriteria>> = new Map(), namedItemFilters: Map<string, NamedCriteria<TypedActivityState>> = new Map()): Array<StructuredActionObjectJson> => {
|
||||
export const insertNamedActions = (actions: Array<ActionConfigHydratedData>, namedActions: Map<string, ActionConfigObject> = new Map(), namedAuthorFilters: Map<string, NamedCriteria<AuthorCriteria>> = new Map(), namedItemFilters: Map<string, NamedCriteria<TypedActivityState>> = new Map()): Array<ActionConfigObject> => {
|
||||
|
||||
const namedFilters = insertNameFilters(namedAuthorFilters, namedItemFilters);
|
||||
|
||||
const strongActions: Array<StructuredActionObjectJson> = [];
|
||||
const strongActions: Array<ActionConfigObject> = [];
|
||||
for (const a of actions) {
|
||||
if (typeof a === 'string') {
|
||||
const foundAction = namedActions.get(a.toLowerCase());
|
||||
if (foundAction === undefined) {
|
||||
throw new Error(`No named Action with the name ${a} was found`);
|
||||
}
|
||||
strongActions.push({...foundAction, ...namedFilters(foundAction)} as StructuredActionObjectJson);
|
||||
strongActions.push({...foundAction, ...namedFilters(foundAction)} as ActionConfigObject);
|
||||
} else {
|
||||
strongActions.push({...a, ...namedFilters(a)} as StructuredActionObjectJson);
|
||||
strongActions.push({...a, ...namedFilters(a)} as ActionConfigObject);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1024,6 +1209,7 @@ export const buildOperatorConfigWithDefaults = async (data: OperatorJsonConfig):
|
||||
migrations = {},
|
||||
retention,
|
||||
} = {},
|
||||
influxConfig,
|
||||
web: {
|
||||
port = 8085,
|
||||
maxLogs = 200,
|
||||
@@ -1053,6 +1239,10 @@ export const buildOperatorConfigWithDefaults = async (data: OperatorJsonConfig):
|
||||
} = {},
|
||||
credentials = {},
|
||||
bots = [],
|
||||
dev: {
|
||||
monitorMemory = false,
|
||||
monitorMemoryInterval = 15
|
||||
} = {},
|
||||
} = data;
|
||||
|
||||
let cache: StrongCache;
|
||||
@@ -1141,6 +1331,13 @@ export const buildOperatorConfigWithDefaults = async (data: OperatorJsonConfig):
|
||||
}
|
||||
const webDbConfig = createDatabaseConfig(realdbConnectionWeb);
|
||||
|
||||
let influx: InfluxClient | undefined = undefined;
|
||||
if(influxConfig !== undefined) {
|
||||
const tags = friendly !== undefined ? {server: friendly} : undefined;
|
||||
influx = new InfluxClient(influxConfig, appLogger, tags);
|
||||
await influx.isReady();
|
||||
}
|
||||
|
||||
const config: OperatorConfig = {
|
||||
mode,
|
||||
operator: {
|
||||
@@ -1160,6 +1357,7 @@ export const buildOperatorConfigWithDefaults = async (data: OperatorJsonConfig):
|
||||
migrations,
|
||||
retention,
|
||||
},
|
||||
influx,
|
||||
userAgent,
|
||||
web: {
|
||||
database: await createWebDatabaseConnection(webDbConfig, appLogger),
|
||||
@@ -1193,6 +1391,10 @@ export const buildOperatorConfigWithDefaults = async (data: OperatorJsonConfig):
|
||||
},
|
||||
bots: [],
|
||||
credentials,
|
||||
dev: {
|
||||
monitorMemory,
|
||||
monitorMemoryInterval
|
||||
}
|
||||
};
|
||||
|
||||
config.bots = bots.map(x => buildBotConfig(x, config));
|
||||
@@ -1214,6 +1416,7 @@ export const buildBotConfig = (data: BotInstanceJsonConfig, opConfig: OperatorCo
|
||||
databaseConfig: {
|
||||
retention: retentionFromOp,
|
||||
} = {},
|
||||
influx: opInflux
|
||||
} = opConfig;
|
||||
const {
|
||||
name: botName,
|
||||
@@ -1239,6 +1442,7 @@ export const buildBotConfig = (data: BotInstanceJsonConfig, opConfig: OperatorCo
|
||||
databaseConfig: {
|
||||
retention,
|
||||
} = {},
|
||||
influxConfig,
|
||||
flowControlDefaults,
|
||||
credentials = {},
|
||||
subreddits: {
|
||||
@@ -1370,6 +1574,7 @@ export const buildBotConfig = (data: BotInstanceJsonConfig, opConfig: OperatorCo
|
||||
databaseConfig: {
|
||||
retention: retention ?? retentionFromOp
|
||||
},
|
||||
opInflux,
|
||||
subreddits: {
|
||||
names,
|
||||
exclude,
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
import {CheckJson, CommentCheckJson, SubmissionCheckJson} from "./Check";
|
||||
import {ActivityCheckJson, ManagerOptions} from "./Common/interfaces";
|
||||
import {RunJson} from "./Run";
|
||||
|
||||
export interface JSONConfig extends ManagerOptions {
|
||||
/**
|
||||
* A list of all the checks that should be run for a subreddit.
|
||||
*
|
||||
* Checks are split into two lists -- submission or comment -- based on kind and run independently.
|
||||
*
|
||||
* Checks in each list are run in the order found in the configuration.
|
||||
*
|
||||
* When a check "passes", and actions are performed, then all subsequent checks are skipped.
|
||||
* @minItems 1
|
||||
* */
|
||||
checks?: ActivityCheckJson[]
|
||||
|
||||
/**
|
||||
* A list of sets of Checks to run
|
||||
* @minItems 1
|
||||
* */
|
||||
runs?: RunJson[]
|
||||
}
|
||||
@@ -380,8 +380,10 @@ export class RegexRule extends Rule {
|
||||
|
||||
for (const c of contents) {
|
||||
const results = parseRegex(reg, c);
|
||||
if (results.matched) {
|
||||
m = m.concat(results.matches);
|
||||
if(results !== undefined) {
|
||||
for(const r of results) {
|
||||
m.push(r.match);
|
||||
}
|
||||
}
|
||||
}
|
||||
return m;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import {RecentActivityRule, RecentActivityRuleJSONConfig} from "./RecentActivityRule";
|
||||
import RepeatActivityRule, {RepeatActivityJSONConfig} from "./RepeatActivityRule";
|
||||
import {Rule, RuleJSONConfig, StructuredRuleJson} from "./index";
|
||||
import {Rule} from "./index";
|
||||
import AuthorRule, {AuthorRuleJSONConfig} from "./AuthorRule";
|
||||
import {AttributionJSONConfig, AttributionRule} from "./AttributionRule";
|
||||
import {Logger} from "winston";
|
||||
@@ -11,9 +11,10 @@ import Snoowrap from "snoowrap";
|
||||
import {RepostRule, RepostRuleJSONConfig} from "./RepostRule";
|
||||
import {StructuredFilter} from "../Common/Infrastructure/Filters/FilterShapes";
|
||||
import {SentimentRule, SentimentRuleJSONConfig} from "./SentimentRule";
|
||||
import {StructuredRuleConfigObject} from "../Common/Infrastructure/RuleShapes";
|
||||
|
||||
export function ruleFactory
|
||||
(config: StructuredRuleJson, logger: Logger, subredditName: string, resources: SubredditResources, client: Snoowrap): Rule {
|
||||
(config: StructuredRuleConfigObject, logger: Logger, subredditName: string, resources: SubredditResources, client: Snoowrap): Rule {
|
||||
let cfg;
|
||||
switch (config.kind) {
|
||||
case 'recentActivity':
|
||||
|
||||
@@ -1,17 +1,20 @@
|
||||
import {IRule, Triggerable, Rule, RuleJSONConfig, StructuredRuleJson} from "./index";
|
||||
import {IRule, Rule} from "./index";
|
||||
import Snoowrap, {Comment, Submission} from "snoowrap";
|
||||
import {ruleFactory} from "./RuleFactory";
|
||||
import {createAjvFactory, mergeArr} from "../util";
|
||||
import {Logger} from "winston";
|
||||
import {JoinCondition, RuleResult, RuleSetResult} from "../Common/interfaces";
|
||||
import {JoinCondition, RuleSetResult} from "../Common/interfaces";
|
||||
import * as RuleSchema from '../Schema/Rule.json';
|
||||
import Ajv from 'ajv';
|
||||
import {SubredditResources} from "../Subreddit/SubredditResources";
|
||||
import {runCheckOptions} from "../Subreddit/Manager";
|
||||
import {RuleResultEntity} from "../Common/Entities/RuleResultEntity";
|
||||
import {RuleSetResultEntity} from "../Common/Entities/RuleSetResultEntity";
|
||||
import {JoinOperands} from "../Common/Infrastructure/Atomic";
|
||||
import {RuleJson, RuleObjectJson, StructuredRuleObjectJson} from "../Common/Infrastructure/RuleShapes";
|
||||
import {
|
||||
RuleConfigData,
|
||||
RuleConfigHydratedData,
|
||||
RuleConfigObject,
|
||||
StructuredRuleConfigObject
|
||||
} from "../Common/Infrastructure/RuleShapes";
|
||||
|
||||
export class RuleSet implements IRuleSet {
|
||||
rules: Rule[] = [];
|
||||
@@ -99,7 +102,7 @@ export interface IRuleSet extends JoinCondition {
|
||||
}
|
||||
|
||||
export interface RuleSetOptions extends IRuleSet {
|
||||
rules: Array<StructuredRuleObjectJson>,
|
||||
rules: Array<StructuredRuleConfigObject>,
|
||||
logger: Logger
|
||||
subredditName: string
|
||||
resources: SubredditResources
|
||||
@@ -109,20 +112,24 @@ export interface RuleSetOptions extends IRuleSet {
|
||||
/**
|
||||
* A RuleSet is a "nested" set of `Rule` objects that can be used to create more complex AND/OR behavior. Think of the outcome of a `RuleSet` as the result of all of its run `Rule` objects (based on `condition`)
|
||||
* */
|
||||
export interface RuleSetJson extends JoinCondition {
|
||||
export interface RuleSetConfigData extends JoinCondition {
|
||||
/**
|
||||
* Can be `Rule` or the `name` of any **named** `Rule` in your subreddit's configuration
|
||||
* @minItems 1
|
||||
* */
|
||||
rules: Array<RuleJson>
|
||||
rules: RuleConfigData[]
|
||||
}
|
||||
|
||||
export interface RuleSetObjectJson extends RuleSetJson {
|
||||
rules: Array<RuleObjectJson>
|
||||
export interface RuleSetConfigHydratedData extends RuleSetConfigData {
|
||||
rules: RuleConfigHydratedData[]
|
||||
}
|
||||
|
||||
export const isRuleSetJSON = (obj: object): obj is RuleSetJson => {
|
||||
return (obj as RuleSetJson).rules !== undefined;
|
||||
export interface RuleSetConfigObject extends RuleSetConfigHydratedData {
|
||||
rules: RuleConfigObject[]
|
||||
}
|
||||
|
||||
export const isRuleSetJSON = (obj: object): obj is RuleSetConfigData => {
|
||||
return (obj as RuleSetConfigData).rules !== undefined;
|
||||
}
|
||||
|
||||
export const isRuleSet = (obj: object): obj is RuleSet => {
|
||||
|
||||
@@ -187,8 +187,3 @@ export interface RuleJSONConfig extends IRule {
|
||||
*/
|
||||
kind: 'recentActivity' | 'repeatActivity' | 'author' | 'attribution' | 'history' | 'regex' | 'repost' | 'sentiment'
|
||||
}
|
||||
|
||||
|
||||
export interface StructuredRuleJson extends Omit<RuleJSONConfig, 'authorIs' | 'itemIs'>, StructuredRunnableBase {
|
||||
|
||||
}
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
import {asStructuredCommentCheckJson, asStructuredSubmissionCheckJson, Check, CheckStructuredJson} from "../Check";
|
||||
import {
|
||||
ActivityCheckJson,
|
||||
ActivityCheckConfigHydratedData,
|
||||
ActivityCheckConfigValue, ActivityCheckObject,
|
||||
asStructuredCommentCheckJson,
|
||||
asStructuredSubmissionCheckJson,
|
||||
Check,
|
||||
} from "../Check";
|
||||
import {
|
||||
PostBehavior, PostBehaviorOption,
|
||||
RunResult
|
||||
} from "../Common/interfaces";
|
||||
@@ -20,6 +25,7 @@ import {RuleResultEntity} from "../Common/Entities/RuleResultEntity";
|
||||
import {RunnableBase} from "../Common/RunnableBase";
|
||||
import {RunnableBaseJson, RunnableBaseOptions, StructuredRunnableBase} from "../Common/Infrastructure/Runnable";
|
||||
import {FilterCriteriaDefaults} from "../Common/Infrastructure/Filters/FilterShapes";
|
||||
import {IncludesData} from "../Common/Infrastructure/Includes";
|
||||
|
||||
export class Run extends RunnableBase {
|
||||
name: string;
|
||||
@@ -296,7 +302,7 @@ export interface IRun extends PostBehavior, RunnableBaseJson {
|
||||
enable?: boolean,
|
||||
}
|
||||
|
||||
export interface RunOptions extends RunStructuredJson, RunnableBaseOptions {
|
||||
export interface RunOptions extends RunConfigObject, RunnableBaseOptions {
|
||||
// submissionChecks?: SubmissionCheck[]
|
||||
// commentChecks?: CommentCheck[]
|
||||
//checks: CheckStructuredJson[]
|
||||
@@ -308,10 +314,16 @@ export interface RunOptions extends RunStructuredJson, RunnableBaseOptions {
|
||||
emitter: EventEmitter;
|
||||
}
|
||||
|
||||
export interface RunJson extends IRun {
|
||||
checks: ActivityCheckJson[]
|
||||
export interface RunConfigData extends IRun {
|
||||
checks: ActivityCheckConfigValue[]
|
||||
}
|
||||
|
||||
export interface RunStructuredJson extends Omit<RunJson, 'authorIs' | 'itemIs' | 'checks'>, StructuredRunnableBase {
|
||||
checks: CheckStructuredJson[]
|
||||
export type RunConfigValue = string | IncludesData | RunConfigData;
|
||||
|
||||
export interface RunConfigHydratedData extends IRun {
|
||||
checks: ActivityCheckConfigHydratedData[]
|
||||
}
|
||||
|
||||
export interface RunConfigObject extends Omit<RunConfigHydratedData, 'authorIs' | 'itemIs'>, StructuredRunnableBase {
|
||||
checks: ActivityCheckObject[]
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -78,8 +78,8 @@
|
||||
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
|
||||
},
|
||||
"reports": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
@@ -919,19 +919,17 @@
|
||||
"type": "string"
|
||||
},
|
||||
"note": {
|
||||
"description": "A mod note for this ban",
|
||||
"description": "A mod note for this ban. Can use Templating.\n\nIf the length expands to more than 100 characters it will truncated with \"...\"",
|
||||
"examples": [
|
||||
"Sock puppet for u/AnotherUser"
|
||||
],
|
||||
"maxLength": 100,
|
||||
"type": "string"
|
||||
},
|
||||
"reason": {
|
||||
"description": "Reason for ban.",
|
||||
"description": "Reason for ban. Can use Templating.\n\nIf the length expands to more than 100 characters it will truncated with \"...\"",
|
||||
"examples": [
|
||||
"repeat spam"
|
||||
],
|
||||
"maxLength": 100,
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
@@ -1386,7 +1384,7 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"CommentCheckJson": {
|
||||
"CommentCheckConfigData": {
|
||||
"properties": {
|
||||
"actions": {
|
||||
"description": "The `Actions` to run after the check is successfully triggered. ALL `Actions` will run in the order they are listed\n\n Can be `Action` or the `name` of any **named** `Action` in your subreddit's configuration",
|
||||
@@ -1404,6 +1402,9 @@
|
||||
],
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/IncludesData"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/FlairActionJson"
|
||||
},
|
||||
@@ -1517,17 +1518,17 @@
|
||||
},
|
||||
"itemIs": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/FilterOptionsJson<CommentState>"
|
||||
},
|
||||
{
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/SubmissionState"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/CommentState"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/NamedCriteria<CommentState>"
|
||||
"$ref": "#/definitions/NamedCriteria<TypedActivityState>"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
@@ -1535,6 +1536,9 @@
|
||||
]
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/FilterOptionsJson<TypedActivityState>"
|
||||
}
|
||||
],
|
||||
"description": "A list of criteria to test the state of the `Activity` against before running the check.\n\nIf any set of criteria passes the Check will be run. If the criteria fails then the Check will fail.\n\n* @examples [[{\"over_18\": true, \"removed': false}]]"
|
||||
@@ -1587,6 +1591,9 @@
|
||||
"description": "A list of Rules to run.\n\nIf `Rule` objects are triggered based on `condition` then `actions` will be performed.\n\nCan be `Rule`, `RuleSet`, or the `name` of any **named** `Rule` in your subreddit's configuration.\n\n**If `rules` is an empty array or not present then `actions` are performed immediately.**",
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/IncludesData"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/RecentActivityRuleJSONConfig"
|
||||
},
|
||||
@@ -1612,7 +1619,7 @@
|
||||
"$ref": "#/definitions/SentimentRuleJSONConfig"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/RuleSetJson"
|
||||
"$ref": "#/definitions/RuleSetConfigData"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
@@ -1721,8 +1728,8 @@
|
||||
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
|
||||
},
|
||||
"reports": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
@@ -2519,102 +2526,6 @@
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"FilterOptionsJson<CommentState>": {
|
||||
"properties": {
|
||||
"exclude": {
|
||||
"description": "Only runs if `include` is not present. Each Criteria is comprised of conditions that the filter (Author/Item) being checked must \"not\" pass. See excludeCondition for set behavior\n\nEX: `isMod: true, name: Automoderator` => Will pass if the Author IS NOT a mod and IS NOT named Automoderator",
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/CommentState"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/NamedCriteria<CommentState>"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"excludeCondition": {
|
||||
"default": "OR",
|
||||
"description": "* OR => if ANY exclude condition \"does not\" pass then the exclude test passes\n* AND => if ALL exclude conditions \"do not\" pass then the exclude test passes\n\nDefaults to OR",
|
||||
"enum": [
|
||||
"AND",
|
||||
"OR"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"include": {
|
||||
"description": "Will \"pass\" if any set of Criteria passes",
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/CommentState"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/NamedCriteria<CommentState>"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"FilterOptionsJson<SubmissionState>": {
|
||||
"properties": {
|
||||
"exclude": {
|
||||
"description": "Only runs if `include` is not present. Each Criteria is comprised of conditions that the filter (Author/Item) being checked must \"not\" pass. See excludeCondition for set behavior\n\nEX: `isMod: true, name: Automoderator` => Will pass if the Author IS NOT a mod and IS NOT named Automoderator",
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/SubmissionState"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/NamedCriteria<SubmissionState>"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"excludeCondition": {
|
||||
"default": "OR",
|
||||
"description": "* OR => if ANY exclude condition \"does not\" pass then the exclude test passes\n* AND => if ALL exclude conditions \"do not\" pass then the exclude test passes\n\nDefaults to OR",
|
||||
"enum": [
|
||||
"AND",
|
||||
"OR"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"include": {
|
||||
"description": "Will \"pass\" if any set of Criteria passes",
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/SubmissionState"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/NamedCriteria<SubmissionState>"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"FilterOptionsJson<SubredditCriteria>": {
|
||||
"properties": {
|
||||
"exclude": {
|
||||
@@ -3286,6 +3197,41 @@
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"IncludesData": {
|
||||
"properties": {
|
||||
"path": {
|
||||
"description": "The special-form path to the config fragment to retrieve.\n\nIf value starts with `wiki:` then the proceeding value will be used to get a wiki page from the current subreddit\n\n * EX `wiki:botconfig/mybot` tries to get `https://reddit.com/r/currentSubreddit/wiki/botconfig/mybot`\n\nIf the value starts with `wiki:` and ends with `|someValue` then `someValue` will be used as the base subreddit for the wiki page\n\n* EX `wiki:replytemplates/test|ContextModBot` tries to get `https://reddit.com/r/ContextModBot/wiki/replytemplates/test`\n\nIf the value starts with `url:` then the value is fetched as an external url and expects raw text returned\n\n* EX `url:https://pastebin.com/raw/38qfL7mL` tries to get the text response of `https://pastebin.com/raw/38qfL7mL`",
|
||||
"type": "string"
|
||||
},
|
||||
"ttl": {
|
||||
"anyOf": [
|
||||
{
|
||||
"enum": [
|
||||
false,
|
||||
"response",
|
||||
true
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "number"
|
||||
}
|
||||
],
|
||||
"description": "Control caching for the config fragment.\n\nIf not specified the value for `wikiTTL` will be used\n\n* If true then value is cached forever\n* If false then value is never cached\n* If a number then the number of seconds to cache value\n* If 'response' then CM will attempt to use Cache-Control or Expires headers from the response to determine how long to cache the value"
|
||||
},
|
||||
"type": {
|
||||
"description": "An unused hint about the content type. Not implemented yet",
|
||||
"enum": [
|
||||
"json",
|
||||
"yaml"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"path"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"LockActionJson": {
|
||||
"description": "Lock the Activity",
|
||||
"properties": {
|
||||
@@ -5268,7 +5214,7 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"RuleSetJson": {
|
||||
"RuleSetConfigData": {
|
||||
"description": "A RuleSet is a \"nested\" set of `Rule` objects that can be used to create more complex AND/OR behavior. Think of the outcome of a `RuleSet` as the result of all of its run `Rule` objects (based on `condition`)",
|
||||
"properties": {
|
||||
"condition": {
|
||||
@@ -5287,6 +5233,9 @@
|
||||
"description": "Can be `Rule` or the `name` of any **named** `Rule` in your subreddit's configuration",
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/IncludesData"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/RecentActivityRuleJSONConfig"
|
||||
},
|
||||
@@ -5325,7 +5274,7 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"RunJson": {
|
||||
"RunConfigData": {
|
||||
"properties": {
|
||||
"authorIs": {
|
||||
"anyOf": [
|
||||
@@ -5355,10 +5304,16 @@
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/SubmissionCheckJson"
|
||||
"$ref": "#/definitions/IncludesData"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/CommentCheckJson"
|
||||
"$ref": "#/definitions/SubmissionCheckConfigData"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/CommentCheckConfigData"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -5697,7 +5652,7 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"SubmissionCheckJson": {
|
||||
"SubmissionCheckConfigData": {
|
||||
"properties": {
|
||||
"actions": {
|
||||
"description": "The `Actions` to run after the check is successfully triggered. ALL `Actions` will run in the order they are listed\n\n Can be `Action` or the `name` of any **named** `Action` in your subreddit's configuration",
|
||||
@@ -5715,6 +5670,9 @@
|
||||
],
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/IncludesData"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/FlairActionJson"
|
||||
},
|
||||
@@ -5835,7 +5793,10 @@
|
||||
"$ref": "#/definitions/SubmissionState"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/NamedCriteria<SubmissionState>"
|
||||
"$ref": "#/definitions/CommentState"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/NamedCriteria<TypedActivityState>"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
@@ -5845,7 +5806,7 @@
|
||||
"type": "array"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/FilterOptionsJson<SubmissionState>"
|
||||
"$ref": "#/definitions/FilterOptionsJson<TypedActivityState>"
|
||||
}
|
||||
],
|
||||
"description": "A list of criteria to test the state of the `Activity` against before running the check.\n\nIf any set of criteria passes the Check will be run. If the criteria fails then the Check will fail.\n\n* @examples [[{\"over_18\": true, \"removed': false}]]"
|
||||
@@ -5898,6 +5859,9 @@
|
||||
"description": "A list of Rules to run.\n\nIf `Rule` objects are triggered based on `condition` then `actions` will be performed.\n\nCan be `Rule`, `RuleSet`, or the `name` of any **named** `Rule` in your subreddit's configuration.\n\n**If `rules` is an empty array or not present then `actions` are performed immediately.**",
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/IncludesData"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/RecentActivityRuleJSONConfig"
|
||||
},
|
||||
@@ -5923,7 +5887,7 @@
|
||||
"$ref": "#/definitions/SentimentRuleJSONConfig"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/RuleSetJson"
|
||||
"$ref": "#/definitions/RuleSetConfigData"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
@@ -6088,8 +6052,8 @@
|
||||
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
|
||||
},
|
||||
"reports": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
@@ -6123,6 +6087,13 @@
|
||||
"title": {
|
||||
"description": "A valid regular expression to match against the title of the submission",
|
||||
"type": "string"
|
||||
},
|
||||
"upvoteRatio": {
|
||||
"description": "Compare the upvote ratio for this Submission, expressed as a whole number\n\nCan be either a comparison string or a number. If a number then CM assumes upvote ratio must be greater than or equal to this.\n\nExample:\n\n* `< 90` => less than 90% upvoted\n* 45 => greater than or equal to 45% upvoted",
|
||||
"type": [
|
||||
"string",
|
||||
"number"
|
||||
]
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
@@ -6492,10 +6463,16 @@
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/SubmissionCheckJson"
|
||||
"$ref": "#/definitions/IncludesData"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/CommentCheckJson"
|
||||
"$ref": "#/definitions/SubmissionCheckConfigData"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/CommentCheckConfigData"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -6596,7 +6573,17 @@
|
||||
"runs": {
|
||||
"description": "A list of sets of Checks to run",
|
||||
"items": {
|
||||
"$ref": "#/definitions/RunJson"
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/IncludesData"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/RunConfigData"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"minItems": 1,
|
||||
"type": "array"
|
||||
|
||||
5765
src/Schema/Check.json
Normal file
5765
src/Schema/Check.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -258,6 +258,9 @@
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"influxConfig": {
|
||||
"$ref": "#/definitions/InfluxConfig"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -578,8 +581,8 @@
|
||||
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
|
||||
},
|
||||
"reports": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
@@ -866,6 +869,43 @@
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"InfluxConfig": {
|
||||
"properties": {
|
||||
"credentials": {
|
||||
"$ref": "#/definitions/InfluxCredentials"
|
||||
},
|
||||
"defaultTags": {
|
||||
"$ref": "#/definitions/Record<string,string>"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"credentials"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"InfluxCredentials": {
|
||||
"properties": {
|
||||
"bucket": {
|
||||
"type": "string"
|
||||
},
|
||||
"org": {
|
||||
"type": "string"
|
||||
},
|
||||
"token": {
|
||||
"type": "string"
|
||||
},
|
||||
"url": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"bucket",
|
||||
"org",
|
||||
"token",
|
||||
"url"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"LoggingOptions": {
|
||||
"properties": {
|
||||
"console": {
|
||||
@@ -1669,6 +1709,9 @@
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"Record<string,string>": {
|
||||
"type": "object"
|
||||
},
|
||||
"RedditCredentials": {
|
||||
"description": "Credentials required for the bot to interact with Reddit's API\n\nThese credentials will provided to both the API and Web interface unless otherwise specified with the `web.credentials` property\n\nRefer to the [required credentials table](https://github.com/FoxxMD/context-mod/blob/master/docs/operatorConfiguration.md#minimum-required-configuration) to see what is necessary to run the bot.",
|
||||
"examples": [
|
||||
@@ -1906,8 +1949,8 @@
|
||||
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
|
||||
},
|
||||
"reports": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
@@ -1941,6 +1984,13 @@
|
||||
"title": {
|
||||
"description": "A valid regular expression to match against the title of the submission",
|
||||
"type": "string"
|
||||
},
|
||||
"upvoteRatio": {
|
||||
"description": "Compare the upvote ratio for this Submission, expressed as a whole number\n\nCan be either a comparison string or a number. If a number then CM assumes upvote ratio must be greater than or equal to this.\n\nExample:\n\n* `< 90` => less than 90% upvoted\n* 45 => greater than or equal to 45% upvoted",
|
||||
"type": [
|
||||
"string",
|
||||
"number"
|
||||
]
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
@@ -2193,6 +2243,23 @@
|
||||
"$ref": "#/definitions/DatabaseStatisticsOperatorJsonConfig",
|
||||
"description": "Set defaults for the frequency time series stats are collected"
|
||||
},
|
||||
"dev": {
|
||||
"properties": {
|
||||
"monitorMemory": {
|
||||
"description": "Invoke `process.memoryUsage()` on an interval and send metrics to Influx\n\nOnly works if Influx config is provided",
|
||||
"type": "boolean"
|
||||
},
|
||||
"monitorMemoryInterval": {
|
||||
"default": 15,
|
||||
"description": "Interval, in seconds, to invoke `process.memoryUsage()` at\n\nDefaults to 15 seconds",
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"influxConfig": {
|
||||
"$ref": "#/definitions/InfluxConfig"
|
||||
},
|
||||
"logging": {
|
||||
"$ref": "#/definitions/LoggingOptions",
|
||||
"description": "Settings to configure global logging defaults"
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/IncludesData"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/RecentActivityRuleJSONConfig"
|
||||
},
|
||||
@@ -107,8 +110,8 @@
|
||||
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
|
||||
},
|
||||
"reports": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
@@ -819,8 +822,8 @@
|
||||
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
|
||||
},
|
||||
"reports": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
@@ -1655,6 +1658,41 @@
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"IncludesData": {
|
||||
"properties": {
|
||||
"path": {
|
||||
"description": "The special-form path to the config fragment to retrieve.\n\nIf value starts with `wiki:` then the proceeding value will be used to get a wiki page from the current subreddit\n\n * EX `wiki:botconfig/mybot` tries to get `https://reddit.com/r/currentSubreddit/wiki/botconfig/mybot`\n\nIf the value starts with `wiki:` and ends with `|someValue` then `someValue` will be used as the base subreddit for the wiki page\n\n* EX `wiki:replytemplates/test|ContextModBot` tries to get `https://reddit.com/r/ContextModBot/wiki/replytemplates/test`\n\nIf the value starts with `url:` then the value is fetched as an external url and expects raw text returned\n\n* EX `url:https://pastebin.com/raw/38qfL7mL` tries to get the text response of `https://pastebin.com/raw/38qfL7mL`",
|
||||
"type": "string"
|
||||
},
|
||||
"ttl": {
|
||||
"anyOf": [
|
||||
{
|
||||
"enum": [
|
||||
false,
|
||||
"response",
|
||||
true
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "number"
|
||||
}
|
||||
],
|
||||
"description": "Control caching for the config fragment.\n\nIf not specified the value for `wikiTTL` will be used\n\n* If true then value is cached forever\n* If false then value is never cached\n* If a number then the number of seconds to cache value\n* If 'response' then CM will attempt to use Cache-Control or Expires headers from the response to determine how long to cache the value"
|
||||
},
|
||||
"type": {
|
||||
"description": "An unused hint about the content type. Not implemented yet",
|
||||
"enum": [
|
||||
"json",
|
||||
"yaml"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"path"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"ModLogCriteria": {
|
||||
"properties": {
|
||||
"action": {
|
||||
@@ -3318,8 +3356,8 @@
|
||||
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
|
||||
},
|
||||
"reports": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
@@ -3353,6 +3391,13 @@
|
||||
"title": {
|
||||
"description": "A valid regular expression to match against the title of the submission",
|
||||
"type": "string"
|
||||
},
|
||||
"upvoteRatio": {
|
||||
"description": "Compare the upvote ratio for this Submission, expressed as a whole number\n\nCan be either a comparison string or a number. If a number then CM assumes upvote ratio must be greater than or equal to this.\n\nExample:\n\n* `< 90` => less than 90% upvoted\n* 45 => greater than or equal to 45% upvoted",
|
||||
"type": [
|
||||
"string",
|
||||
"number"
|
||||
]
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
|
||||
@@ -78,8 +78,8 @@
|
||||
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
|
||||
},
|
||||
"reports": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
@@ -790,8 +790,8 @@
|
||||
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
|
||||
},
|
||||
"reports": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
@@ -1626,6 +1626,41 @@
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"IncludesData": {
|
||||
"properties": {
|
||||
"path": {
|
||||
"description": "The special-form path to the config fragment to retrieve.\n\nIf value starts with `wiki:` then the proceeding value will be used to get a wiki page from the current subreddit\n\n * EX `wiki:botconfig/mybot` tries to get `https://reddit.com/r/currentSubreddit/wiki/botconfig/mybot`\n\nIf the value starts with `wiki:` and ends with `|someValue` then `someValue` will be used as the base subreddit for the wiki page\n\n* EX `wiki:replytemplates/test|ContextModBot` tries to get `https://reddit.com/r/ContextModBot/wiki/replytemplates/test`\n\nIf the value starts with `url:` then the value is fetched as an external url and expects raw text returned\n\n* EX `url:https://pastebin.com/raw/38qfL7mL` tries to get the text response of `https://pastebin.com/raw/38qfL7mL`",
|
||||
"type": "string"
|
||||
},
|
||||
"ttl": {
|
||||
"anyOf": [
|
||||
{
|
||||
"enum": [
|
||||
false,
|
||||
"response",
|
||||
true
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "number"
|
||||
}
|
||||
],
|
||||
"description": "Control caching for the config fragment.\n\nIf not specified the value for `wikiTTL` will be used\n\n* If true then value is cached forever\n* If false then value is never cached\n* If a number then the number of seconds to cache value\n* If 'response' then CM will attempt to use Cache-Control or Expires headers from the response to determine how long to cache the value"
|
||||
},
|
||||
"type": {
|
||||
"description": "An unused hint about the content type. Not implemented yet",
|
||||
"enum": [
|
||||
"json",
|
||||
"yaml"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"path"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"ModLogCriteria": {
|
||||
"properties": {
|
||||
"action": {
|
||||
@@ -3289,8 +3324,8 @@
|
||||
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
|
||||
},
|
||||
"reports": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
@@ -3324,6 +3359,13 @@
|
||||
"title": {
|
||||
"description": "A valid regular expression to match against the title of the submission",
|
||||
"type": "string"
|
||||
},
|
||||
"upvoteRatio": {
|
||||
"description": "Compare the upvote ratio for this Submission, expressed as a whole number\n\nCan be either a comparison string or a number. If a number then CM assumes upvote ratio must be greater than or equal to this.\n\nExample:\n\n* `< 90` => less than 90% upvoted\n* 45 => greater than or equal to 45% upvoted",
|
||||
"type": [
|
||||
"string",
|
||||
"number"
|
||||
]
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
@@ -3440,6 +3482,9 @@
|
||||
"description": "Can be `Rule` or the `name` of any **named** `Rule` in your subreddit's configuration",
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/IncludesData"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/RecentActivityRuleJSONConfig"
|
||||
},
|
||||
|
||||
6011
src/Schema/Run.json
Normal file
6011
src/Schema/Run.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -57,8 +57,7 @@ import ConfigParseError from "../Utils/ConfigParseError";
|
||||
import dayjs, {Dayjs as DayjsObj} from "dayjs";
|
||||
import Action from "../Action";
|
||||
import {queue, QueueObject} from 'async';
|
||||
import {JSONConfig} from "../JsonConfig";
|
||||
import {Check, CheckStructuredJson} from "../Check";
|
||||
import {SubredditConfigHydratedData, SubredditConfigData} from "../SubredditConfigData";
|
||||
import NotificationManager from "../Notification/NotificationManager";
|
||||
import {createHistoricalDisplayDefaults} from "../Common/defaults";
|
||||
import {ExtendedSnoowrap} from "../Utils/SnoowrapClients";
|
||||
@@ -98,6 +97,8 @@ import {
|
||||
} from "../Common/Infrastructure/Atomic";
|
||||
import {parseFromJsonOrYamlToObject} from "../Common/Config/ConfigUtil";
|
||||
import {FilterCriteriaDefaults} from "../Common/Infrastructure/Filters/FilterShapes";
|
||||
import {InfluxClient} from "../Common/Influx/InfluxClient";
|
||||
import { Point } from "@influxdata/influxdb-client";
|
||||
|
||||
export interface RunningState {
|
||||
state: RunState,
|
||||
@@ -139,6 +140,7 @@ export interface RuntimeManagerOptions extends Omit<ManagerOptions, 'filterCrite
|
||||
managerEntity: ManagerEntity
|
||||
filterCriteriaDefaults?: FilterCriteriaDefaults
|
||||
statDefaults: DatabaseStatisticsOperatorConfig
|
||||
influxClients: InfluxClient[]
|
||||
}
|
||||
|
||||
interface QueuedIdentifier {
|
||||
@@ -227,7 +229,8 @@ export class Manager extends EventEmitter implements RunningStates {
|
||||
rulesUniqueSample: number[] = [];
|
||||
rulesUniqueSampleInterval: any;
|
||||
rulesUniqueRollingAvg: number = 0;
|
||||
actionedEvents: ActionedEvent[] = [];
|
||||
|
||||
modqueueInterval: number = 0;
|
||||
|
||||
delayedQueueInterval: any;
|
||||
|
||||
@@ -237,6 +240,8 @@ export class Manager extends EventEmitter implements RunningStates {
|
||||
authorRepo!: Repository<AuthorEntity>
|
||||
eventRepo!: Repository<CMEvent>;
|
||||
|
||||
influxClients: InfluxClient[] = [];
|
||||
|
||||
getStats = async (): Promise<ManagerStats> => {
|
||||
const data: any = {
|
||||
eventsAvg: formatNumber(this.eventsRollingAvg),
|
||||
@@ -308,7 +313,8 @@ export class Manager extends EventEmitter implements RunningStates {
|
||||
botEntity,
|
||||
managerEntity,
|
||||
statDefaults,
|
||||
retention
|
||||
retention,
|
||||
influxClients,
|
||||
} = opts || {};
|
||||
this.displayLabel = opts.nickname || `${sub.display_name_prefixed}`;
|
||||
const getLabels = this.getCurrentLabels;
|
||||
@@ -325,7 +331,11 @@ export class Manager extends EventEmitter implements RunningStates {
|
||||
}, mergeArr);
|
||||
this.logger.stream().on('log', (log: LogInfo) => {
|
||||
if(log.subreddit !== undefined && log.subreddit === this.getDisplay()) {
|
||||
this.logs = [log, ...this.logs].slice(0, 301);
|
||||
this.logs.unshift(log);
|
||||
if(this.logs.length > 300) {
|
||||
// remove all elements starting from the 300th index (301st item)
|
||||
this.logs.splice(300);
|
||||
}
|
||||
}
|
||||
});
|
||||
this.globalDryRun = dryRun;
|
||||
@@ -338,6 +348,9 @@ export class Manager extends EventEmitter implements RunningStates {
|
||||
this.pollingRetryHandler = createRetryHandler({maxRequestRetry: 3, maxOtherRetry: 2}, this.logger);
|
||||
this.subreddit = sub;
|
||||
this.botEntity = botEntity;
|
||||
for(const client of influxClients) {
|
||||
this.influxClients.push(client.childClient(this.logger, {manager: this.displayLabel, subreddit: sub.display_name_prefixed}));
|
||||
}
|
||||
|
||||
this.managerEntity = managerEntity;
|
||||
// always init in stopped state but use last invokee to determine if we should start the manager automatically afterwards
|
||||
@@ -651,7 +664,7 @@ export class Manager extends EventEmitter implements RunningStates {
|
||||
this.logger.info('Subreddit-specific options updated');
|
||||
this.logger.info('Building Runs and Checks...');
|
||||
|
||||
const structuredRuns = configBuilder.parseToStructured(validJson, this.filterCriteriaDefaults, this.postCheckBehaviorDefaults);
|
||||
const structuredRuns = await configBuilder.parseToStructured(validJson, this.resources, this.filterCriteriaDefaults, this.postCheckBehaviorDefaults);
|
||||
|
||||
let runs: Run[] = [];
|
||||
|
||||
@@ -887,12 +900,67 @@ export class Manager extends EventEmitter implements RunningStates {
|
||||
force = false,
|
||||
} = options;
|
||||
|
||||
const event = new CMEvent();
|
||||
|
||||
if(refresh) {
|
||||
this.logger.verbose(`Refreshed data`);
|
||||
// @ts-ignore
|
||||
item = await activity.refresh();
|
||||
}
|
||||
|
||||
let activityEntity: Activity;
|
||||
const existingEntity = await this.activityRepo.findOneBy({_id: item.name});
|
||||
|
||||
|
||||
/**
|
||||
* Report Tracking
|
||||
*
|
||||
* Store ids for activities we process. Enables us to be sure of whether modqueue has been monitored since we've last seen the activity
|
||||
*
|
||||
* */
|
||||
let lastKnownStateTimestamp = await this.resources.getActivityLastSeenDate(item.name);
|
||||
if(lastKnownStateTimestamp !== undefined && lastKnownStateTimestamp.isBefore(this.startedAt)) {
|
||||
// if we last saw this activity BEFORE we started event polling (modqueue) then it's not useful to us
|
||||
lastKnownStateTimestamp = undefined;
|
||||
}
|
||||
await this.resources.setActivityLastSeenDate(item.name);
|
||||
|
||||
// if modqueue is running then we know we are checking for new reports every X seconds
|
||||
if(options.activitySource.identifier === 'modqueue') {
|
||||
// if the activity is from modqueue and only has one report then we know that report was just created
|
||||
if(item.num_reports === 1
|
||||
// otherwise if it has more than one report AND we have seen it (its only seen if it has already been stored (in below block))
|
||||
// then we are reasonably sure that any reports created were in the last X seconds
|
||||
|| (item.num_reports > 1 && lastKnownStateTimestamp !== undefined)) {
|
||||
|
||||
lastKnownStateTimestamp = dayjs().subtract(this.modqueueInterval, 'seconds');
|
||||
}
|
||||
}
|
||||
// if activity is not from modqueue then known good timestamps for "time between last known report and now" is dependent on these things:
|
||||
// 1) (most accurate) lastKnownStateTimestamp -- only available if activity either had 0 reports OR 1+ and existing reports have been stored (see below code)
|
||||
// 2) last stored report time from Activity
|
||||
// 3) create date of activity
|
||||
|
||||
let shouldPersistReports = false;
|
||||
|
||||
if (existingEntity === null) {
|
||||
activityEntity = Activity.fromSnoowrapActivity(this.managerEntity.subreddit, activity, lastKnownStateTimestamp);
|
||||
// always persist if activity is not already persisted and any reports exist
|
||||
if (item.num_reports > 0) {
|
||||
shouldPersistReports = true;
|
||||
}
|
||||
} else {
|
||||
activityEntity = existingEntity;
|
||||
// always persist if reports need to be updated
|
||||
if (activityEntity.syncReports(item, lastKnownStateTimestamp)) {
|
||||
shouldPersistReports = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldPersistReports) {
|
||||
activityEntity = await this.activityRepo.save(activityEntity);
|
||||
}
|
||||
|
||||
const itemId = await item.id;
|
||||
|
||||
if(await this.resources.hasRecentSelf(item)) {
|
||||
@@ -905,15 +973,6 @@ export class Manager extends EventEmitter implements RunningStates {
|
||||
}
|
||||
}
|
||||
|
||||
let activityEntity: Activity;
|
||||
const existingEntity = await this.activityRepo.findOneBy({_id: item.name});
|
||||
if(existingEntity === null) {
|
||||
activityEntity = Activity.fromSnoowrapActivity(this.managerEntity.subreddit, activity);
|
||||
} else {
|
||||
activityEntity = existingEntity;
|
||||
}
|
||||
|
||||
const event = new CMEvent();
|
||||
event.triggered = false;
|
||||
event.manager = this.managerEntity;
|
||||
event.activity = activityEntity;
|
||||
@@ -928,7 +987,6 @@ export class Manager extends EventEmitter implements RunningStates {
|
||||
itemIdentifiers.push(`${checkType === 'Submission' ? 'SUB' : 'COM'} ${itemId}`);
|
||||
this.currentLabels = itemIdentifiers;
|
||||
let ePeek = '';
|
||||
let peekParts: ItemContent;
|
||||
try {
|
||||
const [peek, { content: peekContent }] = await itemContentPeek(item);
|
||||
ePeek = peekContent;
|
||||
@@ -1084,6 +1142,10 @@ export class Manager extends EventEmitter implements RunningStates {
|
||||
event.runResults = runResults;
|
||||
//actionedEvent.runResults = runResults;
|
||||
|
||||
const checksRun = actionedEvent.runResults.map(x => x.checkResults).flat().length;
|
||||
let actionsRun = actionedEvent.runResults.map(x => x.checkResults?.map(y => y.actionResults)).flat().length;
|
||||
let totalRulesRun = actionedEvent.runResults.map(x => x.checkResults?.map(y => y.ruleResults)).flat(5).length;
|
||||
|
||||
// determine if event should be recorded
|
||||
const allOutputs = [...new Set(runResults.map(x => x.checkResults.map(y => y.recordOutputs ?? [])).flat(2).filter(x => recordOutputTypes.includes(x)))];
|
||||
if(allOutputs.length > 0) {
|
||||
@@ -1115,11 +1177,108 @@ export class Manager extends EventEmitter implements RunningStates {
|
||||
}
|
||||
await this.eventRepo.save(event);
|
||||
}
|
||||
}
|
||||
if (allOutputs.includes('influx') && this.influxClients.length > 0) {
|
||||
try {
|
||||
const time = dayjs().valueOf()
|
||||
|
||||
const checksRun = actionedEvent.runResults.map(x => x.checkResults).flat().length;
|
||||
let actionsRun = actionedEvent.runResults.map(x => x.checkResults?.map(y => y.actionResults)).flat().length;
|
||||
let totalRulesRun = actionedEvent.runResults.map(x => x.checkResults?.map(y => y.ruleResults)).flat().length;
|
||||
const measurements: Point[] = [];
|
||||
|
||||
measurements.push(new Point('event')
|
||||
.timestamp(time)
|
||||
.tag('triggered', event.triggered ? '1' : '0')
|
||||
.tag('activityType', isSubmission(item) ? 'submission' : 'comment')
|
||||
.tag('sourceIdentifier', event.source.identifier ?? 'unknown')
|
||||
.tag('sourceType', event.source.type)
|
||||
.stringField('eventId', event.id)
|
||||
.stringField('activityId', event.activity.id)
|
||||
.stringField('author', actionedEvent.activity.author)
|
||||
.intField('processingTime', time - event.processedAt.valueOf())
|
||||
.intField('queuedTime', event.processedAt.valueOf() - event.queuedAt.valueOf())
|
||||
.intField('runsProcessed', actionedEvent.runResults.length)
|
||||
.intField('runsTriggered', actionedEvent.runResults.filter(x => x.triggered).length)
|
||||
.intField('checksProcessed', checksRun)
|
||||
.intField('checksTriggered', actionedEvent.runResults.map(x => x.checkResults).flat().filter(x => x.triggered).length)
|
||||
.intField('totalRulesProcessed', totalRulesRun)
|
||||
.intField('rulesTriggered', actionedEvent.runResults.map(x => x.checkResults?.map(y => y.ruleResults)).flat(5).filter((x: RuleResultEntity) => x.triggered === true).length)
|
||||
.intField('uniqueRulesProcessed', allRuleResults.length)
|
||||
.intField('cachedRulesProcessed', totalRulesRun - allRuleResults.length)
|
||||
.intField('actionsProcessed', actionsRun)
|
||||
.intField('apiUsage', startingApiLimit - this.client.ratelimitRemaining));
|
||||
|
||||
const defaultPoint = () => new Point('triggeredEntity')
|
||||
.timestamp(time)
|
||||
.tag('activityType', isSubmission(item) ? 'submission' : 'comment')
|
||||
.tag('sourceIdentifier', event.source.identifier ?? 'unknown')
|
||||
.tag('sourceType', event.source.type)
|
||||
.stringField('activityId', event.activity.id)
|
||||
.stringField('author', actionedEvent.activity.author)
|
||||
.stringField('eventId', event.id);
|
||||
|
||||
for (const r of event.runResults) {
|
||||
if (r.triggered) {
|
||||
measurements.push(defaultPoint()
|
||||
.tag('entityType', 'run')
|
||||
.tag('name', r.run.name));
|
||||
for (const c of r.checkResults) {
|
||||
if (c.triggered) {
|
||||
measurements.push(defaultPoint()
|
||||
.tag('entityType', 'check')
|
||||
.stringField('name', c.check.name)
|
||||
.tag('fromCache', c.fromCache ? '1' : '0'));
|
||||
|
||||
if (c.ruleResults !== undefined) {
|
||||
for (const ru of c.ruleResults) {
|
||||
if (ru.result.triggered) {
|
||||
measurements.push(defaultPoint()
|
||||
.tag('entityType', 'rule')
|
||||
.stringField('name', ru.result.premise.name)
|
||||
.tag('fromCache', ru.result.fromCache ? '1' : '0'))
|
||||
}
|
||||
}
|
||||
}
|
||||
if (c.ruleSetResults !== undefined) {
|
||||
for (const rs of c.ruleSetResults) {
|
||||
if (rs.result.triggered) {
|
||||
measurements.push(defaultPoint()
|
||||
.tag('entityType', 'ruleSet'));
|
||||
for (const ru of rs.result.results) {
|
||||
if (ru.triggered) {
|
||||
measurements.push(defaultPoint()
|
||||
.tag('entityType', 'rule')
|
||||
.stringField('name', ru.premise.name))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (c.actionResults !== undefined) {
|
||||
for (const a of c.actionResults) {
|
||||
if (a.run) {
|
||||
measurements.push(defaultPoint()
|
||||
.tag('entityType', 'action')
|
||||
.stringField('name', a.premise.name)
|
||||
.tag('dryRun', a.dryRun ? '1' : '0')
|
||||
.tag('succes', a.success ? '1' : '0')
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const client of this.influxClients) {
|
||||
await client.writePoint(measurements);
|
||||
}
|
||||
} catch (e: any) {
|
||||
this.logger.error(new CMError('Error occurred while building or sending Influx data', {
|
||||
cause: e,
|
||||
isSerious: false
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.logger.verbose(`Run Stats: Checks ${checksRun} | Rules => Total: ${totalRulesRun} Unique: ${allRuleResults.length} Cached: ${totalRulesRun - allRuleResults.length} Rolling Avg: ~${formatNumber(this.rulesUniqueRollingAvg)}/s | Actions ${actionsRun}`);
|
||||
this.logger.verbose(`Reddit API Stats: Initial ${startingApiLimit} | Current ${this.client.ratelimitRemaining} | Used ~${startingApiLimit - this.client.ratelimitRemaining} | Events ~${formatNumber(this.eventsRollingAvg)}/s`);
|
||||
@@ -1474,6 +1633,11 @@ export class Manager extends EventEmitter implements RunningStates {
|
||||
s.startInterval();
|
||||
}
|
||||
this.startedAt = dayjs();
|
||||
|
||||
const modQueuePollOpts = this.pollOptions.find(x => x.pollOn === 'modqueue');
|
||||
if(modQueuePollOpts !== undefined) {
|
||||
this.modqueueInterval = modQueuePollOpts.interval;
|
||||
}
|
||||
}
|
||||
|
||||
this.logger.info('Event polling STARTED');
|
||||
@@ -1571,6 +1735,14 @@ export class Manager extends EventEmitter implements RunningStates {
|
||||
await this.syncRunningState('managerState');
|
||||
}
|
||||
|
||||
async destroy(causedBy: Invokee = 'system', options?: ManagerStateChangeOption) {
|
||||
await this.stop(causedBy, options);
|
||||
clearInterval(this.eventsSampleInterval);
|
||||
clearInterval(this.delayedQueueInterval);
|
||||
clearInterval(this.rulesUniqueSampleInterval)
|
||||
await this.cacheManager.destroy(this.subreddit.display_name);
|
||||
}
|
||||
|
||||
setInitialRunningState(managerEntity: RunningStateEntities, type: RunningStateTypes): RunningState {
|
||||
if(managerEntity[type].runType.name === 'stopped' && managerEntity[type].invokee.name === 'user') {
|
||||
return {state: STOPPED, causedBy: 'user'};
|
||||
@@ -1591,4 +1763,32 @@ export class Manager extends EventEmitter implements RunningStates {
|
||||
|
||||
await this.cacheManager.defaultDatabase.getRepository(ManagerEntity).save(this.managerEntity);
|
||||
}
|
||||
|
||||
async writeHealthMetrics(time?: number) {
|
||||
if (this.influxClients.length > 0) {
|
||||
const metric = new Point('managerHealth')
|
||||
.intField('delayedActivities', this.resources !== undefined ? this.resources.delayedItems.length : 0)
|
||||
.intField('processing', this.queue.running())
|
||||
.intField('queued', this.queue.length())
|
||||
.booleanField('eventsRunning', this.eventsState.state === RUNNING)
|
||||
.booleanField('queueRunning', this.queueState.state === RUNNING)
|
||||
.booleanField('running', this.managerState.state === RUNNING)
|
||||
.intField('uptime', this.startedAt !== undefined ? dayjs().diff(this.startedAt, 'seconds') : 0)
|
||||
.intField('configAge', this.lastWikiRevision === undefined ? 0 : dayjs().diff(this.lastWikiRevision, 'seconds'));
|
||||
|
||||
if (this.resources !== undefined) {
|
||||
const {req, miss} = this.resources.getCacheTotals();
|
||||
metric.intField('cacheRequests', req)
|
||||
.intField('cacheMisses', miss);
|
||||
}
|
||||
|
||||
if (time !== undefined) {
|
||||
metric.timestamp(time);
|
||||
}
|
||||
|
||||
for (const client of this.influxClients) {
|
||||
await client.writePoint(metric);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,12 +4,12 @@ import {
|
||||
activityIsDeleted, activityIsFiltered,
|
||||
activityIsRemoved,
|
||||
AuthorTypedActivitiesOptions, BOT_LINK,
|
||||
getAuthorHistoryAPIOptions
|
||||
getAuthorHistoryAPIOptions, renderContent
|
||||
} from "../Utils/SnoowrapUtils";
|
||||
import {map as mapAsync} from 'async';
|
||||
import winston, {Logger} from "winston";
|
||||
import as from 'async';
|
||||
import fetch from 'node-fetch';
|
||||
import fetch, {Response} from 'node-fetch';
|
||||
import {
|
||||
asActivity,
|
||||
asSubmission,
|
||||
@@ -20,7 +20,7 @@ import {
|
||||
createCacheManager,
|
||||
escapeRegex,
|
||||
FAIL,
|
||||
fetchExternalUrl,
|
||||
fetchExternalResult,
|
||||
filterCriteriaSummary,
|
||||
formatNumber,
|
||||
generateItemFilterHelpers,
|
||||
@@ -96,7 +96,7 @@ import {CMEvent as ActionedEventEntity, CMEvent } from "../Common/Entities/CMEve
|
||||
import {RuleResultEntity} from "../Common/Entities/RuleResultEntity";
|
||||
import globrex from 'globrex';
|
||||
import {runMigrations} from "../Common/Migrations/CacheMigrationUtils";
|
||||
import {isStatusError, MaybeSeriousErrorWithCause, SimpleError} from "../Utils/Errors";
|
||||
import {CMError, isStatusError, MaybeSeriousErrorWithCause, SimpleError} from "../Utils/Errors";
|
||||
import {ErrorWithCause} from "pony-cause";
|
||||
import {ManagerEntity} from "../Common/Entities/ManagerEntity";
|
||||
import {Bot} from "../Common/Entities/Bot";
|
||||
@@ -119,7 +119,7 @@ import {
|
||||
UserNoteCriteria
|
||||
} from "../Common/Infrastructure/Filters/FilterCriteria";
|
||||
import {
|
||||
ActivitySource, DurationVal,
|
||||
ActivitySource, ConfigFragmentValidationFunc, DurationVal,
|
||||
EventRetentionPolicyRange,
|
||||
JoinOperands,
|
||||
ModActionType,
|
||||
@@ -141,6 +141,7 @@ import {
|
||||
} from "../Common/Infrastructure/ActivityWindow";
|
||||
import {Duration} from "dayjs/plugin/duration";
|
||||
import {
|
||||
activityReports,
|
||||
|
||||
ActivityType,
|
||||
AuthorHistorySort,
|
||||
@@ -153,9 +154,13 @@ import {
|
||||
compareDurationValue, comparisonTextOp,
|
||||
parseDurationComparison,
|
||||
parseGenericValueComparison,
|
||||
parseGenericValueOrPercentComparison
|
||||
parseGenericValueOrPercentComparison, parseReportComparison
|
||||
} from "../Common/Infrastructure/Comparisons";
|
||||
import {asCreateModNoteData, CreateModNoteData, ModNote, ModNoteRaw} from "./ModNotes/ModNote";
|
||||
import {IncludesData} from "../Common/Infrastructure/Includes";
|
||||
import {parseFromJsonOrYamlToObject} from "../Common/Config/ConfigUtil";
|
||||
import ConfigParseError from "../Utils/ConfigParseError";
|
||||
import {ActivityReport} from "../Common/Entities/ActivityReport";
|
||||
|
||||
export const DEFAULT_FOOTER = '\r\n*****\r\nThis action was performed by [a bot.]({{botLink}}) Mention a moderator or [send a modmail]({{modmailLink}}) if you any ideas, questions, or concerns about this action.';
|
||||
|
||||
@@ -345,7 +350,7 @@ export class SubredditResources {
|
||||
// set default prune interval
|
||||
this.pruneInterval = setInterval(() => {
|
||||
// @ts-ignore
|
||||
this.defaultCache?.store.prune();
|
||||
this.cache?.store.prune();
|
||||
this.logger.debug('Pruned cache');
|
||||
// prune interval should be twice the smallest TTL
|
||||
},min * 1000 * 2)
|
||||
@@ -367,6 +372,16 @@ export class SubredditResources {
|
||||
}
|
||||
}
|
||||
|
||||
async destroy() {
|
||||
if(this.historicalSaveInterval !== undefined) {
|
||||
clearInterval(this.historicalSaveInterval);
|
||||
}
|
||||
if(this.pruneInterval !== undefined && this.cacheType === 'memory' && this.cacheSettingsHash !== 'default') {
|
||||
clearInterval(this.pruneInterval);
|
||||
this.cache?.reset();
|
||||
}
|
||||
}
|
||||
|
||||
async retentionCleanup() {
|
||||
const logger = this.logger.child({labels: ['Event Retention'], mergeArr});
|
||||
logger.debug('Starting cleanup');
|
||||
@@ -495,9 +510,15 @@ export class SubredditResources {
|
||||
this.delayedItems.push(data);
|
||||
}
|
||||
|
||||
async removeDelayedActivity(id: string) {
|
||||
await this.dispatchedActivityRepo.delete(id);
|
||||
this.delayedItems = this.delayedItems.filter(x => x.id !== id);
|
||||
async removeDelayedActivity(val?: string | string[]) {
|
||||
if(val === undefined) {
|
||||
await this.dispatchedActivityRepo.delete({manager: {id: this.managerEntity.id}});
|
||||
this.delayedItems = [];
|
||||
} else {
|
||||
const ids = typeof val === 'string' ? [val] : val;
|
||||
await this.dispatchedActivityRepo.delete(ids);
|
||||
this.delayedItems = this.delayedItems.filter(x => !ids.includes(x.id));
|
||||
}
|
||||
}
|
||||
|
||||
async initStats() {
|
||||
@@ -758,11 +779,15 @@ export class SubredditResources {
|
||||
}
|
||||
}
|
||||
|
||||
async getStats() {
|
||||
const totals = Object.values(this.stats.cache).reduce((acc, curr) => ({
|
||||
getCacheTotals() {
|
||||
return Object.values(this.stats.cache).reduce((acc, curr) => ({
|
||||
miss: acc.miss + curr.miss,
|
||||
req: acc.req + curr.requests,
|
||||
}), {miss: 0, req: 0});
|
||||
}
|
||||
|
||||
async getStats() {
|
||||
const totals = this.getCacheTotals();
|
||||
const cacheKeys = Object.keys(this.stats.cache);
|
||||
const res = {
|
||||
cache: {
|
||||
@@ -901,6 +926,29 @@ export class SubredditResources {
|
||||
// return events;
|
||||
// }
|
||||
|
||||
async getActivityLastSeenDate(value: SnoowrapActivity | string): Promise<Dayjs | undefined> {
|
||||
if(this.selfTTL !== false) {
|
||||
const id = typeof(value) === 'string' ? value : value.name;
|
||||
const hash = `activityLastSeen-${id}`;
|
||||
const lastSeenUnix = await this.cache.get(hash) as string | undefined | null;
|
||||
if(lastSeenUnix !== undefined && lastSeenUnix !== null) {
|
||||
return dayjs.unix(Number.parseInt(lastSeenUnix, 10));
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
async setActivityLastSeenDate(value: SnoowrapActivity | string, timestamp?: number): Promise<void> {
|
||||
if(this.selfTTL !== false) {
|
||||
const id = typeof(value) === 'string' ? value : value.name;
|
||||
const hash = `activityLastSeen-${id}`;
|
||||
this.cache.set(hash, timestamp ?? dayjs().unix(), {
|
||||
ttl: 86400 // store for 24 hours (seconds)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async getActivity(item: Submission | Comment) {
|
||||
try {
|
||||
let hash = '';
|
||||
@@ -1631,13 +1679,12 @@ export class SubredditResources {
|
||||
return filteredListing;
|
||||
}
|
||||
|
||||
|
||||
async getContent(val: string, subredditArg?: Subreddit): Promise<string> {
|
||||
async getExternalResource(val: string, subredditArg?: Subreddit): Promise<{val: string, fromCache: boolean, response?: Response, hash?: string}> {
|
||||
const subreddit = subredditArg || this.subreddit;
|
||||
let cacheKey;
|
||||
const wikiContext = parseWikiContext(val);
|
||||
if (wikiContext !== undefined) {
|
||||
cacheKey = `${wikiContext.wiki}${wikiContext.subreddit !== undefined ? `|${wikiContext.subreddit}` : ''}`;
|
||||
cacheKey = `${subreddit.display_name}-content-${wikiContext.wiki}${wikiContext.subreddit !== undefined ? `|${wikiContext.subreddit}` : ''}`;
|
||||
}
|
||||
const extUrl = wikiContext === undefined ? parseExternalUrl(val) : undefined;
|
||||
if (extUrl !== undefined) {
|
||||
@@ -1645,25 +1692,25 @@ export class SubredditResources {
|
||||
}
|
||||
|
||||
if (cacheKey === undefined) {
|
||||
return val;
|
||||
return {val, fromCache: false, hash: cacheKey};
|
||||
}
|
||||
|
||||
// try to get cached value first
|
||||
let hash = `${subreddit.display_name}-content-${cacheKey}`;
|
||||
if (this.wikiTTL !== false) {
|
||||
await this.stats.cache.content.identifierRequestCount.set(cacheKey, (await this.stats.cache.content.identifierRequestCount.wrap(cacheKey, () => 0) as number) + 1);
|
||||
this.stats.cache.content.requestTimestamps.push(Date.now());
|
||||
this.stats.cache.content.requests++;
|
||||
const cachedContent = await this.cache.get(hash);
|
||||
const cachedContent = await this.cache.get(cacheKey);
|
||||
if (cachedContent !== undefined && cachedContent !== null) {
|
||||
this.logger.debug(`Content Cache Hit: ${cacheKey}`);
|
||||
return cachedContent as string;
|
||||
return {val: cachedContent as string, fromCache: true, hash: cacheKey};
|
||||
} else {
|
||||
this.stats.cache.content.miss++;
|
||||
}
|
||||
}
|
||||
|
||||
let wikiContent: string;
|
||||
let response: Response | undefined;
|
||||
|
||||
// no cache hit, get from source
|
||||
if (wikiContext !== undefined) {
|
||||
@@ -1691,7 +1738,9 @@ export class SubredditResources {
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
wikiContent = await fetchExternalUrl(extUrl as string, this.logger);
|
||||
const [wikiContentVal, responseVal] = await fetchExternalResult(extUrl as string, this.logger);
|
||||
wikiContent = wikiContentVal;
|
||||
response = responseVal;
|
||||
} catch (err: any) {
|
||||
const msg = `Error occurred while trying to fetch the url ${extUrl}`;
|
||||
this.logger.error(msg, err);
|
||||
@@ -1699,13 +1748,118 @@ export class SubredditResources {
|
||||
}
|
||||
}
|
||||
|
||||
if (this.wikiTTL !== false) {
|
||||
return {val: wikiContent, fromCache: false, response, hash: cacheKey};
|
||||
}
|
||||
|
||||
async getContent(val: string, subredditArg?: Subreddit): Promise<string> {
|
||||
const {val: wikiContent, fromCache, hash} = await this.getExternalResource(val, subredditArg);
|
||||
|
||||
if (!fromCache && hash !== undefined && this.wikiTTL !== false) {
|
||||
this.cache.set(hash, wikiContent, {ttl: this.wikiTTL});
|
||||
}
|
||||
|
||||
return wikiContent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convenience method for using getContent and SnoowrapUtils@renderContent in one method
|
||||
* */
|
||||
async renderContent(contentStr: string, data: SnoowrapActivity, ruleResults: RuleResultEntity[] = [], usernotes?: UserNotes) {
|
||||
const content = await this.getContent(contentStr);
|
||||
return await renderContent(content, data, ruleResults, usernotes ?? this.userNotes);
|
||||
}
|
||||
|
||||
async getConfigFragment<T>(includesData: IncludesData, validateFunc?: ConfigFragmentValidationFunc): Promise<T> {
|
||||
|
||||
const {
|
||||
path,
|
||||
ttl = this.wikiTTL,
|
||||
} = includesData;
|
||||
|
||||
const {val: configStr, fromCache, hash, response} = await this.getExternalResource(path);
|
||||
|
||||
const [format, configObj, jsonErr, yamlErr] = parseFromJsonOrYamlToObject(configStr);
|
||||
if (configObj === undefined) {
|
||||
//this.logger.error(`Could not parse includes URL of '${configStr}' contents as JSON or YAML.`);
|
||||
this.logger.error(yamlErr);
|
||||
this.logger.debug(jsonErr);
|
||||
throw new ConfigParseError(`Could not parse includes URL of '${configStr}' contents as JSON or YAML.`)
|
||||
}
|
||||
|
||||
// if its from cache then we know the data is valid
|
||||
if(fromCache) {
|
||||
this.logger.verbose(`Got Config Fragment ${path} from cache`);
|
||||
return configObj.toJS() as unknown as T;
|
||||
}
|
||||
|
||||
const rawData = configObj.toJS();
|
||||
let validatedData: T;
|
||||
// otherwise now we want to validate it if a function is present
|
||||
if(validateFunc !== undefined) {
|
||||
try {
|
||||
validateFunc(configObj.toJS(), fromCache);
|
||||
validatedData = rawData as unknown as T;
|
||||
} catch (e) {
|
||||
throw e;
|
||||
}
|
||||
} else {
|
||||
validatedData = rawData as unknown as T;
|
||||
}
|
||||
|
||||
let ttlVal: number | false = this.wikiTTL;
|
||||
// never cache
|
||||
if(ttl === false) {
|
||||
return validatedData;
|
||||
} else if(typeof ttl === 'number') {
|
||||
ttlVal = ttl;
|
||||
} else if(ttl === true) {
|
||||
// cache forever
|
||||
ttlVal = 0;
|
||||
} else if(ttl === 'response') {
|
||||
// try to get cache time from response headers, if they exist
|
||||
// otherwise fallback to wiki ttl
|
||||
|
||||
if(response === undefined) {
|
||||
ttlVal = this.wikiTTL;
|
||||
} else {
|
||||
const cc = response.headers.get('cache-control');
|
||||
const ex = response.headers.get('expires');
|
||||
if(cc !== null) {
|
||||
// response doesn't want to be stored :(
|
||||
if(null !== cc.match('/no-(cache|store)/i')) {
|
||||
return validatedData;
|
||||
}
|
||||
const matches = cc.match(/max-age=(\d+)/);
|
||||
if(null === matches) {
|
||||
// huh? why include max-age but no value?
|
||||
ttlVal = this.wikiTTL;
|
||||
} else {
|
||||
ttlVal = parseInt(matches[1], 10);
|
||||
}
|
||||
} else if(ex !== null) {
|
||||
const expDate = dayjs(ex);
|
||||
if(dayjs.isDayjs(expDate) && expDate.isValid()) {
|
||||
const seconds = expDate.diff(dayjs(), 'second');
|
||||
if(seconds < 0) {
|
||||
// expiration is older than now?? don't cache
|
||||
return validatedData;
|
||||
}
|
||||
ttlVal = seconds;
|
||||
}
|
||||
} else {
|
||||
// couldn't get a cache header, fallback
|
||||
ttlVal = this.wikiTTL;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (ttlVal !== false) {
|
||||
this.cache.set(hash as string, configStr, {ttl: ttlVal});
|
||||
}
|
||||
|
||||
return validatedData;
|
||||
}
|
||||
|
||||
async cacheSubreddits(subs: (Subreddit | string)[]) {
|
||||
const allSubs = subs.map(x => typeof x !== 'string' ? x.display_name : x);
|
||||
const subNames = [...new Set(allSubs)];
|
||||
@@ -2132,7 +2286,7 @@ export class SubredditResources {
|
||||
const requestedSourcesVal: string[] = !Array.isArray(itemOptVal) ? [itemOptVal] as string[] : itemOptVal as string[];
|
||||
const requestedSources = requestedSourcesVal.map(x => strToActivitySource(x).toLowerCase());
|
||||
|
||||
propResultsMap.source!.passed = criteriaPassWithIncludeBehavior(requestedSources.some(x => source.toLowerCase().includes(x)), include);
|
||||
propResultsMap.source!.passed = criteriaPassWithIncludeBehavior(requestedSources.some(x => source.toLowerCase().trim() === x.toLowerCase().trim()), include);
|
||||
break;
|
||||
}
|
||||
case 'score':
|
||||
@@ -2148,27 +2302,45 @@ export class SubredditResources {
|
||||
propResultsMap.reports!.reason = reportsMsg;
|
||||
break;
|
||||
}
|
||||
const reportCompare = parseGenericValueComparison(itemOptVal as string);
|
||||
let reportType = 'total';
|
||||
if(reportCompare.extra !== undefined && reportCompare.extra.trim() !== '') {
|
||||
const requestedType = reportCompare.extra.toLocaleLowerCase().trim();
|
||||
if(requestedType.includes('mod')) {
|
||||
reportType = 'mod';
|
||||
} else if(requestedType.includes('user')) {
|
||||
reportType = 'user';
|
||||
} else {
|
||||
const reportTypeWarn = `Did not recognize the report type "${requestedType}" -- can only use "mod" or "user". Will default to TOTAL reports`;
|
||||
log.debug(reportTypeWarn);
|
||||
propResultsMap.reports!.reason = reportTypeWarn;
|
||||
}
|
||||
|
||||
const reportSummaryParts: string[] = [];
|
||||
|
||||
let reports: ActivityReport[] = [];
|
||||
|
||||
if(item.num_reports > 0) {
|
||||
reports = await this.database.getRepository(ActivityReport).createQueryBuilder('report')
|
||||
.select('report')
|
||||
.where({activityId: item.name})
|
||||
.getMany();
|
||||
}
|
||||
let reportNum = item.num_reports;
|
||||
if(reportType === 'user') {
|
||||
reportNum = item.user_reports.length;
|
||||
} else {
|
||||
reportNum = item.mod_reports.length;
|
||||
|
||||
const reportCompare = parseReportComparison(itemOptVal as string);
|
||||
|
||||
let reportType = reportCompare.reportType ?? 'total';
|
||||
|
||||
let validReports = reports;
|
||||
|
||||
if(reportCompare.reportType === 'user') {
|
||||
validReports = validReports.filter(x => x.type === 'user');
|
||||
} else if(reportCompare.reportType === 'mod') {
|
||||
validReports = validReports.filter(x => x.type === 'mod');
|
||||
}
|
||||
propResultsMap.reports!.found = `${reportNum} ${reportType}`;
|
||||
|
||||
if(reportCompare.reasonRegex !== undefined) {
|
||||
reportSummaryParts.push(`containing reason matching ${reportCompare.reasonMatch}`);
|
||||
validReports = validReports.filter(x => reportCompare.reasonRegex?.test(x.reason));
|
||||
}
|
||||
if(reportCompare.durationText !== undefined) {
|
||||
reportSummaryParts.push(`within ${reportCompare.durationText}`);
|
||||
const earliestDate = dayjs().subtract(reportCompare.duration as Duration);
|
||||
validReports = validReports.filter(x => x.createdAt.isSameOrAfter(earliestDate));
|
||||
}
|
||||
|
||||
let reportNum = validReports.length;
|
||||
|
||||
reportSummaryParts.unshift(`${reportNum} ${reportType} reports`);
|
||||
|
||||
propResultsMap.reports!.found = reportSummaryParts.join(' ');
|
||||
propResultsMap.reports!.passed = criteriaPassWithIncludeBehavior(comparisonTextOp(reportNum, reportCompare.operator, reportCompare.value), include);
|
||||
break;
|
||||
case 'removed':
|
||||
@@ -2381,6 +2553,23 @@ export class SubredditResources {
|
||||
propResultsMap.depth!.found = depth;
|
||||
propResultsMap.depth!.passed = criteriaPassWithIncludeBehavior(comparisonTextOp(depth, depthCompare.operator, depthCompare.value), include);
|
||||
break;
|
||||
case 'upvoteRatio':
|
||||
if(asSubmission(item)) {
|
||||
|
||||
let compareStr = typeof itemOptVal === 'number' ? `>= ${itemOptVal}` : itemOptVal as string;
|
||||
const ratioCompare = parseGenericValueComparison(compareStr);
|
||||
|
||||
const ratio = item.upvote_ratio * 100;
|
||||
propResultsMap.upvoteRatio!.found = ratio;
|
||||
propResultsMap.upvoteRatio!.passed = criteriaPassWithIncludeBehavior(comparisonTextOp(ratio, ratioCompare.operator, ratioCompare.value), include);;
|
||||
break;
|
||||
} else {
|
||||
const ratioCommWarn = `Cannot test for 'upvoteRatio' on a Comment`;
|
||||
log.debug(ratioCommWarn);
|
||||
propResultsMap.depth!.passed = true;
|
||||
propResultsMap.depth!.reason = ratioCommWarn;
|
||||
break;
|
||||
}
|
||||
case 'flairTemplate':
|
||||
case 'link_flair_text':
|
||||
case 'link_flair_css_class':
|
||||
@@ -3380,6 +3569,14 @@ export class BotResourcesManager {
|
||||
return resource;
|
||||
}
|
||||
|
||||
async destroy(subName: string) {
|
||||
const res = this.get(subName);
|
||||
if(res !== undefined) {
|
||||
await res.destroy();
|
||||
this.resources.delete(subName);
|
||||
}
|
||||
}
|
||||
|
||||
async getPendingSubredditInvites(): Promise<(string[])> {
|
||||
const subredditNames = await this.defaultCache.get(`modInvites`);
|
||||
if (subredditNames !== undefined && subredditNames !== null) {
|
||||
@@ -3389,11 +3586,19 @@ export class BotResourcesManager {
|
||||
}
|
||||
|
||||
async addPendingSubredditInvite(subreddit: string): Promise<void> {
|
||||
if(subreddit === null || subreddit === undefined || subreddit == '') {
|
||||
throw new CMError('Subreddit name cannot be empty');
|
||||
}
|
||||
let subredditNames = await this.defaultCache.get(`modInvites`) as (string[] | undefined | null);
|
||||
if (subredditNames === undefined || subredditNames === null) {
|
||||
subredditNames = [];
|
||||
}
|
||||
subredditNames.push(subreddit);
|
||||
const cleanName = subreddit.trim();
|
||||
|
||||
if(subredditNames.some(x => x.trim().toLowerCase() === cleanName.toLowerCase())) {
|
||||
throw new CMError(`An invite for the Subreddit '${subreddit}' already exists`);
|
||||
}
|
||||
subredditNames.push(cleanName);
|
||||
await this.defaultCache.set(`modInvites`, subredditNames, {ttl: 0});
|
||||
return;
|
||||
}
|
||||
@@ -3403,7 +3608,7 @@ export class BotResourcesManager {
|
||||
if (subredditNames === undefined || subredditNames === null) {
|
||||
subredditNames = [];
|
||||
}
|
||||
subredditNames = subredditNames.filter(x => x !== subreddit);
|
||||
subredditNames = subredditNames.filter(x => x.toLowerCase() !== subreddit.trim().toLowerCase());
|
||||
await this.defaultCache.set(`modInvites`, subredditNames, {ttl: 0});
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@ import {RichContent} from "../Common/interfaces";
|
||||
import {Cache} from 'cache-manager';
|
||||
import {isScopeError} from "../Utils/Errors";
|
||||
import {ErrorWithCause} from "pony-cause";
|
||||
import {UserNoteType} from "../Common/Infrastructure/Atomic";
|
||||
|
||||
interface RawUserNotesPayload {
|
||||
ver: number,
|
||||
@@ -125,7 +126,7 @@ export class UserNotes {
|
||||
return this.mod as RedditUser;
|
||||
}
|
||||
|
||||
async addUserNote(item: (Submission|Comment), type: string | number, text: string = '', wikiEditReasonPrefix?: string): Promise<UserNote>
|
||||
async addUserNote(item: (Submission|Comment), type: UserNoteType | number, text: string = '', wikiEditReasonPrefix?: string): Promise<UserNote>
|
||||
{
|
||||
const payload = await this.retrieveData();
|
||||
const userName = getActivityAuthorName(item.author);
|
||||
@@ -235,7 +236,7 @@ export interface UserNoteJson extends RichContent {
|
||||
* User Note type key
|
||||
* @examples ["spamwarn"]
|
||||
* */
|
||||
type: string,
|
||||
type: UserNoteType,
|
||||
}
|
||||
|
||||
export class UserNote {
|
||||
@@ -246,7 +247,7 @@ export class UserNote {
|
||||
// noteType: string | null;
|
||||
// link: string;
|
||||
|
||||
constructor(public time: Dayjs, public text: string, public modIndex: number, public noteType: string | number, public link: (string | null) = null, public moderator?: RedditUser) {
|
||||
constructor(public time: Dayjs, public text: string, public modIndex: number, public noteType: UserNoteType | number, public link: (string | null) = null, public moderator?: RedditUser) {
|
||||
|
||||
}
|
||||
|
||||
|
||||
33
src/SubredditConfigData.ts
Normal file
33
src/SubredditConfigData.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import {
|
||||
ActivityCheckConfigValue,
|
||||
} from "./Check";
|
||||
import {ManagerOptions} from "./Common/interfaces";
|
||||
import {RunConfigHydratedData, RunConfigValue, RunConfigObject} from "./Run";
|
||||
|
||||
export interface SubredditConfigData extends ManagerOptions {
|
||||
/**
|
||||
* A list of all the checks that should be run for a subreddit.
|
||||
*
|
||||
* Checks are split into two lists -- submission or comment -- based on kind and run independently.
|
||||
*
|
||||
* Checks in each list are run in the order found in the configuration.
|
||||
*
|
||||
* When a check "passes", and actions are performed, then all subsequent checks are skipped.
|
||||
* @minItems 1
|
||||
* */
|
||||
checks?: ActivityCheckConfigValue[]
|
||||
|
||||
/**
|
||||
* A list of sets of Checks to run
|
||||
* @minItems 1
|
||||
* */
|
||||
runs?: RunConfigValue[]
|
||||
}
|
||||
|
||||
export interface SubredditConfigHydratedData extends Omit<SubredditConfigData, 'checks'> {
|
||||
runs?: RunConfigHydratedData[]
|
||||
}
|
||||
|
||||
export interface SubredditConfigObject extends SubredditConfigHydratedData {
|
||||
runs?: RunConfigObject[]
|
||||
}
|
||||
@@ -1,9 +1,9 @@
|
||||
import ExtendableError from "es6-error";
|
||||
|
||||
class InvalidRegexError extends ExtendableError {
|
||||
constructor(regex: RegExp | RegExp[], val?: string, url?: string) {
|
||||
constructor(regex: RegExp | RegExp[], val?: string, url?: string, message?: string) {
|
||||
const msgParts = [
|
||||
'Regex(es) did not match the value given.',
|
||||
message ?? 'Regex(es) did not match the value given.',
|
||||
];
|
||||
let regArr = Array.isArray(regex) ? regex : [regex];
|
||||
for(const r of regArr) {
|
||||
|
||||
@@ -130,7 +130,7 @@ export class ExtendedSnoowrap extends Snoowrap {
|
||||
user: data.user.name,
|
||||
}
|
||||
if(data.activity !== undefined) {
|
||||
requestData.reddit_id = data.activity.id;
|
||||
requestData.reddit_id = data.activity.name;
|
||||
}
|
||||
|
||||
const response =await this.oauthRequest({
|
||||
|
||||
@@ -116,47 +116,34 @@ export const isSubreddit = async (subreddit: Subreddit, stateCriteria: Subreddit
|
||||
})() as boolean;
|
||||
}
|
||||
|
||||
const renderContentCommentTruncate = truncateStringToLength(50);
|
||||
const shortTitleTruncate = truncateStringToLength(15);
|
||||
|
||||
export const renderContent = async (template: string, data: (Submission | Comment), ruleResults: RuleResultEntity[] = [], usernotes: UserNotes) => {
|
||||
const conditional: any = {};
|
||||
if(data.can_mod_post) {
|
||||
conditional.reports = data.num_reports;
|
||||
conditional.modReports = data.mod_reports.length;
|
||||
conditional.userReports = data.user_reports.length;
|
||||
}
|
||||
if(asSubmission(data)) {
|
||||
conditional.nsfw = data.over_18;
|
||||
conditional.spoiler = data.spoiler;
|
||||
conditional.op = true;
|
||||
conditional.upvoteRatio = `${data.upvote_ratio * 100}%`;
|
||||
} else {
|
||||
conditional.op = data.is_submitter;
|
||||
}
|
||||
const templateData: any = {
|
||||
kind: data instanceof Submission ? 'submission' : 'comment',
|
||||
author: await data.author.name,
|
||||
// make this a getter so that if we don't load notes (and api call) if we don't need to
|
||||
// didn't work either for some reason
|
||||
// tried to get too fancy :(
|
||||
// get notes() {
|
||||
// return usernotes.getUserNotes(data.author).then((notesData) => {
|
||||
// // return usable notes data with some stats
|
||||
// const current = notesData.length > 0 ? notesData[notesData.length -1] : undefined;
|
||||
// // group by type
|
||||
// const grouped = notesData.reduce((acc: any, x) => {
|
||||
// const {[x.noteType]: nt = []} = acc;
|
||||
// return Object.assign(acc, {[x.noteType]: nt.concat(x)});
|
||||
// }, {});
|
||||
// return {
|
||||
// data: notesData,
|
||||
// current,
|
||||
// ...grouped,
|
||||
// };
|
||||
// });
|
||||
// },
|
||||
// when i was trying to use mustache-async (didn't work)
|
||||
// notes: async () => {
|
||||
// const notesData = await usernotes.getUserNotes(data.author);
|
||||
// // return usable notes data with some stats
|
||||
// const current = notesData.length > 0 ? notesData[notesData.length -1] : undefined;
|
||||
// // group by type
|
||||
// const grouped = notesData.reduce((acc: any, x) => {
|
||||
// const {[x.noteType]: nt = []} = acc;
|
||||
// return Object.assign(acc, {[x.noteType]: nt.concat(x)});
|
||||
// }, {});
|
||||
// return {
|
||||
// data: notesData,
|
||||
// current,
|
||||
// ...grouped,
|
||||
// };
|
||||
// },
|
||||
// @ts-ignore
|
||||
author: getActivityAuthorName(await data.author),
|
||||
votes: data.score,
|
||||
age: dayjs.duration(dayjs().diff(dayjs.unix(data.created))).humanize(),
|
||||
permalink: `https://reddit.com${data.permalink}`,
|
||||
botLink: BOT_LINK,
|
||||
id: data.name,
|
||||
...conditional
|
||||
}
|
||||
if (template.includes('{{item.notes')) {
|
||||
// we need to get notes
|
||||
@@ -177,6 +164,10 @@ export const renderContent = async (template: string, data: (Submission | Commen
|
||||
if (data instanceof Submission) {
|
||||
templateData.url = data.url;
|
||||
templateData.title = data.title;
|
||||
templateData.shortTitle = shortTitleTruncate(data.title);
|
||||
} else {
|
||||
templateData.title = renderContentCommentTruncate(data.body);
|
||||
templateData.shortTitle = shortTitleTruncate(data.body);
|
||||
}
|
||||
// normalize rule names and map context data
|
||||
// NOTE: we are relying on users to use unique names for rules. If they don't only the last rule run of kind X will have its results here
|
||||
|
||||
@@ -205,7 +205,9 @@ const webClient = async (options: OperatorConfig) => {
|
||||
logger.stream().on('log', (log: LogInfo) => {
|
||||
emitter.emit('log', log);
|
||||
webLogs.unshift(log);
|
||||
webLogs = webLogs.slice(0, 201);
|
||||
if(webLogs.length > 200) {
|
||||
webLogs.splice(200);
|
||||
}
|
||||
});
|
||||
|
||||
const migrationService = new MigrationService({
|
||||
@@ -712,7 +714,14 @@ const webClient = async (options: OperatorConfig) => {
|
||||
// botUserRouter.use([ensureAuthenticated, defaultSession, botWithPermissions, createUserToken]);
|
||||
// app.use(botUserRouter);
|
||||
|
||||
app.useAsync('/api/', [ensureAuthenticated, defaultSession, instanceWithPermissions, botWithPermissions(false), createUserToken], (req: express.Request, res: express.Response) => {
|
||||
// proxy.on('proxyReq', (req) => {
|
||||
// logger.debug(`Got proxy request: ${req.path}`);
|
||||
// });
|
||||
// proxy.on('proxyRes', (proxyRes, req, res) => {
|
||||
// logger.debug(`Got proxy response: ${res.statusCode} for ${req.url}`);
|
||||
// });
|
||||
|
||||
app.useAsync('/api/', [ensureAuthenticatedApi, defaultSession, instanceWithPermissions, botWithPermissions(false), createUserToken], (req: express.Request, res: express.Response) => {
|
||||
req.headers.Authorization = `Bearer ${req.token}`
|
||||
|
||||
const instance = req.instance as CMInstanceInterface;
|
||||
@@ -723,6 +732,10 @@ const webClient = async (options: OperatorConfig) => {
|
||||
port: instance.url.port,
|
||||
},
|
||||
prependPath: false,
|
||||
proxyTimeout: 11000,
|
||||
}, (e: any) => {
|
||||
logger.error(e);
|
||||
res.status(500).send();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -54,7 +54,7 @@ const addBot = () => {
|
||||
result.error = newBot.error;
|
||||
return res.status(500).json(result);
|
||||
}
|
||||
await newBot.testClient();
|
||||
await newBot.init();
|
||||
// return response early so client doesn't have to wait for all managers to be built
|
||||
res.json(result);
|
||||
} catch (err: any) {
|
||||
|
||||
@@ -20,6 +20,7 @@ import {filterResultsBuilder} from "../../../../../Utils/typeormUtils";
|
||||
import {Brackets} from "typeorm";
|
||||
import {Activity} from "../../../../../Common/Entities/Activity";
|
||||
import {RedditThing} from "../../../../../Common/Infrastructure/Reddit";
|
||||
import {CMError} from "../../../../../Utils/Errors";
|
||||
|
||||
const commentReg = parseLinkIdentifier([COMMENT_URL_ID]);
|
||||
const submissionReg = parseLinkIdentifier([SUBMISSION_URL_ID]);
|
||||
@@ -54,7 +55,17 @@ const addInvite = async (req: Request, res: Response) => {
|
||||
if (subreddit === undefined || subreddit === null || subreddit === '') {
|
||||
return res.status(400).send('subreddit must be defined');
|
||||
}
|
||||
await req.serverBot.cacheManager.addPendingSubredditInvite(subreddit);
|
||||
try {
|
||||
await req.serverBot.cacheManager.addPendingSubredditInvite(subreddit);
|
||||
} catch (e: any) {
|
||||
if(e instanceof CMError) {
|
||||
req.logger.warn(e);
|
||||
return res.status(400).send(e.message);
|
||||
} else {
|
||||
req.logger.error(e);
|
||||
return res.status(500).send(e.message);
|
||||
}
|
||||
}
|
||||
return res.status(200).send();
|
||||
};
|
||||
|
||||
@@ -194,19 +205,24 @@ const cancelDelayed = async (req: Request, res: Response) => {
|
||||
const {id} = req.query as any;
|
||||
const {name: userName} = req.user as Express.User;
|
||||
|
||||
if(req.manager?.resources === undefined) {
|
||||
if (req.manager?.resources === undefined) {
|
||||
req.manager?.logger.error('Subreddit does not have delayed items!', {user: userName});
|
||||
return res.status(400).send();
|
||||
}
|
||||
|
||||
const delayedItem = req.manager.resources.delayedItems.find(x => x.id === id);
|
||||
if(delayedItem === undefined) {
|
||||
req.manager?.logger.error(`No delayed items exists with the id ${id}`, {user: userName});
|
||||
return res.status(400).send();
|
||||
if (id === undefined) {
|
||||
await req.manager.resources.removeDelayedActivity();
|
||||
} else {
|
||||
const delayedItem = req.manager.resources.delayedItems.find(x => x.id === id);
|
||||
if (delayedItem === undefined) {
|
||||
req.manager?.logger.error(`No delayed items exists with the id ${id}`, {user: userName});
|
||||
return res.status(400).send();
|
||||
}
|
||||
|
||||
await req.manager.resources.removeDelayedActivity(delayedItem.id);
|
||||
req.manager?.logger.info(`Remove Delayed Item '${delayedItem.id}'`, {user: userName});
|
||||
}
|
||||
|
||||
req.manager.resources.delayedItems = req.manager.resources.delayedItems.filter(x => x.id !== id);
|
||||
req.manager?.logger.info(`Remove Delayed Item '${delayedItem.id}'`, {user: userName});
|
||||
return res.send('OK');
|
||||
};
|
||||
|
||||
|
||||
@@ -1,13 +1,69 @@
|
||||
import {authUserCheck, botRoute, subredditRoute} from "../../../middleware";
|
||||
import {Request, Response} from "express";
|
||||
import Bot from "../../../../../Bot";
|
||||
import {boolToString, cacheStats, filterLogs, formatNumber, logSortFunc, pollingInfo} from "../../../../../util";
|
||||
import {boolToString, cacheStats, difference, filterLogs, formatNumber, logSortFunc, pollingInfo} from "../../../../../util";
|
||||
import dayjs from "dayjs";
|
||||
import {LogInfo, ResourceStats, RUNNING, STOPPED, SYSTEM} from "../../../../../Common/interfaces";
|
||||
import {Manager} from "../../../../../Subreddit/Manager";
|
||||
import winston from "winston";
|
||||
import {opStats} from "../../../../Common/util";
|
||||
import {BotStatusResponse} from "../../../../Common/interfaces";
|
||||
import deepEqual from "fast-deep-equal";
|
||||
import {DispatchedEntity} from "../../../../../Common/Entities/DispatchedEntity";
|
||||
|
||||
const lastFullResponse: Map<string, Record<string, any>> = new Map();
|
||||
|
||||
const mergeDeepEqual = (a: Record<any, any>, b: Record<any, any>): Record<any, any> => {
|
||||
const delta: Record<any, any> = {};
|
||||
for(const [k,v] of Object.entries(a)) {
|
||||
if(typeof v === 'object' && v !== null && typeof b[k] === 'object' && b[k] !== null) {
|
||||
const objDelta = mergeDeepEqual(v, b[k]);
|
||||
if(Object.keys(objDelta).length > 0) {
|
||||
delta[k] = objDelta;
|
||||
}
|
||||
} else if(!deepEqual(v, b[k])) {
|
||||
delta[k] = v;
|
||||
}
|
||||
}
|
||||
return delta;
|
||||
}
|
||||
|
||||
const generateDeltaResponse = (data: Record<string, any>, hash: string, responseType: 'full' | 'delta') => {
|
||||
let resp = data;
|
||||
if(responseType === 'delta') {
|
||||
const reference = lastFullResponse.get(hash);
|
||||
if(reference === undefined) {
|
||||
// shouldn't happen...
|
||||
return data;
|
||||
}
|
||||
const delta: Record<string, any> = {};
|
||||
for(const [k,v] of Object.entries(data)) {
|
||||
if(!deepEqual(v, reference[k])) {
|
||||
// on delayed items delta we will send a different data structure back with just remove/new(add)
|
||||
if(k === 'delayedItems') {
|
||||
const refIds = reference[k].map((x: DispatchedEntity) => x.id);
|
||||
const latestIds = v.map((x: DispatchedEntity) => x.id);
|
||||
|
||||
const newIds = Array.from(difference(latestIds, refIds));
|
||||
const newItems = v.filter((x: DispatchedEntity) => newIds.includes(x.id));
|
||||
|
||||
// just need ids that should be removed on frontend
|
||||
const removedItems = Array.from(difference(refIds, latestIds));
|
||||
delta[k] = {new: newItems, removed: removedItems};
|
||||
|
||||
} else if(v !== null && typeof v === 'object' && reference[k] !== null && typeof reference[k] === 'object') {
|
||||
// for things like cache/stats we only want to delta changed properties, not the entire object
|
||||
delta[k] = mergeDeepEqual(v, reference[k]);
|
||||
} else {
|
||||
delta[k] = v;
|
||||
}
|
||||
}
|
||||
}
|
||||
resp = delta;
|
||||
}
|
||||
lastFullResponse.set(hash, data);
|
||||
return resp;
|
||||
}
|
||||
|
||||
const liveStats = () => {
|
||||
const middleware = [
|
||||
@@ -20,7 +76,9 @@ const liveStats = () => {
|
||||
{
|
||||
const bot = req.serverBot as Bot;
|
||||
const manager = req.manager;
|
||||
|
||||
const responseType = req.query.type === 'delta' ? 'delta' : 'full';
|
||||
const hash = `${bot.botName}${manager !== undefined ? `-${manager.getDisplay()}` : ''}`;
|
||||
|
||||
if(manager === undefined) {
|
||||
// getting all
|
||||
const subManagerData: any[] = [];
|
||||
@@ -213,7 +271,11 @@ const liveStats = () => {
|
||||
},
|
||||
...allManagerData,
|
||||
};
|
||||
return res.json(data);
|
||||
const respData = generateDeltaResponse(data, hash, responseType);
|
||||
if(Object.keys(respData).length === 0) {
|
||||
return res.status(304).send();
|
||||
}
|
||||
return res.json(respData);
|
||||
} else {
|
||||
// getting specific subreddit stats
|
||||
const sd = {
|
||||
@@ -282,7 +344,11 @@ const liveStats = () => {
|
||||
}
|
||||
}
|
||||
|
||||
return res.json(sd);
|
||||
const respData = generateDeltaResponse(sd, hash, responseType);
|
||||
if(Object.keys(respData).length === 0) {
|
||||
return res.status(304).send();
|
||||
}
|
||||
return res.json(respData);
|
||||
}
|
||||
}
|
||||
return [...middleware, response];
|
||||
|
||||
@@ -29,13 +29,14 @@ import {authUserCheck, botRoute} from "./middleware";
|
||||
import Bot from "../../Bot";
|
||||
import addBot from "./routes/authenticated/user/addBot";
|
||||
import ServerUser from "../Common/User/ServerUser";
|
||||
import {SimpleError} from "../../Utils/Errors";
|
||||
import {CMError, SimpleError} from "../../Utils/Errors";
|
||||
import {ErrorWithCause} from "pony-cause";
|
||||
import {Manager} from "../../Subreddit/Manager";
|
||||
import {MESSAGE} from "triple-beam";
|
||||
import dayjs from "dayjs";
|
||||
import { sleep } from '../../util';
|
||||
import {Invokee} from "../../Common/Infrastructure/Atomic";
|
||||
import {Point} from "@influxdata/influxdb-client";
|
||||
|
||||
const server = addAsync(express());
|
||||
server.use(bodyParser.json());
|
||||
@@ -85,7 +86,9 @@ const rcbServer = async function (options: OperatorConfigWithFileContext) {
|
||||
|
||||
if(botName === undefined && subName === undefined) {
|
||||
sysLogs.unshift(log);
|
||||
sysLogs = sysLogs.slice(0, 201);
|
||||
if(sysLogs.length > 200) {
|
||||
sysLogs.splice(200);
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
@@ -145,6 +148,7 @@ const rcbServer = async function (options: OperatorConfigWithFileContext) {
|
||||
server.use(passport.authenticate('jwt', {session: false}));
|
||||
server.use((req, res, next) => {
|
||||
req.botApp = app;
|
||||
req.logger = logger;
|
||||
next();
|
||||
});
|
||||
|
||||
@@ -230,6 +234,33 @@ const rcbServer = async function (options: OperatorConfigWithFileContext) {
|
||||
});
|
||||
}
|
||||
|
||||
// would like to use node-memwatch for more stats but doesn't work with docker (alpine gclib?) and requires more gyp bindings, yuck
|
||||
// https://github.com/airbnb/node-memwatch
|
||||
const writeMemoryMetrics = async () => {
|
||||
if (options.dev.monitorMemory) {
|
||||
if (options.influx !== undefined) {
|
||||
const influx = options.influx;
|
||||
while (true) {
|
||||
await sleep(options.dev.monitorMemoryInterval);
|
||||
try {
|
||||
const memUsage = process.memoryUsage();
|
||||
await influx.writePoint(new Point('serverMemory')
|
||||
.intField('external', memUsage.external)
|
||||
.intField('rss', memUsage.rss)
|
||||
.intField('arrayBuffers', memUsage.arrayBuffers)
|
||||
.intField('heapTotal', memUsage.heapTotal)
|
||||
.intField('heapUsed', memUsage.heapUsed)
|
||||
);
|
||||
} catch (e: any) {
|
||||
logger.warn(new CMError('Error occurred while trying to collect memory metrics', {cause: e}));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
logger.warn('Cannot monitor memory because influx config was not set');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
server.postAsync('/init', authUserCheck(), async (req, res) => {
|
||||
logger.info(`${(req.user as Express.User).name} requested the app to be re-built. Starting rebuild now...`, {subreddit: (req.user as Express.User).name});
|
||||
await initBot('user');
|
||||
@@ -283,6 +314,7 @@ const rcbServer = async function (options: OperatorConfigWithFileContext) {
|
||||
|
||||
logger.info('Initializing database...');
|
||||
try {
|
||||
writeMemoryMetrics();
|
||||
const dbReady = await app.initDatabase();
|
||||
if(dbReady) {
|
||||
logger.info('Initializing application...');
|
||||
|
||||
@@ -242,39 +242,64 @@
|
||||
</script>
|
||||
<script defer="defer" src="/public/yaml/entry.js"></script>
|
||||
<script>
|
||||
const gistReg = new RegExp(/.*gist.github.com\/.+\/(.+)/i)
|
||||
const GIST_RAW_REGEX = new RegExp(/.*gist\.github\.com\/(.+)\/(.+)\/raw\/.+/i)
|
||||
// match[1] = username
|
||||
// match[2] = gistId
|
||||
// match[3] = filename (optional)
|
||||
const gistReg = new RegExp(/.*gist\.github\.com\/(.+)\/([^#\/]+)(?:#file-(.+))?/i)
|
||||
const blobReg = new RegExp(/.*github.com\/(.+)\/(.+)\/blob\/(.+)/i);
|
||||
|
||||
const normalizeGistFileKey = (val) => val.replaceAll(/[^\w\d]/g, '').toLowerCase().trim();
|
||||
|
||||
function getPayloadUrl(url) {
|
||||
console.debug(`Attempting to detect resolvable URL for ${url}`);
|
||||
let match = url.match(gistReg);
|
||||
if (match !== null) {
|
||||
const gistApiUrl = `https://api.github.com/gists/${match[1]}`;
|
||||
console.debug(`Looks like a non-raw gist URL! Trying to resolve ${gistApiUrl}`);
|
||||
|
||||
let match = url.match(GIST_RAW_REGEX);
|
||||
if(match !== null) {
|
||||
return new Promise((resolve, reject) => {
|
||||
fetch(gistApiUrl).then((resp) => {
|
||||
if (!resp.ok) {
|
||||
console.error('Response was not OK from Gist API');
|
||||
resolve(url);
|
||||
} else {
|
||||
resp.json().then((data) => {
|
||||
// get first found file
|
||||
const fileKeys = Object.keys(data.files);
|
||||
if (fileKeys.length === 0) {
|
||||
console.error(`No files found in gist!`);
|
||||
} else {
|
||||
if (fileKeys.length > 1) {
|
||||
console.warn(`More than one file found in gist! Using first found: ${fileKeys[0]}`);
|
||||
// need to use CDN url or else we get a CORS policy error
|
||||
resolve(url.replace('gist.github.com', 'gist.githubusercontent.com'));
|
||||
})
|
||||
} else {
|
||||
match = url.match(gistReg);
|
||||
if (match !== null) {
|
||||
const gistApiUrl = `https://api.github.com/gists/${match[2]}`;
|
||||
console.debug(`Looks like a non-raw gist URL! Trying to resolve ${gistApiUrl}`);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
fetch(gistApiUrl).then((resp) => {
|
||||
if (!resp.ok) {
|
||||
console.error('Response was not OK from Gist API');
|
||||
resolve(url);
|
||||
} else {
|
||||
resp.json().then((data) => {
|
||||
// get first found file
|
||||
const fileKeys = Object.keys(data.files);
|
||||
if (fileKeys.length === 0) {
|
||||
console.error(`No files found in gist!`);
|
||||
} else {
|
||||
let fileKey = fileKeys[0];
|
||||
if (fileKeys.length > 1) {
|
||||
if(match[3] !== undefined) {
|
||||
//const normalizedFileName = normalizeGistFileKey(match.named.fileName.replace('/^file-/', ''));
|
||||
const normalizedFileName = normalizeGistFileKey(match[3]);
|
||||
const matchingKey = fileKeys.find(x => normalizeGistFileKey(x) === normalizedFileName);
|
||||
if(matchingKey === undefined) {
|
||||
console.error(`Found Gist ${match[2]} but it did not contain a file named ${match[3]}`);
|
||||
}
|
||||
fileKey = matchingKey;
|
||||
} else {
|
||||
console.warn(`More than one file found in gist but URL did not specify a filename! Using first found: ${fileKey}`);
|
||||
}
|
||||
}
|
||||
const rawUrl = data.files[fileKey].raw_url;
|
||||
console.debug(`Resolving raw gist url for file found (${fileKey}) to ${rawUrl}`);
|
||||
resolve(rawUrl);
|
||||
}
|
||||
const rawUrl = data.files[fileKeys[0]].raw_url;
|
||||
console.debug(`Resolving raw gist url for first file found (${fileKeys[0]}) to ${rawUrl}`);
|
||||
resolve(rawUrl);
|
||||
}
|
||||
});
|
||||
}
|
||||
})
|
||||
});
|
||||
});
|
||||
}
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
match = url.match(blobReg);
|
||||
|
||||
@@ -37,7 +37,10 @@
|
||||
} else {
|
||||
resp.json().then(data => {
|
||||
if (data.length > 0) {
|
||||
document.querySelector('#noSubs').style = 'display: none;';
|
||||
const ns = document.querySelector('#noSubs');
|
||||
if(ns !== null) {
|
||||
document.querySelector('#noSubs').style = 'display: none;';
|
||||
}
|
||||
sl.removeChild(sl.childNodes[1]);
|
||||
}
|
||||
for (const sub of data) {
|
||||
@@ -69,7 +72,10 @@
|
||||
document.querySelector("#error").innerHTML = t;
|
||||
});
|
||||
} else {
|
||||
document.querySelector('#noSubs').style = 'display: none;';
|
||||
const ns = document.querySelector('#noSubs');
|
||||
if(ns !== null) {
|
||||
document.querySelector('#noSubs').style = 'display: none;';
|
||||
}
|
||||
addSubredditElement(subName);
|
||||
subNameElm.value = '';
|
||||
}
|
||||
|
||||
@@ -999,6 +999,10 @@
|
||||
});
|
||||
|
||||
fetchPromise.then(async res => {
|
||||
// can potentially happen if request is aborted (by immediate log cancel) before logs begin to be read
|
||||
if(res === undefined) {
|
||||
return;
|
||||
}
|
||||
const reader = res.getReader();
|
||||
let keepReading = true;
|
||||
while(keepReading) {
|
||||
@@ -1039,178 +1043,374 @@
|
||||
read();*/
|
||||
}).catch((e) => {
|
||||
if(e.name !== 'AbortError') {
|
||||
console.debug(`Non-abort error occurred while streaming logs for ${bot} ${sub}`);
|
||||
console.error(e);
|
||||
} else {
|
||||
console.debug(`Log streaming for ${bot} ${sub} aborted`);
|
||||
}
|
||||
});
|
||||
|
||||
const existing = recentlySeen.get(`${bot}.${sub}`) || {};
|
||||
recentlySeen.set(`${bot}.${sub}`, {...existing, fetch: fetchPromise, controller});
|
||||
recentlySeen.set(`${bot}.${sub}`, {...existing, fetch: fetchPromise, controller, streamStart: Date.now()});
|
||||
}
|
||||
|
||||
function updateLiveStats(resp) {
|
||||
const delayedItemsMap = new Map();
|
||||
let lastSeenIdentifier = null;
|
||||
|
||||
function updateLiveStats(resp, sub, bot, responseType) {
|
||||
let el;
|
||||
let isAll = resp.name.toLowerCase() === 'all';
|
||||
let isAll = sub.toLowerCase() === 'all';
|
||||
if(isAll) {
|
||||
// got all
|
||||
el = document.querySelector(`[data-subreddit="All"][data-bot="${resp.bot}"].sub`);
|
||||
el = document.querySelector(`[data-subreddit="All"][data-bot="${bot}"].sub`);
|
||||
} else {
|
||||
// got subreddit
|
||||
el = document.querySelector(`[data-subreddit="${resp.name}"].sub`);
|
||||
el = document.querySelector(`[data-subreddit="${sub}"].sub`);
|
||||
}
|
||||
|
||||
if(resp.system.running && el.classList.contains('offline')) {
|
||||
el.classList.remove('offline');
|
||||
} else if(!resp.system.running && !el.classList.contains('offline')) {
|
||||
el.classList.add('offline');
|
||||
const {
|
||||
system: {
|
||||
running
|
||||
} = {},
|
||||
delayedItems,
|
||||
runningActivities,
|
||||
queuedActivities,
|
||||
permissions,
|
||||
stats: {
|
||||
historical: {
|
||||
eventsCheckedTotal,
|
||||
checksTriggeredTotal,
|
||||
rulesTriggeredTotal,
|
||||
actionsRunTotal,
|
||||
} = {},
|
||||
} = {},
|
||||
checks: {
|
||||
comments,
|
||||
submissions
|
||||
} = {},
|
||||
pollingInfo,
|
||||
} = resp;
|
||||
|
||||
if(running !== undefined) {
|
||||
if(resp.system.running && el.classList.contains('offline')) {
|
||||
el.classList.remove('offline');
|
||||
} else if(!resp.system.running && !el.classList.contains('offline')) {
|
||||
el.classList.add('offline');
|
||||
}
|
||||
}
|
||||
|
||||
el.querySelector('.runningActivities').innerHTML = resp.runningActivities;
|
||||
el.querySelector('.queuedActivities').innerHTML = resp.queuedActivities;
|
||||
el.querySelector('.delayedItemsCount').innerHTML = resp.delayedItems.length;
|
||||
el.querySelector('.delayedItemsList').innerHTML = 'No delayed Items!';
|
||||
if(resp.delayedItems.length > 0) {
|
||||
el.querySelector('.delayedItemsList').innerHTML = '';
|
||||
const now = dayjs();
|
||||
const sorted = resp.delayedItems.map(x => ({...x, queuedAtUnix: x.queuedAt, queuedAt: dayjs.unix(x.queuedAt), dispatchAt: dayjs.unix(x.queuedAt + x.duration)}));
|
||||
sorted.sort((a, b) => {
|
||||
return a.dispatchAt.isSameOrAfter(b.dispatchAt) ? 1 : -1
|
||||
});
|
||||
const delayedItemDivs = sorted.map(x => {
|
||||
const diffUntilNow = x.dispatchAt.diff(now);
|
||||
const durationUntilNow = dayjs.duration(diffUntilNow, 'ms');
|
||||
const queuedAtDisplay = x.queuedAt.format('HH:mm:ss z');
|
||||
const durationDayjs = dayjs.duration(x.duration, 'seconds');
|
||||
const durationDisplay = durationDayjs.humanize();
|
||||
const cancelLink = `<a href="#" data-id="${x.id}" data-subreddit="${x.subreddit}" class="delayCancel">CANCEL</a>`;
|
||||
return `<div>A <a href="https://reddit.com${x.permalink}">${x.submissionId !== undefined ? 'Comment' : 'Submssion'}</a>${isAll ? ` in <a href="https://reddit.com${x.subreddit}">${x.subreddit}</a> ` : ''} by <a href="https://reddit.com/u/${x.author}">${x.author}</a> queued by ${x.source} at ${queuedAtDisplay} for ${durationDisplay} (dispatches ${durationUntilNow.humanize(true)}) -- ${cancelLink}</div>`;
|
||||
});
|
||||
el.querySelector('.delayedItemsList').insertAdjacentHTML('afterbegin', delayedItemDivs.join(''));
|
||||
el.querySelectorAll('.delayedItemsList .delayCancel').forEach(elm => {
|
||||
elm.addEventListener('click', e => {
|
||||
e.preventDefault();
|
||||
const id = e.target.dataset.id;
|
||||
const subreddit = e.target.dataset.subreddit;
|
||||
fetch(`/api/delayed?instance=<%= instanceId %>&bot=${resp.bot}&subreddit=${subreddit}&id=${id}`, {
|
||||
if(runningActivities !== undefined) {
|
||||
el.querySelector('.runningActivities').innerHTML = resp.runningActivities;
|
||||
}
|
||||
if(queuedActivities !== undefined) {
|
||||
el.querySelector('.queuedActivities').innerHTML = resp.queuedActivities;
|
||||
}
|
||||
|
||||
if(delayedItems !== undefined) {
|
||||
let items = [];
|
||||
if(responseType === 'full') {
|
||||
delayedItemsMap.clear();
|
||||
for(const i of delayedItems) {
|
||||
delayedItemsMap.set(i.id, i);
|
||||
}
|
||||
items = delayedItems;
|
||||
} else {
|
||||
for(const n of delayedItems.new) {
|
||||
delayedItemsMap.set(n.id, n);
|
||||
}
|
||||
for(const n of delayedItems.removed) {
|
||||
delayedItemsMap.delete(n);
|
||||
}
|
||||
items = Array.from(delayedItemsMap.values());
|
||||
}
|
||||
el.querySelector('.delayedItemsCount').innerHTML = items.length;
|
||||
el.querySelector('.delayedItemsList').innerHTML = 'No delayed Items!';
|
||||
if(items.length > 0) {
|
||||
el.querySelector('.delayedItemsList').innerHTML = '';
|
||||
const now = dayjs();
|
||||
const sorted = items.map(x => ({...x, queuedAtUnix: x.queuedAt, queuedAt: dayjs.unix(x.queuedAt), dispatchAt: dayjs.unix(x.queuedAt + x.duration)}));
|
||||
sorted.sort((a, b) => {
|
||||
return a.dispatchAt.isSameOrAfter(b.dispatchAt) ? 1 : -1
|
||||
});
|
||||
const delayedItemDivs = sorted.map(x => {
|
||||
const diffUntilNow = x.dispatchAt.diff(now);
|
||||
const durationUntilNow = dayjs.duration(diffUntilNow, 'ms');
|
||||
const queuedAtDisplay = x.queuedAt.format('HH:mm:ss z');
|
||||
const durationDayjs = dayjs.duration(x.duration, 'seconds');
|
||||
const durationDisplay = durationDayjs.humanize();
|
||||
const cancelLink = `<a href="#" data-id="${x.id}" data-subreddit="${x.subreddit}" class="delayCancel">CANCEL</a>`;
|
||||
return `<div>A <a href="https://reddit.com${x.permalink}">${x.submissionId !== undefined ? 'Comment' : 'Submission'}</a>${isAll ? ` in <a href="https://reddit.com${x.subreddit}">${x.subreddit}</a> ` : ''} by <a href="https://reddit.com/u/${x.author}">${x.author}</a> queued by ${x.source} at ${queuedAtDisplay} for ${durationDisplay} (dispatches ${durationUntilNow.humanize(true)}) -- ${cancelLink}</div>`;
|
||||
});
|
||||
//let sub = resp.name;
|
||||
if(sub === 'All') {
|
||||
sub = sorted.reduce((acc, curr) => {
|
||||
if(!acc.includes(curr.subreddit)) {
|
||||
return acc.concat(curr.subreddit);
|
||||
}
|
||||
return acc;
|
||||
},[]).join(',');
|
||||
}
|
||||
delayedItemDivs.unshift(`<div><a href="#"data-subreddit="${sub}" class="delayCancelAll">Cancel ALL</a></div>`)
|
||||
el.querySelector('.delayedItemsList').insertAdjacentHTML('afterbegin', delayedItemDivs.join(''));
|
||||
el.querySelectorAll('.delayedItemsList .delayCancel').forEach(elm => {
|
||||
elm.addEventListener('click', e => {
|
||||
e.preventDefault();
|
||||
const id = e.target.dataset.id;
|
||||
const subreddit = e.target.dataset.subreddit;
|
||||
fetch(`/api/delayed?instance=<%= instanceId %>&bot=${bot}&subreddit=${subreddit}&id=${id}`, {
|
||||
method: 'DELETE'
|
||||
}).then((resp) => {
|
||||
if (!resp.ok) {
|
||||
console.error('Response was not OK from delay cancel');
|
||||
} else {
|
||||
console.log('Removed ok');
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
el.querySelectorAll('.delayedItemsList .delayCancelAll').forEach(elm => {
|
||||
elm.addEventListener('click', e => {
|
||||
e.preventDefault();
|
||||
const subreddit = e.target.dataset.subreddit;
|
||||
deleteDelayedActivities(bot, subreddit);
|
||||
|
||||
/*fetch(`/api/delayed?instance=<%= instanceId %>&bot=${bot}&subreddit=${subreddit}`, {
|
||||
method: 'DELETE'
|
||||
}).then((resp) => {
|
||||
if (!resp.ok) {
|
||||
console.error('Response was not OK from delay cancel');
|
||||
console.error('Response was not OK from delay cancel ALL');
|
||||
} else {
|
||||
console.log('Removed ok');
|
||||
console.log('Removed ALL ok');
|
||||
}
|
||||
});*/
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
el.querySelector('.allStats .eventsCount').innerHTML = resp.stats.historical.eventsCheckedTotal;
|
||||
el.querySelector('.allStats .checksCount').innerHTML = resp.stats.historical.checksTriggeredTotal;
|
||||
el.querySelector('.allStats .rulesCount').innerHTML = resp.stats.historical.rulesTriggeredTotal;
|
||||
el.querySelector('.allStats .actionsCount').innerHTML = resp.stats.historical.actionsRunTotal;
|
||||
if(eventsCheckedTotal !== undefined) {
|
||||
el.querySelector('.allStats .eventsCount').innerHTML = resp.stats.historical.eventsCheckedTotal;
|
||||
}
|
||||
if(checksTriggeredTotal !== undefined) {
|
||||
el.querySelector('.allStats .checksCount').innerHTML = resp.stats.historical.checksTriggeredTotal;
|
||||
}
|
||||
if(rulesTriggeredTotal !== undefined) {
|
||||
el.querySelector('.allStats .rulesCount').innerHTML = resp.stats.historical.rulesTriggeredTotal;
|
||||
}
|
||||
if(actionsRunTotal !== undefined) {
|
||||
el.querySelector('.allStats .actionsCount').innerHTML = resp.stats.historical.actionsRunTotal;
|
||||
}
|
||||
|
||||
if(isAll) {
|
||||
for(const elm of ['apiAvg','apiLimit','apiDepletion','nextHeartbeat', 'nextHeartbeatHuman', 'limitReset', 'limitResetHuman', 'nannyMode', 'startedAtHuman']) {
|
||||
el.querySelector(`#${elm}`).innerHTML = resp[elm];
|
||||
if(resp[elm] !== undefined) {
|
||||
el.querySelector(`#${elm}`).innerHTML = resp[elm];
|
||||
}
|
||||
}
|
||||
|
||||
if(running !== undefined) {
|
||||
el.querySelector(`.botStatus`).innerHTML = resp.system.running ? 'ONLINE' : 'OFFLINE';
|
||||
}
|
||||
el.querySelector(`.botStatus`).innerHTML = resp.system.running ? 'ONLINE' : 'OFFLINE';
|
||||
} else {
|
||||
if(el.querySelector('.modPermissionsCount').innerHTML != resp.permissions.length) {
|
||||
el.querySelector('.modPermissionsCount').innerHTML = resp.permissions.length;
|
||||
el.querySelector('.modPermissionsList').innerHTML = '';
|
||||
el.querySelector('.modPermissionsList').insertAdjacentHTML('afterbegin', resp.permissions.map(x => `<li class="font-mono">${x}</li>`).join(''));
|
||||
if(permissions !== undefined) {
|
||||
if(el.querySelector('.modPermissionsCount').innerHTML != resp.permissions.length) {
|
||||
el.querySelector('.modPermissionsCount').innerHTML = resp.permissions.length;
|
||||
el.querySelector('.modPermissionsList').innerHTML = '';
|
||||
el.querySelector('.modPermissionsList').insertAdjacentHTML('afterbegin', resp.permissions.map(x => `<li class="font-mono">${x}</li>`).join(''));
|
||||
}
|
||||
}
|
||||
|
||||
for(const elm of ['botState', 'queueState', 'eventsState']) {
|
||||
const state = resp[elm];
|
||||
el.querySelector(`.${elm}`).innerHTML = `${state.state}${state.causedBy === 'system' ? '' : ' (user)'}`;
|
||||
if(resp[elm] !== undefined) {
|
||||
const state = resp[elm];
|
||||
el.querySelector(`.${elm}`).innerHTML = `${state.state}${state.causedBy === 'system' ? '' : ' (user)'}`;
|
||||
}
|
||||
}
|
||||
for(const elm of ['startedAt', 'startedAtHuman', 'wikiLastCheck', 'wikiLastCheckHuman', 'wikiRevision', 'wikiRevisionHuman', 'validConfig', 'delayBy']) {
|
||||
el.querySelector(`.${elm}`).innerHTML = resp[elm];
|
||||
if(resp[elm] !== undefined) {
|
||||
el.querySelector(`.${elm}`).innerHTML = resp[elm];
|
||||
}
|
||||
}
|
||||
if(comments !== undefined) {
|
||||
el.querySelector(`.commentCheckCount`).innerHTML = resp.checks.comments;
|
||||
}
|
||||
if(submissions !== undefined) {
|
||||
el.querySelector(`.submissionCheckCount`).innerHTML = resp.checks.submissions;
|
||||
}
|
||||
el.querySelector(`.commentCheckCount`).innerHTML = resp.checks.comments;
|
||||
el.querySelector(`.submissionCheckCount`).innerHTML = resp.checks.submissions;
|
||||
|
||||
const newInner = resp.pollingInfo.map(x => `<li>${x}</li>`).join('');
|
||||
if(el.querySelector(`.pollingInfo`).innerHTML !== newInner) {
|
||||
el.querySelector(`.pollingInfo`).innerHTML = newInner;
|
||||
if(pollingInfo !== undefined) {
|
||||
const newInner = resp.pollingInfo.map(x => `<li>${x}</li>`).join('');
|
||||
if(el.querySelector(`.pollingInfo`).innerHTML !== newInner) {
|
||||
el.querySelector(`.pollingInfo`).innerHTML = newInner;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function getLiveStats(bot, sub) {
|
||||
function deleteDelayedActivities(bot, subredditStr, id) {
|
||||
const subs = subredditStr.split(',');
|
||||
fetch(`/api/delayed?instance=<%= instanceId %>&bot=${bot}&subreddit=${subs[0]}${id !== undefined ? `&id=${id}` : ''}`, {
|
||||
method: 'DELETE'
|
||||
}).then((resp) => {
|
||||
if (!resp.ok) {
|
||||
if(id === undefined) {
|
||||
console.error(`Response was not OK from ${subs[0]} delay ALL cancel`);
|
||||
} else {
|
||||
console.error(`Response was not OK from ${subs[0]} delay cancel ${id}`);
|
||||
}
|
||||
} else {
|
||||
if(id === undefined) {
|
||||
console.log(`Removed ALL for ${subs[0]} ok`);
|
||||
} else {
|
||||
console.log(`Removed ${id} for ${subs[0]} ok`);
|
||||
}
|
||||
if(subs.length > 1) {
|
||||
deleteDelayedActivities(bot, subs.slice(1).join(','));
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function getLiveStats(bot, sub, responseType = 'full') {
|
||||
console.debug(`Getting live stats for ${bot} ${sub}`)
|
||||
return fetch(`/api/liveStats?instance=<%= instanceId %>&bot=${bot}&subreddit=${sub}`)
|
||||
.then(response => response.json())
|
||||
.then(resp => updateLiveStats(resp));
|
||||
return fetch(`/api/liveStats?instance=<%= instanceId %>&bot=${bot}&subreddit=${sub}&type=${responseType}`)
|
||||
.then(response => {
|
||||
if(response.status === 304) {
|
||||
return Promise.resolve(false);
|
||||
}
|
||||
return response.json();
|
||||
})
|
||||
.then(resp => {
|
||||
if(resp === false) {
|
||||
return;
|
||||
}
|
||||
updateLiveStats(resp, sub, bot, responseType);
|
||||
});
|
||||
}
|
||||
|
||||
function onSubVisible (bot, sub) {
|
||||
const identifier = `${bot}.${sub}`
|
||||
lastSeenIdentifier = identifier;
|
||||
|
||||
console.debug(`Focused on ${identifier}`);
|
||||
|
||||
let immediateCancel = [];
|
||||
const notNew = Array.from(recentlySeen.entries()).filter(([k,v]) => k !== identifier);
|
||||
// browsers have a default limit for number of concurrent connections
|
||||
// which INCLUDES streaming responses (logs)
|
||||
// so we need to keep number of idle streaming logs low to prevent browser from hanging on new requests
|
||||
if(notNew.length > 2) {
|
||||
notNew.sort((a, b) => a[1].streamStart - b[1].streamStart);
|
||||
immediateCancel = notNew.slice(2).map(x => x[0]);
|
||||
console.debug(`More than 2 other views are still streaming logs! Will immediately stop the oldest (skipping two earliest): ${immediateCancel.join(' , ')}`);
|
||||
}
|
||||
|
||||
recentlySeen.forEach((value, key) => {
|
||||
const {timeout, liveStatsInt, ...rest} = value;
|
||||
if(key === identifier && timeout !== undefined) {
|
||||
|
||||
console.debug(`${key} Clearing unfocused timeout on own already set`);
|
||||
clearTimeout(timeout);
|
||||
recentlySeen.set(key, rest);
|
||||
|
||||
} else if(key !== identifier) {
|
||||
|
||||
// stop live stats for tabs we are not viewing
|
||||
clearInterval(liveStatsInt);
|
||||
|
||||
if(immediateCancel.includes(key)) {
|
||||
const {controller} = value;
|
||||
if(controller !== undefined) {
|
||||
console.debug(`${key} Stopping logs IMMEDIATELY`);
|
||||
controller.abort();
|
||||
recentlySeen.delete(key);
|
||||
}
|
||||
} else
|
||||
// set timeout for logs we are not viewing
|
||||
if(timeout === undefined) {
|
||||
const t = setTimeout(() => {
|
||||
const k = key;
|
||||
const val = recentlySeen.get(k);
|
||||
if(val !== undefined) {
|
||||
const {controller} = val;
|
||||
console.debug(`${k} 15 second unfocused timeout expired, stopping log streaming`);
|
||||
if(controller !== undefined) {
|
||||
console.debug('Stopping logs');
|
||||
controller.abort();
|
||||
}
|
||||
recentlySeen.delete(k);
|
||||
}
|
||||
},15000);
|
||||
recentlySeen.set(key, {timeout: t, liveStatsInt, ...rest});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if(!recentlySeen.has(identifier)) {
|
||||
getLogBlock(bot, sub).then(() => {
|
||||
getStreamingLogs(sub, bot);
|
||||
});
|
||||
}
|
||||
|
||||
delayedItemsMap.clear();
|
||||
// always get live stats for tab we just started viewing
|
||||
getLiveStats(bot, sub).then(() => {
|
||||
let liveStatsInt;
|
||||
const liveStatFunc = () => {
|
||||
// after initial live stats "full frame" only request deltas to reduce data usage
|
||||
getLiveStats(bot, sub, 'delta').catch((err) => {
|
||||
console.error(err);
|
||||
// stop interval if live stat encounters an error
|
||||
clearInterval(liveStatsInt);
|
||||
})
|
||||
};
|
||||
liveStatsInt = setInterval(liveStatFunc, 5000);
|
||||
const existing = recentlySeen.get(identifier) ?? {};
|
||||
recentlySeen.set(identifier, {...existing, bot, sub, liveStatsInt});
|
||||
});
|
||||
}
|
||||
|
||||
document.querySelectorAll('.sub').forEach(el => {
|
||||
const sub = el.dataset.subreddit;
|
||||
const bot = el.dataset.bot;
|
||||
//console.log(`Focused on ${bot} ${sub}`);
|
||||
onVisible(el, () => {
|
||||
console.debug(`Focused on ${bot} ${sub}`);
|
||||
|
||||
const identifier = `${bot}.${sub}`;
|
||||
|
||||
recentlySeen.forEach((value, key) => {
|
||||
const {timeout, liveStatsInt, ...rest} = value;
|
||||
if(key === identifier && timeout !== undefined) {
|
||||
|
||||
console.debug('Clearing timeout on own already set');
|
||||
clearTimeout(timeout);
|
||||
recentlySeen.set(key, rest);
|
||||
|
||||
} else if(key !== identifier) {
|
||||
|
||||
// stop live stats for tabs we are not viewing
|
||||
clearInterval(liveStatsInt);
|
||||
|
||||
// set timeout for logs we are not viewing
|
||||
if(timeout === undefined) {
|
||||
const t = setTimeout(() => {
|
||||
const k = key;
|
||||
const val = recentlySeen.get(k);
|
||||
if(val !== undefined) {
|
||||
const {controller} = val;
|
||||
console.debug(`timeout expired, stopping live data for ${k}`);
|
||||
if(controller !== undefined) {
|
||||
console.debug('Stopping logs');
|
||||
controller.abort();
|
||||
}
|
||||
// if(liveStatInt !== undefined) {
|
||||
// console.log('Stopping live stats');
|
||||
// clearInterval(liveStatInt);
|
||||
// }
|
||||
recentlySeen.delete(k);
|
||||
}
|
||||
},15000);
|
||||
recentlySeen.set(key, {timeout: t, liveStatsInt, ...rest});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if(!recentlySeen.has(identifier)) {
|
||||
getLogBlock(bot, sub).then(() => {
|
||||
getStreamingLogs(sub, bot);
|
||||
});
|
||||
}
|
||||
|
||||
// always get live stats for tab we just started viewing
|
||||
getLiveStats(bot, sub).then(() => {
|
||||
let liveStatsInt;
|
||||
const liveStatFunc = () => {
|
||||
getLiveStats(bot, sub).catch(() => {
|
||||
// stop interval if live stat encounters an error
|
||||
clearInterval(liveStatsInt);
|
||||
})
|
||||
};
|
||||
liveStatsInt = setInterval(liveStatFunc, 5000);
|
||||
recentlySeen.set(identifier, {liveStatsInt});
|
||||
});
|
||||
});
|
||||
onVisible(el, () => onSubVisible(bot, sub));
|
||||
});
|
||||
|
||||
let backgroundTimeout = null;
|
||||
|
||||
document.addEventListener("visibilitychange", (e) => {
|
||||
if (document.visibilityState === "hidden") {
|
||||
console.debug(`Set 15 seconds timeout for ${lastSeenIdentifier} live data due to page not being visible`);
|
||||
backgroundTimeout = setTimeout(() => {
|
||||
console.debug(`Stopping live data for ${lastSeenIdentifier} due to page not being visible`);
|
||||
|
||||
const {liveStatsInt, controller} = recentlySeen.get(lastSeenIdentifier) ?? {};
|
||||
if(liveStatsInt !== undefined && liveStatsInt !== null) {
|
||||
clearInterval(liveStatsInt);
|
||||
}
|
||||
if(controller !== undefined && controller !== null) {
|
||||
controller.abort();
|
||||
}
|
||||
backgroundTimeout = null;
|
||||
}, 15000);
|
||||
} else {
|
||||
// cancel real-time data timeout because page is visible again
|
||||
if(backgroundTimeout !== null) {
|
||||
console.debug(`Cancelled live-data timeout for ${lastSeenIdentifier}`);
|
||||
clearTimeout(backgroundTimeout);
|
||||
backgroundTimeout = null;
|
||||
} else if(lastSeenIdentifier !== null) {
|
||||
// if timeout is null then it was hit
|
||||
// and since we have a last seen this is what is visible to the user so restart live data for it
|
||||
const {bot, sub} = recentlySeen.get(lastSeenIdentifier) ?? {};
|
||||
if(bot !== undefined && sub !== undefined) {
|
||||
console.debug(`Restarting live-data for ${lastSeenIdentifier} due to page being visible`);
|
||||
recentlySeen.delete(lastSeenIdentifier);
|
||||
onSubVisible(bot, sub);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
var searchParams = new URLSearchParams(window.location.search);
|
||||
const shownSub = searchParams.get('sub') || 'All'
|
||||
|
||||
1
src/Web/types/express/index.d.ts
vendored
1
src/Web/types/express/index.d.ts
vendored
@@ -8,6 +8,7 @@ declare global {
|
||||
declare namespace Express {
|
||||
interface Request {
|
||||
botApp: App;
|
||||
logger: Logger;
|
||||
token?: string,
|
||||
instance?: CMInstanceInterface,
|
||||
bot?: BotInstance,
|
||||
|
||||
287
src/util.ts
287
src/util.ts
@@ -7,6 +7,7 @@ import {deflateSync, inflateSync} from "zlib";
|
||||
import pixelmatch from 'pixelmatch';
|
||||
import os from 'os';
|
||||
import pathUtil from 'path';
|
||||
import {Response} from 'node-fetch';
|
||||
import crypto, {createHash} from 'crypto';
|
||||
import {
|
||||
ActionResult,
|
||||
@@ -16,7 +17,7 @@ import {
|
||||
CheckSummary,
|
||||
ImageComparisonResult,
|
||||
ItemCritPropHelper,
|
||||
LogInfo,
|
||||
LogInfo, NamedGroup,
|
||||
PollingOptionsStrong,
|
||||
PostBehaviorOptionConfig,
|
||||
RegExResult,
|
||||
@@ -73,14 +74,15 @@ import {
|
||||
ActivitySourceTypes,
|
||||
CacheProvider,
|
||||
ConfigFormat,
|
||||
DurationVal,
|
||||
DurationVal, ExternalUrlContext,
|
||||
ModUserNoteLabel,
|
||||
modUserNoteLabels,
|
||||
RedditEntity,
|
||||
RedditEntityType,
|
||||
statFrequencies,
|
||||
StatisticFrequency,
|
||||
StatisticFrequencyOption
|
||||
StatisticFrequencyOption, UrlContext,
|
||||
WikiContext
|
||||
} from "./Common/Infrastructure/Atomic";
|
||||
import {
|
||||
AuthorOptions,
|
||||
@@ -605,6 +607,9 @@ export const formatNumber = (val: number | string, options?: numberFormatOptions
|
||||
if (Number.isNaN(parsedVal)) {
|
||||
return defaultVal;
|
||||
}
|
||||
if(!Number.isFinite(val)) {
|
||||
return 'Infinite';
|
||||
}
|
||||
let prefixStr = prefix;
|
||||
const {enable = false, indicate = true, type = 'round'} = round || {};
|
||||
if (enable && !Number.isInteger(parsedVal)) {
|
||||
@@ -716,34 +721,49 @@ export const isActivityWindowConfig = (val: any): val is FullActivityWindowConfi
|
||||
}
|
||||
|
||||
// string must only contain ISO8601 optionally wrapped by whitespace
|
||||
const ISO8601_REGEX: RegExp = /^(-?)P(?=\d|T\d)(?:(\d+)Y)?(?:(\d+)M)?(?:(\d+)([DW]))?(?:T(?:(\d+)H)?(?:(\d+)M)?(?:(\d+(?:\.\d+)?)S)?)?$/;
|
||||
const ISO8601_REGEX: RegExp = /^\s*((-?)P(?=\d|T\d)(?:(\d+)Y)?(?:(\d+)M)?(?:(\d+)([DW]))?(?:T(?:(\d+)H)?(?:(\d+)M)?(?:(\d+(?:\.\d+)?)S)?)?)\s*$/;
|
||||
// finds ISO8601 in any part of a string
|
||||
const ISO8601_SUBSTRING_REGEX: RegExp = /(-?)P(?=\d|T\d)(?:(\d+)Y)?(?:(\d+)M)?(?:(\d+)([DW]))?(?:T(?:(\d+)H)?(?:(\d+)M)?(?:(\d+(?:\.\d+)?)S)?)?/;
|
||||
const ISO8601_SUBSTRING_REGEX: RegExp = /((-?)P(?=\d|T\d)(?:(\d+)Y)?(?:(\d+)M)?(?:(\d+)([DW]))?(?:T(?:(\d+)H)?(?:(\d+)M)?(?:(\d+(?:\.\d+)?)S)?)?)/g;
|
||||
// string must only duration optionally wrapped by whitespace
|
||||
const DURATION_REGEX: RegExp = /^\s*(?<time>\d+)\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\s*$/;
|
||||
// finds duration in any part of the string
|
||||
const DURATION_SUBSTRING_REGEX: RegExp = /(?<time>\d+)\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)/;
|
||||
export const parseDuration = (val: string, strict = true): Duration => {
|
||||
let matches = val.match(strict ? DURATION_REGEX : DURATION_SUBSTRING_REGEX);
|
||||
if (matches !== null) {
|
||||
const groups = matches.groups as any;
|
||||
const dur: Duration = dayjs.duration(groups.time, groups.unit);
|
||||
if (!dayjs.isDuration(dur)) {
|
||||
throw new SimpleError(`Parsed value '${val}' did not result in a valid Dayjs Duration`);
|
||||
}
|
||||
return dur;
|
||||
const DURATION_SUBSTRING_REGEX: RegExp = /(?<time>\d+)\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)/g;
|
||||
|
||||
export const parseDurationFromString = (val: string, strict = true): {duration: Duration, original: string}[] => {
|
||||
let matches = parseRegex(strict ? DURATION_REGEX : DURATION_SUBSTRING_REGEX, val);
|
||||
if (matches !== undefined) {
|
||||
return matches.map(x => {
|
||||
const groups = x.named as NamedGroup;
|
||||
const dur: Duration = dayjs.duration(groups.time, groups.unit);
|
||||
if (!dayjs.isDuration(dur)) {
|
||||
throw new SimpleError(`Parsed value '${x.match}' did not result in a valid Dayjs Duration`);
|
||||
}
|
||||
return {duration: dur, original: `${groups.time} ${groups.unit}`};
|
||||
});
|
||||
}
|
||||
matches = val.match(strict ? ISO8601_REGEX : ISO8601_SUBSTRING_REGEX);
|
||||
if (matches !== null) {
|
||||
const dur: Duration = dayjs.duration(val);
|
||||
if (!dayjs.isDuration(dur)) {
|
||||
throw new SimpleError(`Parsed value '${val}' did not result in a valid Dayjs Duration`);
|
||||
}
|
||||
return dur;
|
||||
|
||||
matches = parseRegex(strict ? ISO8601_REGEX : ISO8601_SUBSTRING_REGEX, val);
|
||||
if (matches !== undefined) {
|
||||
return matches.map(x => {
|
||||
const dur: Duration = dayjs.duration(x.groups[0]);
|
||||
if (!dayjs.isDuration(dur)) {
|
||||
throw new SimpleError(`Parsed value '${x.groups[0]}' did not result in a valid Dayjs Duration`);
|
||||
}
|
||||
return {duration: dur, original: x.groups[0]};
|
||||
});
|
||||
}
|
||||
|
||||
throw new InvalidRegexError([(strict ? DURATION_REGEX : DURATION_SUBSTRING_REGEX), (strict ? ISO8601_REGEX : ISO8601_SUBSTRING_REGEX)], val)
|
||||
}
|
||||
|
||||
export const parseDuration = (val: string, strict = true): Duration => {
|
||||
const res = parseDurationFromString(val, strict);
|
||||
if(res.length > 1) {
|
||||
throw new SimpleError(`Must only have one Duration value, found ${res.length} in: ${val}`);
|
||||
}
|
||||
return res[0].duration;
|
||||
}
|
||||
|
||||
const SUBREDDIT_NAME_REGEX: RegExp = /^\s*(?:\/r\/|r\/)*(\w+)*\s*$/;
|
||||
const SUBREDDIT_NAME_REGEX_URL = 'https://regexr.com/61a1d';
|
||||
export const parseSubredditName = (val:string): string => {
|
||||
@@ -784,7 +804,7 @@ const WIKI_REGEX_URL = 'https://regexr.com/61bq1';
|
||||
const URL_REGEX: RegExp = /^\s*url:(?<url>[^\s]+)\s*$/;
|
||||
const URL_REGEX_URL = 'https://regexr.com/61bqd';
|
||||
|
||||
export const parseWikiContext = (val: string) => {
|
||||
export const parseWikiContext = (val: string): WikiContext | undefined => {
|
||||
const matches = val.match(WIKI_REGEX);
|
||||
if (matches === null) {
|
||||
return undefined;
|
||||
@@ -804,6 +824,34 @@ export const parseExternalUrl = (val: string) => {
|
||||
return (matches.groups as any).url as string;
|
||||
}
|
||||
|
||||
export const parseUrlContext = (val: string): UrlContext | undefined => {
|
||||
const wiki = parseWikiContext(val);
|
||||
if(wiki !== undefined) {
|
||||
return {
|
||||
value: val,
|
||||
context: wiki
|
||||
}
|
||||
}
|
||||
const urlContext = parseExternalUrl(val);
|
||||
if(urlContext !== undefined) {
|
||||
return {
|
||||
value: val,
|
||||
context: {
|
||||
url: urlContext
|
||||
}
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
export const asWikiContext = (val: object): val is WikiContext => {
|
||||
return val !== null && typeof val === 'object' && 'wiki' in val;
|
||||
}
|
||||
|
||||
export const asExtUrlContext = (val: object): val is ExternalUrlContext => {
|
||||
return val !== null && typeof val === 'object' && 'url' in val;
|
||||
}
|
||||
|
||||
export const dummyLogger = {
|
||||
debug: (v: any) => null,
|
||||
error: (v: any) => null,
|
||||
@@ -811,62 +859,81 @@ export const dummyLogger = {
|
||||
info: (v: any) => null
|
||||
}
|
||||
|
||||
const GIST_REGEX = new RegExp(/.*gist\.github\.com\/.+\/(.+)/i)
|
||||
const GH_BLOB_REGEX = new RegExp(/.*github\.com\/(.+)\/(.+)\/blob\/(.+)/i);
|
||||
const REGEXR_REGEX = new RegExp(/^.*((regexr\.com)\/[\w\d]+).*$/i);
|
||||
const REGEXR_PAGE_REGEX = new RegExp(/(.|[\n\r])+"expression":"(.+)","text"/g);
|
||||
export const fetchExternalUrl = async (url: string, logger: (any) = dummyLogger): Promise<string> => {
|
||||
export const normalizeGistFileKey = (val: string) => val.replaceAll(/[^\w\d]/g, '').toLowerCase().trim();
|
||||
export const GIST_REGEX = new RegExp(/.*gist\.github\.com\/(?<user>.+)\/(?<gistId>[^#\/]+)(?:#file-(?<fileName>.+))?/i)
|
||||
export const GIST_RAW_REGEX = new RegExp(/.*gist\.github\.com\/(?<user>.+)\/(?<gistId>[^#\/]+)\/raw\/.+/i)
|
||||
export const GH_BLOB_REGEX = new RegExp(/.*github\.com\/(?<user>.+)\/(?<repo>.+)\/blob\/(?<path>.+)(?:#.+)?/i);
|
||||
export const REGEXR_REGEX = new RegExp(/^.*((regexr\.com)\/[\w\d]+).*$/i);
|
||||
export const REGEXR_PAGE_REGEX = new RegExp(/(.|[\n\r])+"expression":"(.+)","text"/g);
|
||||
export const fetchExternalResult = async (url: string, logger: (any) = dummyLogger): Promise<[string, Response]> => {
|
||||
let hadError = false;
|
||||
logger.debug(`Attempting to detect resolvable URL for ${url}`);
|
||||
let match = url.match(GIST_REGEX);
|
||||
if (match !== null) {
|
||||
const gistApiUrl = `https://api.github.com/gists/${match[1]}`;
|
||||
logger.debug(`Looks like a non-raw gist URL! Trying to resolve ${gistApiUrl}`);
|
||||
let match = parseRegexSingleOrFail(GIST_RAW_REGEX, url); // check for raw gist url first and if found treat as normal URL
|
||||
if(match === undefined) {
|
||||
// if not raw then if its still a gist then we need to parse and use API
|
||||
match = parseRegexSingleOrFail(GIST_REGEX, url);
|
||||
|
||||
try {
|
||||
const response = await fetch(gistApiUrl);
|
||||
if (!response.ok) {
|
||||
logger.error(`Response was not OK from Gist API (${response.statusText}) -- will return response from original URL instead`);
|
||||
if (response.size > 0) {
|
||||
logger.error(await response.text())
|
||||
}
|
||||
hadError = true;
|
||||
} else {
|
||||
const data = await response.json();
|
||||
// get first found file
|
||||
const fileKeys = Object.keys(data.files);
|
||||
if (fileKeys.length === 0) {
|
||||
logger.error(`No files found in gist!`);
|
||||
if (match !== undefined) {
|
||||
const gistApiUrl = `https://api.github.com/gists/${match.named.gistId}`;
|
||||
logger.debug(`Looks like a non-raw gist URL! Trying to resolve ${gistApiUrl} ${match.named.fileName !== undefined ? ` and find file ${match.named.fileName}` : ''}`);
|
||||
|
||||
try {
|
||||
const response = await fetch(gistApiUrl);
|
||||
if (!response.ok) {
|
||||
logger.warn(`Response was not OK from Gist API (${response.statusText}) -- will return response from original URL instead`);
|
||||
if (response.size > 0) {
|
||||
logger.warn(await response.text())
|
||||
}
|
||||
hadError = true;
|
||||
} else {
|
||||
if (fileKeys.length > 1) {
|
||||
logger.warn(`More than one file found in gist! Using first found: ${fileKeys[0]}`);
|
||||
const data = await response.json();
|
||||
// get first found file
|
||||
const fileKeys = Object.keys(data.files);
|
||||
if (fileKeys.length === 0) {
|
||||
logger.error(`No files found in gist!`);
|
||||
} else {
|
||||
logger.debug(`Using file ${fileKeys[0]}`);
|
||||
}
|
||||
const file = data.files[fileKeys[0]];
|
||||
if (file.truncated === false) {
|
||||
return file.content;
|
||||
}
|
||||
const rawUrl = file.raw_url;
|
||||
logger.debug(`File contents was truncated, retrieving full contents from ${rawUrl}`);
|
||||
try {
|
||||
const rawUrlResponse = await fetch(rawUrl);
|
||||
return await rawUrlResponse.text();
|
||||
} catch (err: any) {
|
||||
logger.error('Gist Raw URL Response returned an error, will return response from original URL instead');
|
||||
logger.error(err);
|
||||
let fileKey = fileKeys[0];
|
||||
if (fileKeys.length > 1) {
|
||||
if(match.named.fileName !== undefined) {
|
||||
//const normalizedFileName = normalizeGistFileKey(match.named.fileName.replace('/^file-/', ''));
|
||||
const normalizedFileName = normalizeGistFileKey(match.named.fileName);
|
||||
const matchingKey = fileKeys.find(x => normalizeGistFileKey(x) === normalizedFileName);
|
||||
if(matchingKey === undefined) {
|
||||
throw new SimpleError(`Found Gist ${match.named.gistId} but it did not contain a file named ${match.named.fileName}`);
|
||||
}
|
||||
fileKey = matchingKey;
|
||||
} else {
|
||||
logger.warn(`More than one file found in gist but URL did not specify a filename! Using first found: ${fileKey}`);
|
||||
}
|
||||
} else {
|
||||
logger.debug(`Using file ${fileKey}`);
|
||||
}
|
||||
const file = data.files[fileKey];
|
||||
if (file.truncated === false) {
|
||||
return [file.content, response];
|
||||
}
|
||||
const rawUrl = file.raw_url;
|
||||
logger.debug(`File contents was truncated, retrieving full contents from ${rawUrl}`);
|
||||
try {
|
||||
const rawUrlResponse = await fetch(rawUrl);
|
||||
return [await rawUrlResponse.text(), rawUrlResponse];
|
||||
} catch (err: any) {
|
||||
logger.error('Gist Raw URL Response returned an error, will return response from original URL instead');
|
||||
logger.error(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (err: any) {
|
||||
logger.error('Response returned an error, will return response from original URL instead');
|
||||
logger.error(err);
|
||||
}
|
||||
} catch (err: any) {
|
||||
logger.error('Response returned an error, will return response from original URL instead');
|
||||
logger.error(err);
|
||||
}
|
||||
}
|
||||
match = url.match(GH_BLOB_REGEX);
|
||||
|
||||
if (match !== null) {
|
||||
const rawUrl = `https://raw.githubusercontent.com/${match[1]}/${match[2]}/${match[3]}`
|
||||
match = parseRegexSingleOrFail(GH_BLOB_REGEX, url)
|
||||
|
||||
if (match !== undefined) {
|
||||
const rawUrl = `https://raw.githubusercontent.com/${match.named.user}/${match.named.repo}/${match.named.path}`
|
||||
logger.debug(`Looks like a single file github URL! Resolving to ${rawUrl}`);
|
||||
try {
|
||||
const response = await fetch(rawUrl);
|
||||
@@ -877,7 +944,7 @@ export const fetchExternalUrl = async (url: string, logger: (any) = dummyLogger)
|
||||
}
|
||||
hadError = true;
|
||||
} else {
|
||||
return await response.text();
|
||||
return [await response.text(), response];
|
||||
}
|
||||
} catch (err: any) {
|
||||
logger.error('Response returned an error, will return response from original URL instead');
|
||||
@@ -885,8 +952,8 @@ export const fetchExternalUrl = async (url: string, logger: (any) = dummyLogger)
|
||||
}
|
||||
}
|
||||
|
||||
match = url.match(REGEXR_REGEX);
|
||||
if(match !== null) {
|
||||
match = parseRegexSingleOrFail(REGEXR_REGEX, url);
|
||||
if(match !== undefined) {
|
||||
logger.debug(`Looks like a Regexr URL! Trying to get expression from page HTML`);
|
||||
try {
|
||||
const response = await fetch(url);
|
||||
@@ -921,7 +988,7 @@ export const fetchExternalUrl = async (url: string, logger: (any) = dummyLogger)
|
||||
}
|
||||
throw new Error(`Response was not OK: ${response.statusText}`);
|
||||
}
|
||||
return await response.text();
|
||||
return [await response.text(), response];
|
||||
}
|
||||
|
||||
export interface RetryOptions {
|
||||
@@ -1382,30 +1449,44 @@ export const parseStringToRegexOrLiteralSearch = (val: string, defaultFlags: str
|
||||
return literalSearchRegex;
|
||||
}
|
||||
|
||||
export const parseRegex = (reg: RegExp, val: string): RegExResult => {
|
||||
export const parseRegex = (reg: RegExp, val: string): RegExResult[] | undefined => {
|
||||
|
||||
if(reg.global) {
|
||||
const g = Array.from(val.matchAll(reg));
|
||||
const global = g.map(x => {
|
||||
if(g.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
return g.map(x => {
|
||||
return {
|
||||
match: x[0],
|
||||
index: x.index,
|
||||
groups: x.slice(1),
|
||||
named: x.groups,
|
||||
}
|
||||
named: x.groups || {},
|
||||
} as RegExResult;
|
||||
});
|
||||
return {
|
||||
matched: g.length > 0,
|
||||
matches: g.length > 0 ? g.map(x => x[0]) : [],
|
||||
global: g.length > 0 ? global : [],
|
||||
};
|
||||
}
|
||||
|
||||
const m = val.match(reg)
|
||||
return {
|
||||
matched: m !== null,
|
||||
matches: m !== null ? m.slice(0).filter(x => x !== undefined) : [],
|
||||
global: [],
|
||||
if(m === null) {
|
||||
return undefined;
|
||||
}
|
||||
return [{
|
||||
match: m[0],
|
||||
index: m.index as number,
|
||||
groups: m.slice(1),
|
||||
named: m.groups || {}
|
||||
}];
|
||||
}
|
||||
|
||||
export const parseRegexSingleOrFail = (reg: RegExp, val: string): RegExResult | undefined => {
|
||||
const results = parseRegex(reg, val);
|
||||
if(results !== undefined) {
|
||||
if(results.length > 1) {
|
||||
throw new SimpleError(`Expected Regex to match once but got ${results.length} results. Either Regex must NOT be global (using 'g' flag) or parsed value must only match regex once. Given: ${val} || Regex: ${reg.toString()}`);
|
||||
}
|
||||
return results[0];
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
export const testMaybeStringRegex = (test: string, subject: string, defaultFlags: string = 'i'): [boolean, string] => {
|
||||
@@ -1695,10 +1776,26 @@ function *setMinus(A: Array<any>, B: Array<any>) {
|
||||
}
|
||||
|
||||
|
||||
export const difference = (a: Array<any>, b: Array<any>) => {
|
||||
/**
|
||||
* Returns elements that both arrays do not have in common
|
||||
*/
|
||||
export const symmetricalDifference = (a: Array<any>, b: Array<any>) => {
|
||||
return Array.from(setMinus(a, b));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a Set of elements from valA not in valB
|
||||
* */
|
||||
export function difference(valA: Set<any> | Array<any>, valB: Set<any> | Array<any>) {
|
||||
const setA = valA instanceof Set ? valA : new Set(valA);
|
||||
const setB = valB instanceof Set ? valB : new Set(valB);
|
||||
const _difference = new Set(setA);
|
||||
for (const elem of setB) {
|
||||
_difference.delete(elem);
|
||||
}
|
||||
return _difference;
|
||||
}
|
||||
|
||||
// can use 'in' operator to check if object has a property with name WITHOUT TRIGGERING a snoowrap proxy to fetch
|
||||
export const isSubreddit = (value: any) => {
|
||||
try {
|
||||
@@ -2823,3 +2920,21 @@ export const toModNoteLabel = (val: string): ModUserNoteLabel => {
|
||||
export const asModNoteLabel = (val: string): val is ModUserNoteLabel => {
|
||||
return modUserNoteLabels.includes(val);
|
||||
}
|
||||
|
||||
/**
|
||||
* Split an array into two based on a truthy function
|
||||
*
|
||||
* Returns arrays -> [[...passed],[...failed]]
|
||||
*
|
||||
* https://stackoverflow.com/a/42299191/1469797
|
||||
* */
|
||||
export function partition<T>(array: T[], callback: (element: T, index: number, array: T[]) => boolean) {
|
||||
return array.reduce(function (result: [T[], T[]], element, i) {
|
||||
callback(element, i, array)
|
||||
? result[0].push(element)
|
||||
: result[1].push(element);
|
||||
|
||||
return result;
|
||||
}, [[], []]
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,18 +1,20 @@
|
||||
import {describe, it} from 'mocha';
|
||||
import {assert} from 'chai';
|
||||
import {
|
||||
COMMENT_URL_ID,
|
||||
parseDuration,
|
||||
COMMENT_URL_ID, GH_BLOB_REGEX, GIST_RAW_REGEX,
|
||||
GIST_REGEX,
|
||||
parseDurationFromString,
|
||||
parseLinkIdentifier,
|
||||
parseRedditEntity, removeUndefinedKeys, SUBMISSION_URL_ID
|
||||
parseRedditEntity, parseRegexSingleOrFail, REGEXR_REGEX, removeUndefinedKeys, SUBMISSION_URL_ID
|
||||
} from "../src/util";
|
||||
import dayjs from "dayjs";
|
||||
import dduration, {Duration, DurationUnitType} from 'dayjs/plugin/duration.js';
|
||||
import {
|
||||
parseDurationComparison,
|
||||
parseGenericValueComparison,
|
||||
parseGenericValueOrPercentComparison
|
||||
parseGenericValueOrPercentComparison, parseReportComparison
|
||||
} from "../src/Common/Infrastructure/Comparisons";
|
||||
import {RegExResult} from "../src/Common/interfaces";
|
||||
|
||||
dayjs.extend(dduration);
|
||||
|
||||
@@ -70,26 +72,91 @@ describe('Non-temporal Comparison Operations', function () {
|
||||
assert.exists(val.duration);
|
||||
assert.equal(dayjs.duration(2, 'months').milliseconds(), (val.duration as Duration).milliseconds());
|
||||
});
|
||||
it('should throw if more than one time component found', function () {
|
||||
assert.throws(() => parseDurationComparison('> 3 in 2 days and 4 months'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('Parsing Temporal Values', function () {
|
||||
|
||||
describe('Temporal Comparison Operations', function () {
|
||||
it('should throw if no operator sign', function () {
|
||||
assert.throws(() => parseDurationComparison('just 3'))
|
||||
describe('Parsing Text', function () {
|
||||
for (const unit of ['millisecond', 'milliseconds', 'second', 'seconds', 'minute', 'minutes', 'hour', 'hours', 'day', 'days', 'week', 'weeks', 'month', 'months', 'year', 'years']) {
|
||||
it(`should accept ${unit} unit for duration`, function () {
|
||||
const result = parseDurationFromString(`1 ${unit}`)[0];
|
||||
assert.equal(result.original, `1 ${unit}`);
|
||||
assert.equal(result.duration.asMilliseconds(), dayjs.duration(1, unit as DurationUnitType).asMilliseconds());
|
||||
});
|
||||
}
|
||||
it('should accept ISO8601 durations', function () {
|
||||
|
||||
const shortResult = parseDurationFromString('P23DT23H')[0];
|
||||
assert.equal(shortResult.original, 'P23DT23H');
|
||||
assert.equal(shortResult.duration.asSeconds(), dayjs.duration({
|
||||
days: 23,
|
||||
hours: 23
|
||||
}).asSeconds());
|
||||
|
||||
const longResult = parseDurationFromString('P3Y6M4DT12H30M5S')[0];
|
||||
|
||||
assert.equal(longResult.original, 'P3Y6M4DT12H30M5S');
|
||||
assert.equal(longResult.duration.asSeconds(), dayjs.duration({
|
||||
years: 3,
|
||||
months: 6,
|
||||
days: 4,
|
||||
hours: 12,
|
||||
minutes: 30,
|
||||
seconds: 5
|
||||
}).asSeconds());
|
||||
});
|
||||
|
||||
it('should parse durations from anywhere in a string', function() {
|
||||
const example1 = `> 2 user "misinfo" in 2 hours`;
|
||||
const ex1Result = parseDurationFromString(example1, false)[0];
|
||||
assert.equal(ex1Result.original, '2 hours');
|
||||
assert.equal(ex1Result.duration.asMilliseconds(), dayjs.duration(2, 'hours').asMilliseconds());
|
||||
|
||||
const example2 = `Test example 4 minutes ago`;
|
||||
const ex2Result = parseDurationFromString(example2, false)[0];
|
||||
assert.equal(ex2Result.original, '4 minutes');
|
||||
assert.equal(ex2Result.duration.asMilliseconds(), dayjs.duration(4, 'minutes').asMilliseconds());
|
||||
|
||||
const example3 = `Test 3 hours will be`;
|
||||
const ex3Result = parseDurationFromString(example3, false)[0];
|
||||
assert.equal(ex3Result.original, '3 hours');
|
||||
assert.equal(ex3Result.duration.asMilliseconds(), dayjs.duration(3, 'hours').asMilliseconds());
|
||||
|
||||
const example4 = `> 2 user "misinfo" in P23DT23H with extra`;
|
||||
const ex4Result = parseDurationFromString(example4, false)[0];
|
||||
assert.equal(ex4Result.original, 'P23DT23H');
|
||||
assert.equal(ex4Result.duration.asMilliseconds(), dayjs.duration({
|
||||
days: 23,
|
||||
hours: 23
|
||||
}).asMilliseconds());
|
||||
});
|
||||
})
|
||||
|
||||
describe('Temporal Comparison Operations', function () {
|
||||
|
||||
it('should throw if no units', function () {
|
||||
assert.throws(() => parseDurationComparison('> 3'))
|
||||
});
|
||||
|
||||
for (const unit of ['millisecond', 'milliseconds', 'second', 'seconds', 'minute', 'minutes', 'hour', 'hours', 'day', 'days', 'week', 'weeks', 'month', 'months', 'year', 'years']) {
|
||||
it(`should accept ${unit} unit`, function () {
|
||||
assert.doesNotThrow(() => parseDurationComparison(`> 3 ${unit}`))
|
||||
});
|
||||
}
|
||||
it('should only accept units compatible with dayjs', function () {
|
||||
assert.throws(() => parseDurationComparison('> 3 gigawatts'))
|
||||
});
|
||||
|
||||
it('should throw if value is a percentage', function () {
|
||||
assert.throws(() => parseDurationComparison('> 3% months'))
|
||||
});
|
||||
|
||||
it('should throw if value is negative', function () {
|
||||
assert.throws(() => parseDurationComparison('> -3 months'))
|
||||
});
|
||||
|
||||
it('should throw if more than one duration value found', function () {
|
||||
assert.throws(() => parseDurationComparison('> 3 months in 2 days'))
|
||||
});
|
||||
|
||||
it('should parse greater-than with a duration', function () {
|
||||
const res = parseDurationComparison('> 3 days');
|
||||
assert.equal(res.operator, '>')
|
||||
@@ -112,30 +179,56 @@ describe('Parsing Temporal Values', function () {
|
||||
})
|
||||
})
|
||||
|
||||
describe('Parsing Text', function () {
|
||||
for (const unit of ['millisecond', 'milliseconds', 'second', 'seconds', 'minute', 'minutes', 'hour', 'hours', 'day', 'days', 'week', 'weeks', 'month', 'months', 'year', 'years']) {
|
||||
it(`should accept ${unit} unit for duration`, function () {
|
||||
assert.equal(parseDuration(`1 ${unit}`).asMilliseconds(), dayjs.duration(1, unit as DurationUnitType).asMilliseconds());
|
||||
});
|
||||
});
|
||||
|
||||
describe('Report Comparison Operations', function () {
|
||||
|
||||
it(`should accept report type as optional`, function () {
|
||||
const result = parseReportComparison(`> 0`)
|
||||
assert.isUndefined(result.reportType);
|
||||
});
|
||||
|
||||
it(`should accept 'user' report type`, function () {
|
||||
for(const type of ['user','users']) {
|
||||
const result = parseReportComparison(`> 0 ${type}`)
|
||||
assert.equal(result.reportType, 'user');
|
||||
}
|
||||
it('should accept ISO8601 durations', function () {
|
||||
});
|
||||
|
||||
assert.equal(parseDuration('P23DT23H').asSeconds(), dayjs.duration({
|
||||
days: 23,
|
||||
hours: 23
|
||||
}).asSeconds());
|
||||
it(`should accept 'mod' report type`, function () {
|
||||
for(const type of ['mod','mods']) {
|
||||
const result = parseReportComparison(`> 0 ${type}`)
|
||||
assert.equal(result.reportType, 'mod');
|
||||
}
|
||||
});
|
||||
|
||||
assert.equal(parseDuration('P3Y6M4DT12H30M5S').asSeconds(), dayjs.duration({
|
||||
years: 3,
|
||||
months: 6,
|
||||
days: 4,
|
||||
hours: 12,
|
||||
minutes: 30,
|
||||
seconds: 5
|
||||
}).asSeconds());
|
||||
});
|
||||
})
|
||||
it(`should accept report reason literals in single or double quotes`, function () {
|
||||
for(const reasonStr of [`'misinfo'`, `"misinfo"`]) {
|
||||
const result = parseReportComparison(`> 0 ${reasonStr}`)
|
||||
assert.equal(result.reasonMatch, reasonStr);
|
||||
assert.equal((result.reasonRegex as RegExp).toString(), new RegExp(/.*misinfo.*/, 'i').toString());
|
||||
}
|
||||
});
|
||||
|
||||
it(`should accept report reason as regex`, function () {
|
||||
const result = parseReportComparison(`> 0 /misinfo/`)
|
||||
assert.equal(result.reasonMatch, '/misinfo/');
|
||||
assert.equal((result.reasonRegex as RegExp).toString(), new RegExp(/misinfo/, 'i').toString());
|
||||
});
|
||||
|
||||
it(`should accept a time constraint`, function () {
|
||||
const result = parseReportComparison(`> 1 in 2 hours`)
|
||||
assert.equal(result.durationText, '2 hours');
|
||||
assert.equal((result.duration as Duration).asMilliseconds(), dayjs.duration(2, 'hours').asMilliseconds())
|
||||
});
|
||||
|
||||
it(`should accept all components`, function () {
|
||||
const result = parseReportComparison(`> 1 user 'misinfo' in 2 hours`)
|
||||
assert.equal(result.reasonMatch, `'misinfo'`);
|
||||
assert.equal((result.reasonRegex as RegExp).toString(), new RegExp(/.*misinfo.*/, 'i').toString());
|
||||
assert.equal(result.durationText, '2 hours');
|
||||
assert.equal((result.duration as Duration).asMilliseconds(), dayjs.duration(2, 'hours').asMilliseconds())
|
||||
});
|
||||
});
|
||||
|
||||
describe('Parsing Reddit Entity strings', function () {
|
||||
@@ -210,5 +303,57 @@ describe('Link Recognition', function () {
|
||||
|
||||
// it('should recognize submission id from reddit shortlink')
|
||||
// https://redd.it/92dd8
|
||||
});
|
||||
|
||||
describe('External URL Parsing', function() {
|
||||
|
||||
it('should recognize and parse raw gist URLs', function() {
|
||||
const res = parseRegexSingleOrFail(GIST_RAW_REGEX, 'https://gist.github.com/FoxxMD/2b035429fbf326a00d9a6ca2a38011d9/raw/97076d52114eb17a8754384d95087e8a0a74cf88/file-with-symbols.test.yaml');
|
||||
assert.exists(res);
|
||||
const rese = res as RegExResult;
|
||||
assert.equal(rese.named.user, 'FoxxMD');
|
||||
assert.equal(rese.named.gistId, '2b035429fbf326a00d9a6ca2a38011d9');
|
||||
});
|
||||
|
||||
it('should not parse non-raw gist URLs with raw regex', function() {
|
||||
for(const url of [
|
||||
'https://gist.github.com/FoxxMD/2b035429fbf326a00d9a6ca2a38011d9',
|
||||
'https://gist.github.com/FoxxMD/2b035429fbf326a00d9a6ca2a38011d9#file-file-with-symbols-test-yaml'
|
||||
]) {
|
||||
const res = parseRegexSingleOrFail(GIST_RAW_REGEX, url);
|
||||
assert.notExists(res, `Should not have parsed ${url} as RAW gist`);
|
||||
}
|
||||
});
|
||||
|
||||
it('should recognize and parse gist URLs', function() {
|
||||
const res = parseRegexSingleOrFail(GIST_REGEX, 'https://gist.github.com/FoxxMD/2b035429fbf326a00d9a6ca2a38011d9');
|
||||
assert.exists(res);
|
||||
const rese = res as RegExResult;
|
||||
assert.equal(rese.named.user, 'FoxxMD');
|
||||
assert.equal(rese.named.gistId, '2b035429fbf326a00d9a6ca2a38011d9');
|
||||
});
|
||||
|
||||
it('should recognize and parse gist URLs with filename hashes', function() {
|
||||
const res = parseRegexSingleOrFail(GIST_REGEX, 'https://gist.github.com/FoxxMD/2b035429fbf326a00d9a6ca2a38011d9#file-file-with-symbols-test-yaml');
|
||||
assert.exists(res);
|
||||
const rese = res as RegExResult;
|
||||
assert.equal(rese.named.user, 'FoxxMD');
|
||||
assert.equal(rese.named.gistId, '2b035429fbf326a00d9a6ca2a38011d9');
|
||||
assert.equal(rese.named.fileName, 'file-with-symbols-test-yaml');
|
||||
});
|
||||
|
||||
it('should recognize and parse github blob URLs', function() {
|
||||
const res = parseRegexSingleOrFail(GH_BLOB_REGEX, 'https://github.com/FoxxMD/context-mod/blob/master/src/util.ts');
|
||||
assert.exists(res);
|
||||
const rese = res as RegExResult;
|
||||
assert.equal(rese.named.user, 'FoxxMD');
|
||||
assert.equal(rese.named.repo, 'context-mod');
|
||||
assert.equal(rese.named.path, 'master/src/util.ts');
|
||||
});
|
||||
|
||||
it('should recognize regexr URLs', function() {
|
||||
const res = parseRegexSingleOrFail(REGEXR_REGEX, 'https://regexr.com/6pomb');
|
||||
assert.exists(res);
|
||||
});
|
||||
})
|
||||
})
|
||||
|
||||
Reference in New Issue
Block a user