Compare commits

..

98 Commits

Author SHA1 Message Date
FoxxMD
ea875e392f feat(image): Implement template matching #99
* Initial template matching logic using openCV
* Test using pokemon cheat logo
* Test using an "exact" selection from a stock image https://unsplash.com/photos/J2RQT7kJSMM
2022-08-24 17:25:55 -04:00
FoxxMD
b89d98e42b feat(image): Integrate openCV dependency
* Add opencv bindings/build packages as optional dependencies
* Add openCV build step to docker image
* document openCV install process for local installations
2022-08-24 12:55:19 -04:00
FoxxMD
c02a2ad622 chore: Bump version constant due to BC 2022-08-23 09:49:12 -04:00
FoxxMD
e9f08915a4 chore: Bump version constant 2022-08-23 09:47:35 -04:00
FoxxMD
4b95ccd0ba feat(docs): Simplify getting started operator instructions
Take advantage of First Time Setup page when no configuration is present to help generate a minimum configuration.
2022-08-23 09:46:54 -04:00
FoxxMD
567a2f0720 feat(action): Implement creating submissions #93
* Create self/link submissions, no reddit media (upload) yet
* Submission can use user modifiers (nsfw, spoiler) and mod modifiers (sticky, distinguish, lock)
* Can create submission in current subreddit (default) or arbitrary subreddit defined in configuration (targets)
  * Can create multiple submissions by defining multiple targets
2022-08-22 14:00:27 -04:00
FoxxMD
61ff402256 feat(config): Allow wikiConfig override at subreddit-level 2022-08-22 10:36:18 -04:00
FoxxMD
9158bda992 feat(comment): Implement arbitrary comment targets #93 2022-08-19 11:01:54 -04:00
FoxxMD
873f9d3c91 refactor(image): Normalize image hash usage a bit more to reduce complexity 2022-08-18 13:35:37 -04:00
FoxxMD
783ef5db53 fix(image): url variable name usage 2022-08-16 13:36:36 -04:00
FoxxMD
f0cb5c1315 fix(ui): Fix constant reassignment 2022-08-16 13:36:20 -04:00
FoxxMD
8e1b916ea4 fix(database): Remove table data check due to incompatibility with postgres
Not sure what the issue is but a generic message should be enough for now
2022-08-16 10:28:10 -04:00
FoxxMD
4acf87eacd feat(database): Implement automated backup for better-sqlite3 connection 2022-08-16 10:05:20 -04:00
FoxxMD
5dd5a32c51 fix(database): Fix how new columns are added to existing table on invite migration 2022-08-16 09:47:53 -04:00
FoxxMD
207907881f feat: Add initial setup wizard
* If web client does not have credentials (or operator) then redirect login to form to fill them in
  * Write to config location on form completion
2022-08-15 17:17:23 -04:00
FoxxMD
44da276d41 fix: Fix missing column type specifying for db session entity 2022-08-15 16:05:00 -04:00
FoxxMD
6c98b6f995 fix: cleanup guests and no-config scenario
* Check for no/null guest data during invite creation/usage for both client and server side
* Add instances tab on invite page
* Replace instance select on invite page with query string usage
* Redirect from status page to auth helper when instance has no bots
2022-08-15 15:59:51 -04:00
FoxxMD
cc0c3dfe61 docs: Fix incorrect url for minimum config 2022-08-15 15:57:45 -04:00
FoxxMD
b48d75fda3 fix(config): Fix setting api friendly name
addIn creates missing collections automatically
2022-08-15 15:57:26 -04:00
FoxxMD
2adf2d258d fix(database): Handle invite migration when db is sqljs
* Create table if it does not exist (sqljs has two databases)
* Drop Invite table from sqljs db if it has no rows
2022-08-15 15:56:43 -04:00
FoxxMD
c55a1c6502 feat(ui): Add initial guests to bot invite and rename Guest Mod to Guest
* Rename to Guest since this is more accurate of a description -- "mod" is confusing since user doesn't have any actual mod power
* Add guests as an option to invite creation and display on invite page
2022-08-11 15:23:09 -04:00
FoxxMD
0011ff8853 chore: Re-add clean username for guest due to merge 2022-08-11 14:53:48 -04:00
FoxxMD
4bbf871051 Merge branch 'edge' into tempAccess
# Conflicts:
#	src/Web/Server/routes/authenticated/user/index.ts
#	src/Web/Server/server.ts
2022-08-11 14:50:45 -04:00
FoxxMD
54755dc480 refactor(guest)!: Migrate invites to be owned by the server
It makes more sense for the CM instance that will actually have a bot added to it to own the invite for that bot.

* (BC) Move Invite into server entity mappings and rename to BotInvite
  * Add guests and initialConfig (future use)
  * BC -- Existing invites need to have instance defined to be used
  * BC -- Cache-based invite storage has been REMOVED
  * BC -- Removed inviteMaxAge config property (for now)
* Add SubredditInvite entity for future use
* Force/implement server (api) having a defined friendly name. This is used to determine which invites in a DB belong to which server instance
  * If not defined in config it is generated at random and then written to config
* Refactor how invites are retrieved and parsed client-side -- CRUD using server api
  * BC -- Invite URL structure has changed from ?invite=id to /invite/id...
* Added better UI for migration redirect
  * Show all reachable instances in redirect page header
  * Error page also shows reachable instances in page header
2022-08-11 14:47:45 -04:00
FoxxMD
01f95a37e7 feat(ui): Add guided tour for main dashboard 2022-08-03 14:37:56 -04:00
FoxxMD
5ddad418b0 fix(manager): Fix missing valid config check before starting queue 2022-08-02 16:05:48 -04:00
FoxxMD
b5b2e88c1f fix(guest mod): Parse/clean name from user input before adding
So that we don't care if user sends FoxxMD or u/FoxxMD
2022-08-02 15:46:56 -04:00
FoxxMD
194ded7be6 fix(auth): Fix subreddit normalization on client instance 2022-08-02 15:31:00 -04:00
FoxxMD
7ba375d702 fix(auth): Fix subreddit normalization 2022-08-02 14:15:01 -04:00
FoxxMD
9a4c38151f fix(auth): Fix default instance middleware accessibility check 2022-08-02 14:06:49 -04:00
FoxxMD
f8df6fc93f docs: Remove removal reason footnote 2022-08-02 13:02:28 -04:00
FoxxMD
86a3b229cb feat(remove): Implement removal reason id and note
* Cache subreddit removal reasons and display in popup helper in authenticated config editor
* Add fields for removal reason note/reason in remove action
* Update remove action documentation to include new fields
2022-08-02 12:34:40 -04:00
FoxxMD
ca3e8d7d80 feat: Add removal reason related endpoints to snoowrap client
Using undocumented endpoints pulled from praw documentation/code. Snoowrap can now:

* add removal reason/mod note on a removed activity
* get subreddit removal reasons (used for getting ids for use with removal reason endpoint)
2022-08-02 11:09:05 -04:00
FoxxMD
5af4384871 Merge branch 'tempAccess' into edge 2022-08-01 12:08:10 -04:00
FoxxMD
47957e6ab9 fix: Remove debug statements for image processing 2022-07-29 15:05:08 -04:00
FoxxMD
7f1a404b4e feat(image): Implement image flip detection in recent rule 2022-07-29 14:42:49 -04:00
FoxxMD
1f64a56260 feat(image): Improve image normalization to make duplicate detection more powerful
* Trim image to remove arbitrary borders
* Convert image to greyscale in order to reduce effect of saturation differences
* Implement "mirrored" (y-axis flip) hash calculations
* Implement local file fetch and image processing test suite
2022-07-29 14:19:32 -04:00
FoxxMD
366cb2b629 More guest mod cleanup and fixes
* Default value on guest mod CRUD
* Validate expire time client-side
2022-07-28 14:28:43 -04:00
FoxxMD
b9b442ad1e feat(ui): Implement saving config as guest mod 2022-07-28 14:14:55 -04:00
FoxxMD
81a1bdb446 refactor: Simplify ACL for server/client users and introduce guest context
* Use BotInstance class on client side for easier state tracking and simplifying ACL functions (moved from user to instance)
* Implement and use same interface for client/server Bot representations
* Implement mod/guest context for status and live stats data
* Restrict guest mod CRUD to mods only
2022-07-28 13:39:46 -04:00
FoxxMD
1e4b369b1e Fix CRUD for guest mods 2022-07-28 09:45:30 -04:00
FoxxMD
7a34a7b531 Almost working correctly guest mod add
Need to fix something wrong with time parsing server-side
2022-07-27 17:06:55 -04:00
FoxxMD
b83bb6f998 Refactor guest relationship
* Use eager relation
* Fix orphan row usage
2022-07-27 16:18:34 -04:00
FoxxMD
3348af2780 Implement guest mod removal via UI and refactor guest rendering into live stats 2022-07-27 15:32:00 -04:00
FoxxMD
04896a7363 Implement guest mod entities and read-only ui 2022-07-27 13:07:56 -04:00
FoxxMD
d8003e049c chore(schema): Update schema to reflect ban property max length changes 2022-07-25 16:36:24 -04:00
FoxxMD
b67a933084 feat(ui): Real-time data usage improvements based on visibility
Stop or restart real-time data (logs, stats) based on page visibility API so that the client does not continue to consume data when page is in the background/in a non-visible tab
2022-07-25 16:35:32 -04:00
FoxxMD
d684ecc0ff docs: Add memory management notes
* Document memory management approaches
* Change node args for docker to a better name (NODE_ARGS)
* Implement default node arg for docker `--max_old_space_size=512`
2022-07-25 12:20:02 -04:00
FoxxMD
9efd4751d8 feat(templating): Render content for more actions and properties
* Template render Message action 'title'
* Template render Ban action 'reason' and 'note'
2022-07-25 11:56:22 -04:00
FoxxMD
9331c2a3c8 feat(templating): Include activity id and title for all activities
* Include reddit thing id as 'id'
* Include 'title' -- for submission this is submission title. For comment this is the first 50 characters of the comment truncated with '...'
* Include 'shortTitle' -- same as above but truncated to 15 characters
2022-07-25 11:41:43 -04:00
FoxxMD
d6f7ce2441 feat(server): Better handling for subreddit invite CRUD
* Return 400 with descriptive error when invalid value or duplicate invite requested
* Better value comparison for subreddit invite deletion
2022-07-25 11:24:19 -04:00
FoxxMD
ffd7033faf feat(server): Add server child logger to requests for easier logging 2022-07-25 11:22:44 -04:00
FoxxMD
df5825d8df fix(ui): Fix setting style for non-existing element on subreddit invite 2022-07-25 11:13:32 -04:00
FoxxMD
42c6ca7af5 feat(ui): More live stat delta improvements and better stream handling on the browser
* More granular delta for nested objects
* Custom delta structure for delayedItems
* Fix abort controller overwrite
* Enforce maximum of two unfocused log streams before cancelling immediately on visible change to reduce concurrent number of requests from browser
2022-07-22 13:50:53 -04:00
FoxxMD
1e94835f97 feat(ui): Reduce data usage for live stats using response deltas 2022-07-22 09:46:01 -04:00
FoxxMD
6230ef707d feat(ui): Improve delayed activity cancel handling
* Fix missing bot param in DELETE call
* Fix missing database removal of canceled activity
* Implement ability to cancel all accessible
2022-07-21 10:11:19 -04:00
FoxxMD
b290a4696d fix(ui): Use correct auth middleware for api proxy endpoint 2022-07-20 13:04:46 -04:00
FoxxMD
4c965f7215 feat(docker): Expand NODE_FLAGS (ENV) variable in node run command to allow arbitrary flags be passed through docker ENVs 2022-07-20 11:18:13 -04:00
FoxxMD
ce990094a1 feat(config): Gate memory monitoring behind operator config option monitorMemory 2022-07-20 10:20:52 -04:00
FoxxMD
4196d2acb0 feat(influx): Add server memory metrics 2022-07-19 16:28:58 -04:00
FoxxMD
3150da8b4a feat(influx): Add manager health metrics 2022-07-19 14:26:14 -04:00
FoxxMD
655c82d5e1 fix(filter): Make source filter stricter by requiring exact value rather than "contains"
Fixes issue where a source that is a shorter version of a longer identifier could accidentally get run first
2022-07-18 15:55:41 -04:00
FoxxMD
73302b718e chore: Bump version for 0.11.4 release 2022-07-15 09:27:04 -04:00
FoxxMD
bbf91ceac0 feat(filter): Implement filtering by upvoteRatio for authorIs 2022-07-14 13:56:12 -04:00
FoxxMD
67b793c2aa feat: Support parsing filename from non-raw GIST url in editor
* Support filename for non-raw gist
* Fix raw gist URL usage by fetching from CDN
2022-07-14 12:48:04 -04:00
FoxxMD
d635e5a65d feat: Support parsing filename from non-raw GIST url 2022-07-14 11:12:04 -04:00
FoxxMD
8dc140a953 feat(report): Implement filtering reports in itemIs by time
* Refactor time parsing and comparison utils to consolidate logic and be more flexible
* Tests for the above
* Fix indexes for ActivityReport (should be unique)
* Fix missing eager load for Activity-Report relationship
* Implement time filtering for reports in itemIs
2022-07-13 13:00:49 -04:00
FoxxMD
eaa9f627e2 refactor(report): Cleanup monitoring in manager 2022-07-13 09:36:33 -04:00
FoxxMD
16cb28cb72 feat(report): Implement report tracking #78
* Store reports with likely creation time to DB
* Use some trickery and caching in Manager to get likely report creation time
2022-07-12 17:29:28 -04:00
FoxxMD
fa450f9f8f docs(database): Add influx/grafana documentation 2022-07-11 13:46:27 -04:00
FoxxMD
1e76ca6c0e docs(config): Add partial configuration documentation 2022-07-11 13:01:40 -04:00
FoxxMD
25b7ea497f feat(filter): Implement specifying report reason in report comparison 2022-07-11 12:19:28 -04:00
FoxxMD
2f93774346 fix: Fix unnecessary subreddit fetch call on init
Snoowrap doesn't add fetch property to objects from listings so even though subreddit may already be fetched it always re-fetches, using an api call. Do a dirty fetch check here to prevent wasting calls when we are sure the subreddit is fetched.
2022-07-11 12:18:30 -04:00
FoxxMD
e2f6a92a90 fix(filter): Fix logic for assigning number of reports to compare 2022-07-11 10:01:58 -04:00
FoxxMD
0ffddaac9e Revert "chore: Bump got and sharp versions for dependabot fixes"
This reverts commit fbf328d90f.
2022-07-11 08:56:44 -04:00
FoxxMD
4ddf6ddf26 Revert "chore: Bump passport version for dependabot fixes"
This reverts commit 7f61c190ea.
2022-07-11 08:56:41 -04:00
FoxxMD
7f61c190ea chore: Bump passport version for dependabot fixes 2022-07-08 12:34:54 -04:00
FoxxMD
fbf328d90f chore: Bump got and sharp versions for dependabot fixes 2022-07-08 12:33:03 -04:00
FoxxMD
36c6f7f1b8 feat(ban): Implement templating for ban note/reason 2022-07-08 10:51:47 -04:00
FoxxMD
0379ad17b9 feat: Add more Activity data to templating data #94
* Add votes, reports, upvote ratio, nsfw, op, and spoiler
* Update templating documentation
2022-07-08 10:24:10 -04:00
FoxxMD
02ed9f91f9 feat: Improve api health monitoring
* Potentially fix #30
* Break out api sampling and output into own function
* Add influx metrics for used calls and sample faster
* Fix default tags inheritance
2022-07-07 13:05:14 -04:00
FoxxMD
9fcc3db7b2 fix(modnote): Use correct activity identifier for reddit_id 2022-07-07 09:43:25 -04:00
FoxxMD
65ea84a69d refactor(database): Move Event creation to earlier in activity handling function for a more accurate processing time 2022-07-06 17:41:14 -04:00
FoxxMD
a4b8d3a8ef feat(database): Add api health measurement to influxdb 2022-07-06 17:11:38 -04:00
FoxxMD
2692a5fecb fix(database): Fix processing/queued time value 2022-07-06 16:48:39 -04:00
FoxxMD
192c1659a0 feat(database): Initial influx db implementation #84 2022-07-06 15:14:58 -04:00
FoxxMD
cc241e41f4 refactor(logging): Improve log retention performance 2022-07-05 12:30:51 -04:00
FoxxMD
4bfb57a6cf chore: Update connect-typeorm dependency
Moved to 2.0.0 b/c it now supports typeorm >= 0.3.0
2022-07-05 09:43:25 -04:00
FoxxMD
b6dedae7a1 fix(usernote): Fix regression with note types
Can use a custom note type so don't restrict to only the documented ones
2022-07-01 12:06:48 -04:00
FoxxMD
f2783bd7a4 feat(config): More error handling and internal improvements to config fragment parsing
* Refactor config fragment parsing function to always return an array for simpler usage in hydrate function
* Add error handling to all fragment calls so user can get a clear description of which fragment failed, contextually
2022-07-01 11:46:54 -04:00
FoxxMD
2efe41eadd fix(cache): Use correct cache key and improve external url cache re-use
* Fix incorrect variable used for cache key that prevented cache from working at all (oops)
* Don't add subreddit context to ext url cache key so it can be re-used from any subreddit
2022-07-01 11:45:11 -04:00
FoxxMD
ebd60b9abe feat(config): Support config fragments as array of fragments #76 2022-07-01 10:20:53 -04:00
FoxxMD
2a16df49a4 feat(config): Implement cache control for config fragments #76 2022-06-30 14:44:04 -04:00
FoxxMD
0d6841259b feat(config): Initial implementation for partial configs #76 2022-06-30 13:04:02 -04:00
FoxxMD
8c0755c8c2 working when no includes in config 2022-06-30 12:39:57 -04:00
FoxxMD
5e1da5bc5d feat(bot): Implement better in-situ subreddit add/removal
* Refactor manager build/init from bot to be independent of bot init process
  * Remove/destroy managers for subreddits no longer moderated by bot account or not in operator list
  * Add/create managers for subreddits that should be moderated
* Run manager build function during heartbeat to sync managers
2022-06-28 13:24:56 -04:00
FoxxMD
242c6a49b5 docs(usernote): Document usernote types in schema and in documentation 2022-06-27 09:33:57 -04:00
FoxxMD
aa399c160e docs(sentiment): Add sentiment documentation 2022-06-21 16:13:38 -04:00
142 changed files with 26968 additions and 1815 deletions

View File

@@ -8,6 +8,7 @@ coverage
.idea
*.bak
*.sqlite
*.sqlite*
*.json
*.json5
*.yaml

View File

@@ -81,8 +81,9 @@ RUN apk add --no-cache \
#
# vips required to run sharp library for image comparison
# opencv required for other image processing
RUN echo "http://dl-4.alpinelinux.org/alpine/v3.14/community" >> /etc/apk/repositories \
&& apk --no-cache add vips
&& apk --no-cache add vips opencv opencv-dev
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
@@ -115,11 +116,24 @@ RUN npm install --production \
&& rm -rf node_modules/ts-node \
&& rm -rf node_modules/typescript
# build bindings for opencv
RUN apk add --no-cache --virtual .build-deps \
make \
g++ \
gcc \
libgcc \
&& npm run cv-install-docker-prebuild \
&& apk del .build-deps
ENV NPM_CONFIG_LOGLEVEL debug
# can set database to use more performant better-sqlite3 since we control everything
ENV DB_DRIVER=better-sqlite3
# NODE_ARGS are expanded after `node` command in the entrypoint IE "node {NODE_ARGS} src/index.js run"
# by default enforce better memory management by limiting max long-lived GC space to 512MB
ENV NODE_ARGS="--max_old_space_size=512"
ARG webPort=8085
ENV PORT=$webPort
EXPOSE $PORT

View File

@@ -30,6 +30,7 @@ Feature Highlights for **Moderators:**
* Event notification via Discord
* [**Web interface**](#web-ui-and-screenshots) for monitoring, administration, and oauth bot authentication
* [**Placeholders**](/docs/subreddit/actionTemplating.md) (like automoderator) can be configured via a wiki page or raw text and supports [mustache](https://mustache.github.io) templating
* [**Partial Configurations**](/docs/subreddit/components/README.md#partial-configurations) -- offload parts of your configuration to shared locations to consolidate logic between multiple subreddits
Feature highlights for **Developers and Hosting (Operators):**
@@ -43,6 +44,7 @@ Feature highlights for **Developers and Hosting (Operators):**
* Historical statistics
* [Docker container support](/docs/operator/installation.md#docker-recommended)
* Easy, UI-based [OAuth authentication](/docs/operator/addingBot.md) for adding Bots and moderator dashboard
* Integration with [InfluxDB](https://www.influxdata.com) for detailed [time-series metrics](/docs/operator/database.md#influx) and a pre-built [Grafana](https://grafana.com) [dashboard](/docs/operator/database.md#grafana)
# Table of Contents
@@ -145,6 +147,13 @@ A built-in editor using [monaco-editor](https://microsoft.github.io/monaco-edito
![Configuration View](docs/images/editor.jpg)
### [Grafana Dashboard](/docs/operator/database.md#grafana)
* Overall stats (active bots/subreddits, api calls, per second/hour/minute activity ingest)
* Over time graphs for events, per subreddit, and for individual rules/check/actions
![Grafana Dashboard](/docs/images/grafana.jpg)
## License
[MIT](/LICENSE)

View File

@@ -2,6 +2,8 @@
# used https://github.com/linuxserver/docker-plex as a template
# NODE_ARGS can be passed by ENV in docker command like "docker run foxxmd/context-mod -e NODE_ARGS=--optimize_for_size"
exec \
s6-setuidgid abc \
/usr/local/bin/node /app/src/index.js run
/usr/local/bin/node $NODE_ARGS /app/src/index.js run

BIN
docs/images/grafana.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 183 KiB

View File

@@ -21,7 +21,7 @@ They are responsible for configuring the software at a high-level and managing a
# Overview
CM is composed of two applications that operate indepedently but are packaged together such that they act as one piece of software:
CM is composed of two applications that operate independently but are packaged together such that they act as one piece of software:
* **Server** -- Responsible for **running the bot(s)** and providing an API to retrieve information on and interact with them EX start/stop bot, reload config, retrieve operational status, etc.
* **Client** -- Responsible for serving the **web interface** and handling the bot oauth authentication flow between operators and subreddits/bots.

View File

@@ -136,6 +136,8 @@ You will need have this information available:
See the [**example minimum configuration** below.](#minimum-config)
This configuration can also be **generated** by CM if you start CM with **no configuration defined** and visit the web interface.
# Bots
Configured using the `bots` top-level property. Bot configuration can override and specify many more options than are available at the operator-level. Many of these can also set the defaults for each subreddit the bot runs:

View File

@@ -130,3 +130,59 @@ retention: 2000
runs:
...
```
# Influx
ContextMod supports writing detailed time-series data to [InfluxDB](https://www.influxdata.com/).
This data can be used to monitor the overall health, performance, and metrics for a ContextMod server. Currently, this data can **only be used by an Operator** as it requires access to the operator configuration and CM instance.
CM supports InfluxDB OSS > 2.3 or InfluxDB Cloud.
**Note:** This is an **advanced feature** and assumes you have enough technical knowledge to follow the documentation provided by each application to deploy and configure them. No support is guaranteed for installation, configuration, or use of Influx and Grafana.
## Supported Metrics
TBA
## Setup
### InfluxDB OSS
* Install [InfluxDB](https://docs.influxdata.com/influxdb/v2.3/install/)
* [Configure InfluxDB using the UI](https://docs.influxdata.com/influxdb/v2.3/install/#set-up-influxdb-through-the-ui)
* You will need **Username**, **Password**, **Organization Name**, and **Bucket Name** later for Grafana setup so make sure to record them somewhere
* [Create a Token](https://docs.influxdata.com/influxdb/v2.3/security/tokens/create-token/) with enough permissions to write/read to the bucket you configured
* After the token is created **view/copy the token** to clipboard by clicking the token name. You will need this for Grafana setup.
### ContextMod
Add the following block to the top-level of your operator configuration:
```yaml
influxConfig:
credentials:
url: 'http://localhost:8086' # URL to your influx DB instance
token: '9RtZ5YZ6bfEXAMPLENJsTSKg==' # token created in the previous step
org: MyOrg # organization created in the previous step
bucket: contextmod # name of the bucket created in the previous step
```
## Grafana
A pre-built dashboard for [Grafana](https://grafana.com) can be imported to display overall metrics/stats using InfluxDB data.
![Grafana Dashboard](/docs/images/grafana.jpg)
* Create a new Data Source using **InfluxDB** type
* Choose **Flux** for the **Query Language**
* Fill in the details for **URL**, **Basic Auth Details** and **InfluxDB Details** using the data you created in the [Influx Setup step](#influxdb-oss)
* Set **Min time interval** to `60s`
* Click **Save and test**
* Import Dashboard
* **Browse** the Dashboard pane
* Click **Import** and **upload** the [grafana dashboard json file](/docs/operator/grafana.json)
* Choose the data source you created from the **InfluxDB CM** dropdown
* Click **Import**
The dashboard can be filtered by **Bots** and **Subreddits** dropdowns at the top of the page to get more specific details.

View File

@@ -4,9 +4,7 @@ This getting started guide is for **Operators** -- that is, someone who wants to
* [Installation](#installation)
* [Create a Reddit Client](#create-a-reddit-client)
* [Create a Minimum Configuration](#create-a-minimum-configuration)
* [Local Installation](#local-installation)
* [Docker Installation](#docker-installation)
* [Start ContextMod](#start-contextmod)
* [Add a Bot to CM](#add-a-bot-to-cm)
* [Access The Dashboard](#access-the-dashboard)
* [What's Next?](#whats-next)
@@ -19,29 +17,25 @@ Follow the [installation](/docs/operator/installation.md) documentation. It is r
[Create a reddit client](/docs/operator/README.md#provisioning-a-reddit-client)
# Create a Minimum Configuration
# Start ContextMod
Using the information you received in the previous step [create a minimum file configuration](/docs/operator/configuration.md#minimum-configuration) save it as `config.yaml` somewhere.
Start CM using the example command from your [installation](#installation) and visit http://localhost:8085
# Start ContextMod With Configuration
The First Time Setup page will ask you to input:
## Local Installation
* Client ID (from [Create a Reddit Client](#create-a-reddit-client))
* Client Secret (from [Create a Reddit Client](#create-a-reddit-client))
* Operator -- this is the username of your main Reddit account.
If you [installed CM locally](/docs/installation.md#locally) move your configuration file `config.yaml` to the root of the project directory (where `package.json` is located).
From the root directory run this command to start CM
```
node src/index.js run
```
## Docker Installation
If you [installed CM using Docker](/docs/installation.md#docker-recommended) make note of the directory you saved your minimum configuration to and substitute its full path for `host/path/folder` in the docker command shown in the [docker install directions](/docs/operator/installation.md#docker-recommended)
**Write Config** and then restart CM. You have now created the [minimum configuration](/docs/operator/configuration.md#minimum-configuration) required to run CM.
# Add A Bot to CM
Once CM is up and running use the [CM OAuth Helper](/docs/operator/addingBot.md#cm-oauth-helper-recommended) to add authorize and add a Bot to your CM instance.
You should automatically be directed to the [Bot Invite Helper](/docs/operator/addingBot.md#cm-oauth-helper-recommended) used to authorize and add a Bot to your CM instance.
Follow the directions here and **create an Authorization Invite** at the bottom of the page.
Next, login to Reddit with the account you will be using as the Bot and then visit the **Authorization Invite** link you created. Follow the steps there to finish adding the Bot to your CM instance.
# Access The Dashboard
@@ -57,4 +51,4 @@ As an operator you should familiarize yourself with how the [operator configurat
If you are also the moderator of the subreddit the bot will be running you should check out the [moderator getting started guide.](/docs/subreddit/gettingStarted.md#setup-wiki-page)
You might also be interested in these [quick tips for using the web interface](/docs/webInterface.md)
You might also be interested in these [quick tips for using the web interface](/docs/webInterface.md). Additionally, on the dashboard click the **Help** button at the top of the page to get a guided tour of the dashboard.

3148
docs/operator/grafana.json Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -10,9 +10,10 @@ PROTIP: Using a container management tool like [Portainer.io CE](https://www.por
### [Dockerhub](https://hub.docker.com/r/foxxmd/context-mod)
An example of starting the container using the [minimum configuration](/docs/operator/operatorConfiguration.md#minimum-config) with a [configuration file](/docs/operator/operatorConfiguration.md#defining-configuration-via-file):
An example of starting the container using the [minimum configuration](/docs/operator/configuration.md#minimum-config):
* Bind the directory where your config file, logs, and database are located on your host machine into the container's default `DATA_DIR` by using `-v /host/path/folder:/config`
* Note: **You must do this** or else your configuration will be lost next time your container is updated.
* Expose the web interface using the container port `8085`
```
@@ -59,6 +60,65 @@ An example of running CM using the [minimum configuration](/docs/operator/config
node src/index.js run
```
### Dependencies
Note: All below dependencies are automatically included in the [Docker](#docker-recommended) image.
#### Sharp
For basic [Image Comparisons](/docs/imageComparison.md) and image data CM uses [sharp](https://sharp.pixelplumbing.com/) which depends on [libvips](https://www.libvips.org/)
Binaries for Sharp and libvips ship with the `sharp` npm package for all major operating systems and should require no additional steps to use -- installing CM with the above script should be sufficient.
See more about Sharp dependencies in the [image comparison prerequisites.](/docs/imageComparison.md#prerequisites)
#### OpenCV
For advanced image comparison CM uses [OpenCV.](https://opencv.org/) OpenCV is an **optional** dependency that is only utilized if CM is configured to run these advanced image operations so if you are NOT doing any image-related operations you can safely ignore this section/dependency.
**NOTE:** Depending on the image being compared (resolution) and operations being performed this can be a **CPU heavy resource.** TODO: Add rules that are cpu heavy...
##### Installation
Installation is not an automatic process. The below instructions are a summary of "easy" paths for installation but are not exhaustive. DO reference the detailed instructions (including additional details for windows installs) at [opencv4nodejs How to Install](https://github.com/UrielCh/opencv4nodejs#how-to-install).
###### Build From Source
This may take **some time** since openCV will be built from scratch.
On windows you must first install build tools: `npm install --global windows-build-tools`
Otherwise, run one of the following commands from the CM project directory:
* For CUDA (Nvidia GPU acceleration): `npm run cv-autoinstall-cuda`
* Normal: `npm run cv-autoinstall`
###### Build from Prebuilt
In this use-case you already have openCV built OR are using a distro prebuilt package. This method is much faster than building from source as only bindings need to be built.
[More information on prebuild installation](https://github.com/UrielCh/opencv4nodejs#installing-opencv-manually)
Prerequisites:
* Windows `choco install OpenCV -y -version 4.1.0`
* MacOS `brew install opencv@4; brew link --force opencv@4`
* Linux -- varies, check your package manager for `opencv` and `opencv-dev`
A script for building on **Ubuntu** is already included in CM:
* `sudo apt install opencv-dev`
* `npm run cv-install-ubuntu-prebuild`
Otherwise, you will need to modify `scripts` in CM's `package.json`, use the script `cv-install-ubuntu-prebuild` as an example. Your command must include:
* `--incDir [path/to/opencv/dev-files]` (on linux, usually `/usr/include/opencv4/`)
* `--libDir [path/to/opencv/shared-files]` (on linux usually `/lib/x86_64-linux-gnu/` or `/usr/lib/`)
* `--binDir=[path/to/openv/binaries]` (on linux usually `/usr/bin/`)
After you have modified/added a script for your operating system run it with `npm run yourScriptName`
## [Heroku Quick Deploy](https://heroku.com/about)
**NOTE:** This is still experimental and requires more testing.
@@ -76,3 +136,21 @@ Be aware that Heroku's [free dyno plan](https://devcenter.heroku.com/articles/fr
* The **Worker** dyno **will not** go to sleep but you will NOT be able to access the web interface. You can, however, still see how CM is running by reading the logs for the dyno.
If you want to use a free dyno it is recommended you perform first-time setup (bot authentication and configuration, testing, etc...) with the **Web** dyno, then SWITCH to a **Worker** dyno so it can run 24/7.
# Memory Management
Node exhibits [lazy GC cleanup](https://github.com/FoxxMD/context-mod/issues/90#issuecomment-1190384006) which can result in memory usage for long-running CM instances increasing to unreasonable levels. This problem does not seem to be an issue with CM itself but with Node's GC approach. The increase does not affect CM's performance and, for systems with less memory, Node *should* limit memory usage based on total available.
In practice CM uses ~130MB for a single bot, single subreddit setup. Up to ~350MB for many (10+) bots or many (20+) subreddits.
If you need to rein in CM's memory usage for some reason this can be addressed by setting an upper limit for memory usage with `node` args by using either:
**--max_old_space_size=**
Value is megabytes. This sets an explicit limit on GC memory usage.
This is set by default in the [Docker](#docker-recommended) container using the env `NODE_ARGS` to `--max_old_space_size=512`. It can be disabled by overriding the ENV.
**--optimize_for_size**
Tells Node to optimize for (less) memory usage rather than some performance optimizations. This option is not memory size dependent. In practice performance does not seem to be affected and it reduces (but not entirely prevents) memory increases over long periods.

View File

@@ -2,44 +2,92 @@ Actions that can submit text (Report, Comment, UserNote) will have their `conten
See here for a [cheatsheet](https://gist.github.com/FoxxMD/d365707cf99fdb526a504b8b833a5b78) and [here](https://www.tsmean.com/articles/mustache/the-ultimate-mustache-tutorial/) for a more thorough tutorial.
# Template Data
## Activity Data
Activity data can be accessed using the `item` variable. Example
```
This activity is a {{item.kind}} with {{item.votes}} votes, created {{item.age}} ago.
```
Produces:
> This activity is a submission with 10 votes created 5 minutes ago.
### Common
All Actions with `content` have access to this data:
```json5
| Name | Description | Example |
|-------------|-----------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------|
| `kind` | The Activity type (submission or comment) | submission |
| `author` | Name of the Author of the Activity being processed | FoxxMD |
| `permalink` | URL to the Activity | https://reddit.com/r/mySubreddit/comments/ab23f/my_post |
| `votes` | Number of upvotes | 69 |
| `age` | The age of the Activity in a [human friendly format](https://day.js.org/docs/en/durations/humanize) | 5 minutes |
| `botLink` | A URL to CM's introduction thread | https://www.reddit.com/r/ContextModBot/comments/otz396/introduction_to_contextmodbot |
{
item: {
kind: 'string', // the type of item (comment/submission)
author: 'string', // name of the item author (reddit user)
permalink: 'string', // a url to the item
url: 'string', // if the item is a Submission then its URL (external for link type submission, reddit link for self-posts)
title: 'string', // if the item is a Submission, then the title of the Submission,
botLink: 'string' // a link to the bot's FAQ
},
rules: {
// contains all rules that were run and are accessible using the name, lowercased, with all spaces/dashes/underscores removed
}
}
### Submissions
If the **Activity** is a Submission these additional properties are accessible:
| Name | Description | Example |
|---------------|-----------------------------------------------------------------|-------------------------|
| `upvoteRatio` | The upvote ratio | 100% |
| `nsfw` | If the submission is marked as NSFW | true |
| `spoiler` | If the submission is marked as a spoiler | true |
| `url` | If the submission was a link then this is the URL for that link | http://example.com |
| `title` | The title of the submission | Test post please ignore |
### Comments
If the **Activity** is a Comment these additional properties are accessible:
| Name | Description | Example |
|------|--------------------------------------------------------------|---------|
| `op` | If the Author is the OP of the Submission this comment is in | true |
### Moderator
If the **Activity** occurred in a Subreddit the Bot moderates these properties are accessible:
| Name | Description | Example |
|---------------|-------------------------------------|---------|
| `reports` | The number of reports received | 1 |
| `modReports` | The number of reports by moderators | 1 |
| `userReports` | The number of reports by users | 1 |
## Rule Data
### Summary
A summary of what rules were processed and which were triggered, with results, is available using the `ruleSummary` variable. Example:
```
A summary of rules processed for this activity:
The properties of `rules` are accessible using the name, lower-cased, with all spaces/dashes/underscores removed. If no name is given then `kind` is used as `name`. Example:
{{ruleSummary}}
```
"rules": [
{
"name": "My Custom-Recent Activity Rule", // mycustomrecentactivityrule
"kind": "recentActivity"
},
{
// name = repeatsubmission
"kind": "repeatActivity",
}
]
Would produce:
> A summary of rules processed for this activity:
>
> * namedRegexRule - ✘
> * nameAttributionRule - ✓ - 1 Attribution(s) met the threshold of < 20%, with 1 (3%) of 32 Total -- window: 6 months
> * noXPost ✓ - ✓ 1 of 1 unique items repeated <= 3 times, largest repeat: 1
### Individual
Individual **Rules** can be accessed using the name of the rule, **lower-cased, with all spaces/dashes/underscores removed.** Example:
```
Submission was repeated {{rules.noxpost.largestRepeat}} times
```
Produces
**To see what data is available for individual Rules [consult the schema](#configuration) for each Rule.**
> Submission was repeated 7 times
#### Quick Templating Tutorial

View File

@@ -21,6 +21,7 @@ This list is not exhaustive. [For complete documentation on a subreddit's config
* [Author](#author)
* [Regex](#regex)
* [Repost](#repost)
* [Sentiment Analysis](#sentiment-analysis)
* [Rule Sets](#rule-sets)
* [Actions](#actions)
* [Named Actions](#named-actions)
@@ -28,6 +29,7 @@ This list is not exhaustive. [For complete documentation on a subreddit's config
* [List of Actions](#list-of-actions)
* [Approve](#approve)
* [Ban](#ban)
* [Submission](#submission)
* [Comment](#comment)
* [Contributor (Add/Remove)](#contributor)
* [Dispatch/Delay](#dispatch)
@@ -66,6 +68,7 @@ This list is not exhaustive. [For complete documentation on a subreddit's config
* [Check Order](#check-order)
* [Rule Order](#rule-order)
* [Configuration Re-use and Caching](#configuration-re-use-and-caching)
* [Partial Configurations](#partial-configurations)
* [Subreddit-ready examples](#subreddit-ready-examples)
# Runs
@@ -368,6 +371,12 @@ The **Repost** rule is used to find reposts for both **Submissions** and **Comme
This rule is for searching **all of Reddit** for reposts, as opposed to just the history of the Author of the Activity being checked. If you only want to check for reposts by the Author of the Activity being checked you should use the [Repeat Activity](/docs/subreddit/components/repeatActivity) rule.
### Sentiment Analysis
[**Full Documentation**](/docs/subreddit/components/sentiment)
The **Sentiment Rule** is used to determine the overall emotional intent (negative, neutral, positive) of a Submission or Comment by analyzing the actual text content of the Activity.
# Rule Sets
The `rules` list on a `Check` can contain both `Rule` objects and `RuleSet` objects.
@@ -486,11 +495,30 @@ actions:
### Comment
Reply to the Activity being processed with a comment. [Schema Documentation](https://json-schema.app/view/%23/%23%2Fdefinitions%2FSubmissionCheckJson/%23%2Fdefinitions%2FCommentActionJson?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json)
Reply to an Activity with a comment. [Schema Documentation](https://json-schema.app/view/%23/%23%2Fdefinitions%2FSubmissionCheckJson/%23%2Fdefinitions%2FCommentActionJson?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json)
* If the Activity is a Submission the comment is a top-level reply
* If the Activity is a Comment the comment is a child reply
#### Templating
`content` can be [templated](#templating) and use [URL Tokens](#url-tokens)
#### Targets
Optionally, specify the Activity CM should reply to. **When not specified CM replies to the Activity being processed using `self`**
Valid values: `self`, `parent`, or a Reddit permalink.
`self` and `parent` are special targets that are relative to the Activity being processed:
* When the Activity being processed is a **Submission** => `parent` logs a warning and does nothing
* When the Activity being processed is a **Comment**
* `self` => reply to Comment
  * `parent` => make a top-level Comment in the **Submission** the Comment belongs to
If target is not self/parent then CM assumes the value is a **reddit permalink** and will attempt to make a Comment to that Activity
```yaml
actions:
- kind: comment
@@ -498,7 +526,71 @@ actions:
distinguish: boolean # distinguish as a mod
sticky: boolean # sticky comment
lock: boolean # lock the comment after creation
targets: string # 'self' or 'parent' or 'https://reddit.com/r/someSubreddit/21nfdi....'
```
### Submission
Create a Submission [Schema Documentation](https://json-schema.app/view/%23/%23%2Fdefinitions%2FSubmissionCheckJson/%23%2Fdefinitions%2FSubmissionActionJson?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json)
The Submission type, Link or Self-Post, is determined based on the presence of `url` in the action's configuration.
```yaml
actions:
- kind: submission
title: string # required, the title of the submission. can be templated.
content: string # the body of the submission. can be templated
url: string # if specified the submission will be a Link Submission. can be templated
distinguish: boolean # distinguish as a mod
sticky: boolean # sticky comment
lock: boolean # lock the comment after creation
nsfw: boolean # mark submission as NSFW
spoiler: boolean # mark submission as a spoiler
flairId: string # flair template id for submission
flairText: string # flair text for submission
targets: string # 'self' or a subreddit name IE mealtimevideos
```
#### Templating
`content`,`url`, and `title` can be [templated](#templating) and use [URL Tokens](#url-tokens)
TIP: To create a Link Submission pointing to the Activity currently being processed use
```yaml
actions:
- kind: submission
url: {{item.permalink}}
# ...
```
#### Targets
Optionally, specify the Subreddit the Submission should be made in. **When not specified CM uses `self`**
Valid values: `self` or Subreddit Name
* `self` => (**Default**) Create Submission in the same Subreddit of the Activity being processed
* Subreddit Name => Create Submission in given subreddit IE `mealtimevideos`
* Your bot must be able to access and be able to post in the given subreddit
Example:
```yaml
actions:
- kind: comment
targets: mealtimevideos
```
To post to multiple subreddits use a list:
```yaml
actions:
- kind: comment
targets:
- self
- mealtimevideos
- anotherSubreddit
```
### Contributor
@@ -615,16 +707,19 @@ actions:
Remove the Activity being processed. [Schema Documentation](https://json-schema.app/view/%23/%23%2Fdefinitions%2FSubmissionCheckJson/%23%2Fdefinitions%2FRemoveActionJson?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fedge%2Fsrc%2FSchema%2FApp.json)
* **note** can be [templated](#templating)
* **reasonId** IDs can be found in the [editor](/docs/webInterface.md) using the **Removal Reasons** popup
If neither note nor reasonId are included then no removal reason is added.
```yaml
actions:
- kind: remove
spam: boolean # optional, mark as spam on removal
spam: false # optional, mark as spam on removal
note: 'a moderator-readable note' # optional, a note only visible to moderators (new reddit only)
reasonId: '2n0f4674-365e-46d2-8fc7-a337d85d5340' # optional, the ID of a removal reason to add to the removal action (new reddit only)
```
#### What About Removal Reason?
Reddit does not support setting a removal reason through the API. Please complain in [r/modsupport](https://www.reddit.com/r/modsupport) or [r/redditdev](https://www.reddit.com/r/redditdev) to help get this added :)
### Report
Report the Activity being processed. [Schema Documentation](https://json-schema.app/view/%23/%23%2Fdefinitions%2FSubmissionCheckJson/%23%2Fdefinitions%2FReportActionJson?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fedge%2Fsrc%2FSchema%2FApp.json)
@@ -656,6 +751,8 @@ actions:
### Mod Note
[**Full Documentation**](/docs/subreddit/components/modActions/README.md#mod-note-action)
Add a [Mod Note](https://www.reddit.com/r/modnews/comments/t8vafc/announcing_mod_notes/) for the Author of the Activity.
* `type` must be one of the [valid note labels](https://www.reddit.com/dev/api#POST_api_mod_notes):
@@ -1064,7 +1161,7 @@ If the Check is using `AND` condition for its rules (default) then if either Rul
**It is therefore advantageous to list your lightweight Rules first in each Check.**
### Configuration Re-use and Caching
## Configuration Re-use and Caching
ContextMod implements caching functionality for:
@@ -1088,6 +1185,116 @@ PROTIP: You can monitor the re-use of cache in the `Cache` section of your subre
[Learn more about how Caching works](/docs/operator/caching.md)
## Partial Configurations
ContextMod supports fetching parts of a configuration (a **Fragment**) from an external source. Fragments are an advanced feature and should only be used by users who are familiar with CM's configuration syntax and understand the risks/downsides associated with fragmenting a configuration.
**Fragments** are supported for:
* [Runs](#runs)
* [Checks](#checks)
* [Rules](#rules)
* [Actions](#actions)
### Should You Use Partial Configurations?
* **PROS**
* Consolidate shared configuration for many subreddits into one location
* Shared configuration can be updated independently of subreddits
* Allows sharing access to configuration outside of moderators of a specific subreddit or even reddit
* **CONS**
* Editor does not currently support viewing, editing, or updating Fragments. Only the Fragment URL is visible in a Subreddit's configuration
* No editor support for viewing obscures "complete view" of configuration and makes editor less useful for validation
* Currently, editor cannot validate individual Fragments. They must be copy-pasted "in place" within a normal configuration.
* Using external (non-wiki) sources means **you** are responsible for the security/access to the fragment
In general, Fragments should only be used to offload small, well-tested pieces of a configuration that can be shared between many subreddits. Examples:
* A regex Rule for spam links
* A Recent Activity Rule for reporting users from freekarma subreddits
### Usage
A Fragment may be either a special string or a Fragment object. The fetched Fragment can be either an object or an array of objects of the type of Fragment being replaced.
**String**
If the value starts with `wiki:` then the value following it will be used to get a wiki page from the current subreddit
* EX `wiki:botconfig/myFragment` tries to get `https://reddit.com/r/currentSubreddit/wiki/botconfig/myFragment`
If the value starts with `wiki:` and ends with `|someValue` then `someValue` will be used as the base subreddit for the wiki page
* EX `wiki:myFragment/test|ContextModBot` tries to get `https://reddit.com/r/ContextModBot/wiki/myFragment/test`
If the value starts with `url:` then the value is fetched as an external url and expects raw text returned
* EX `url:https://pastebin.com/raw/38qfL7mL` tries to get the text response of `https://pastebin.com/raw/38qfL7mL`
**Object**
The object contains:
* `path` -- REQUIRED string following rules above
* `ttl` -- OPTIONAL, number of seconds to cache the URL result. Defaults to `WikiTTL`
#### Examples
**Replacing A Rule with a URL Fragment**
```yaml
runs:
- checks:
- name: Free Karma Alert
description: Check if author has posted in 'freekarma' subreddits
kind: submission
rules:
- 'url:https://gist.githubusercontent.com/FoxxMD/0e1ee1ab950ff4d1f0cd26172bae7f8f/raw/0ebfaca903e4a651827effac5775c8718fb6e1f2/fragmentRule.yaml'
- name: badSub
kind: recentActivity
useSubmissionAsReference: false
thresholds:
# if the number of activities (sub/comment) found CUMULATIVELY in the subreddits listed is
# equal to or greater than 1 then the rule is triggered
- threshold: '>= 1'
subreddits:
- MyBadSubreddit
window: 7 days
actions:
- kind: report
content: 'uses freekarma subreddits and bad subreddits'
```
**Replacing A Rule with a URL Fragment (Multiple)**
```yaml
runs:
- checks:
- name: Free Karma Alert
description: Check if author has posted in 'freekarma' subreddits
kind: submission
rules:
- 'url:https://gist.githubusercontent.com/FoxxMD/0e1ee1ab950ff4d1f0cd26172bae7f8f/raw/0ebfaca903e4a651827effac5775c8718fb6e1f2/fragmentRuleArray.yaml'
actions:
- kind: report
content: 'uses freekarma subreddits and bad subreddits'
```
**Replacing A Rule with a Wiki Fragment**
```yaml
runs:
- checks:
- name: Free Karma Alert
description: Check if author has posted in 'freekarma' subreddits
kind: submission
rules:
- 'wiki:freeKarmaFrag'
actions:
- kind: report
content: 'uses freekarma subreddits'
```
# Subreddit-Ready Examples
Refer to the [Subreddit-Ready Examples](/docs/subreddit/components/subredditReady) section to find ready-to-use configurations for common scenarios (spam, freekarma blocking, etc...). This is also a good place to familiarize yourself with what complete configurations look like.

View File

@@ -40,7 +40,7 @@
// for this to pass the Author of the Submission must not have the flair "Supreme Memer" and have the name "user1" or "user2"
{
"flairText": ["Supreme Memer"],
"name": ["user1","user2"]
"names": ["user1","user2"]
},
{
// for this to pass the Author of the Submission must not have the flair "Decent Memer"

View File

@@ -30,7 +30,7 @@ runs:
# for this to pass the Author of the Submission must not have the flair "Supreme Memer" and have the name "user1" or "user2"
- flairText:
- Supreme Memer
name:
names:
- user1
- user2
# for this to pass the Author of the Submission must not have the flair "Decent Memer"

View File

@@ -0,0 +1,183 @@
# Table of Contents
* [Overview](#overview)
* [Pros And Cons](#pros-and-cons)
* [Technical Overview](#technical-overview)
* [Sentiment Values](#sentiment-values)
* [Usage](#usage)
* [Testing Sentiment Value](#testing-sentiment-value)
* [Numerical](#numerical)
* [Text](#text)
* [Sentiment Rule](#sentiment-rule)
* [Historical](#historical)
* [Examples](#examples)
# Overview
[Sentiment Analysis](https://monkeylearn.com/sentiment-analysis/) (SA) is a form of [Natural Language Processing](https://monkeylearn.com/natural-language-processing/) (NLP) used to extract the overall [sentiment](https://www.merriam-webster.com/dictionary/sentiment) (emotional intent) from a piece of text. Simply, SA is used to determine how positive or negative the emotion of a sentence is.
Examples:
* "I love how curly your hair is" -- very positive
* "The United States is over 200 years old" -- neutral
* "Frankly, your face is disgusting and I would hate to meet you" -- very negative
SA can be a powerful signal for determining the intent of a user's comment/submission. However, it should not be the **only** tool as it comes with both strengths and weaknesses.
## Pros and Cons
Pros
* In terms of Reddit API usage, SA is **free**. It requires no API calls and is computationally trivial.
* Extremely powerful signal for intent since it analyzes the actual text content of an activity
* Requires almost no setup to use
* Can be used as a substitute for regex/keyword matching when looking for hateful/toxic comments
* English language comprehension is very thorough
* Uses 3 independent algorithms to evaluate sentiment
* Understands common english slang, internet slang, and emojis
Cons
* Language limited -- only supported for English (most thorough), French, German, and Spanish
* Less accurate for small word count content (less than 4 words)
* Does not understand sarcasm/jokes
* Accuracy depends on use of common words
* Accuracy depends on clear intent
* Heavy nuance, obscure word choice, and hidden meanings are not understood
## Technical Overview
ContextMod attempts to identify the language of the content it is processing. Based on its confidence of the language it will use up to three different NLP libraries to extract sentiment:
* [NLP.js](https://github.com/axa-group/nlp.js/blob/master/docs/v3/sentiment-analysis.md) (english, french, german, and spanish)
* [vaderSentiment-js](https://github.com/vaderSentiment/vaderSentiment-js/) (english only)
* [wink-sentiment](https://github.com/winkjs/wink-sentiment) (english only)
The above libraries make use of these Sentiment Analysis algorithms:
* VADER https://github.com/cjhutto/vaderSentiment
* AFINN http://corpustext.com/reference/sentiment_afinn.html
* Senticon https://ieeexplore.ieee.org/document/8721408
* Pattern https://github.com/clips/pattern
* wink https://github.com/winkjs/wink-sentiment (modified AFINN with emojis)
Each library produces a normalized score: the sum of all the valence values for each recognized token in its lexicon, divided by the number of words/tokens.
ContextMod takes each normalized score and adjusts it to be between -1 and +1. It then finds the average of all normalized scores to produce a final sentiment between -1 and +1.
# Sentiment Values
Each piece of content ContextMod analyzes produces a score from -1 to +1 to represent the sentiment of that content.
| Score | Sentiment |
|-------|--------------------|
| -1 | |
| -0.6 | Extremely Negative |
| -0.3 | Very Negative |
| -0.1 | Negative |
| 0 | Neutral |
| 0.1 | Positive |
| 0.3 | Very Positive |
| 0.6 | Extremely Positive |
| 1 | |
# Usage
## Testing Sentiment Value
Testing for sentiment in the Sentiment Rule is done using either a **text** or **numerical** comparison.
### Numerical
Similar to other numerical comparisons in CM -- use an equality operator and the number to test for:
* `> 0.1` -- sentiment is at least positive
* `<= -0.1` -- sentiment is at least (mildly) negative
Testing for *only* neutral sentiment should be done use a text comparison (below).
### Text
Use any of the **Sentiment** text values from the above table to form a test:
* `is very positive`
* `is neutral`
* `is extremely negative`
You may also use the `not` operator:
* `is not negative`
* `is not very negative`
* `is not neutral`
## Sentiment Rule
An example rule that tests the current comment/submission to see if it has negative sentiment:
```yaml
sentiment: 'is negative'
```
It's very simple :)
### Historical
You may also test the Sentiment of Activities from the user's history. (Note: this may use an API call to get history)
```yaml
sentiment: 'is negative'
historical:
window:
count: 50
mustMatchCurrent: true # optional, the initial activity being tested must test true ("is positive" must be true) before historical tests are run
sentimentVal: 'is very negative' # optional, if the sentiment test to use for historical content is different than the initial test
totalMatching: '> 3' # optional, a comparison for how many historical activities must match sentimentVal
```
# Examples
#### Check with Rules for recent problem subreddit activity and negative sentiment in comment
```yaml
name: Probably Toxic Comment
kind: comment
rules:
- kind: recentActivity
thresholds:
- aProblemSubreddit
- kind: sentiment
name: negsentiment
sentiment: 'is very negative'
actions:
- kind: report
content: 'Sentiment of {{rules.negsentiment.averageScore}} {{rules.negsentiment.sentimentTest}}'
```
#### Check with Rules for recent problem subreddit activity and negative sentiment in comment history from problem subreddits
```yaml
name: Toxic Comment With History
kind: comment
rules:
- kind: recentActivity
thresholds:
- aProblemSubreddit
- aSecondProblemSubreddit
- kind: sentiment
sentiment: 'is very negative'
historical:
sentimentVal: 'is negative'
mustMatchCurrent: true
totalMatching: '> 1'
window:
count: 100
filterOn:
post:
subreddits:
include:
- name:
- aProblemSubreddit
- aSecondProblemSubreddit
actions:
- kind: remove
```

View File

@@ -6,6 +6,16 @@ Context Mod supports reading and writing [User Notes](https://www.reddit.com/r/t
[Click here for the Toolbox Quickstart Guide](https://www.reddit.com/r/toolbox/wiki/docs/quick_start)
Valid Note Types:
* `gooduser`
* `spamwatch`
* `spamwarn`
* `abusewarn`
* `ban`
* `permban`
* `botban`
## Filter
User Notes are an additional criteria on [AuthorCriteria](https://json-schema.app/view/%23%2Fdefinitions%2FAuthorCriteria?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FApp.json) that can be used alongside other Author properties for both [filtering rules and in the AuthorRule.](/docs/subreddit/components/author/)

View File

@@ -20,6 +20,7 @@
## Web Dashboard Tips
* Click the **Help** button at the top of the page to get a **guided tour of the dashboard**
* Use the [**Overview** section](/docs/images/botOperations.png) to control the bot at a high-level
* You can **manually run** the bot on any activity (comment/submission) by pasting its permalink into the [input field below the Overview section](/docs/images/runInput.png) and hitting one of the **run buttons**
* **Dry run** will make the bot run on the activity but it will only **pretend** to run actions, if triggered. This is super useful for testing your config without consequences

2025
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
{
"name": "redditcontextbot",
"version": "0.5.1",
"version": "0.11.4",
"description": "",
"main": "index.js",
"scripts": {
@@ -8,18 +8,24 @@
"build": "tsc && npm run bundle-front",
"bundle-front": "browserify src/Web/assets/browser.js | terser --compress --mangle > src/Web/assets/public/browserBundle.js",
"start": "node src/index.js run",
"schema": "npm run -s schema-app & npm run -s schema-ruleset & npm run -s schema-rule & npm run -s schema-action & npm run -s schema-config",
"schema-app": "typescript-json-schema tsconfig.json JSONConfig --out src/Schema/App.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
"schema-ruleset": "typescript-json-schema tsconfig.json RuleSetJson --out src/Schema/RuleSet.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
"schema-rule": "typescript-json-schema tsconfig.json RuleJson --out src/Schema/Rule.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
"schema-action": "typescript-json-schema tsconfig.json ActionJson --out src/Schema/Action.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
"schema": "npm run -s schema-app & npm run -s schema-ruleset & npm run -s schema-rule & npm run -s schema-action & npm run -s schema-check & npm run -s schema-run & npm run -s schema-config",
"schema-app": "typescript-json-schema tsconfig.json SubredditConfigData --out src/Schema/App.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
"schema-ruleset": "typescript-json-schema tsconfig.json RuleSetConfigData --out src/Schema/RuleSet.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
"schema-rule": "typescript-json-schema tsconfig.json RuleConfigData --out src/Schema/Rule.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
"schema-check": "typescript-json-schema tsconfig.json ActivityCheckConfigValue --out src/Schema/Check.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
"schema-run": "typescript-json-schema tsconfig.json RunConfigValue --out src/Schema/Run.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
"schema-action": "typescript-json-schema tsconfig.json ActionConfigData --out src/Schema/Action.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
"schema-config": "typescript-json-schema tsconfig.json OperatorJsonConfig --out src/Schema/OperatorConfig.json --required --tsNodeRegister --refs --validationKeywords deprecationMessage",
"schemaNotWorking": "./node_modules/.bin/ts-json-schema-generator -f tsconfig.json -p src/JsonConfig.ts -t JSONConfig --out src/Schema/vegaSchema.json",
"schemaNotWorking": "./node_modules/.bin/ts-json-schema-generator -f tsconfig.json -p src/SubredditConfigData.ts -t JSONConfig --out src/Schema/vegaSchema.json",
"circular": "madge --circular --extensions ts src/index.ts",
"circular-graph": "madge --image graph.svg --circular --extensions ts src/index.ts",
"postinstall": "patch-package",
"typeorm": "node --require ts-node/register ./node_modules/typeorm/cli.js",
"initMigration": "npm run typeorm -- migration:generate -t 1642180264563 -d ormconfig.js \"src/Common/Migrations/Database/init\""
"initMigration": "npm run typeorm -- migration:generate -t 1642180264563 -d ormconfig.js \"src/Common/Migrations/Database/init\"",
"cv-install-ubuntu-prebuild": "build-opencv --incDir /usr/include/opencv4/ --libDir /lib/x86_64-linux-gnu/ --binDir=/usr/bin/ --nobuild rebuild",
"cv-install-docker-prebuild": "build-opencv --incDir /usr/include/opencv4/ --libDir /usr/lib/ --binDir=/usr/bin/ --nobuild rebuild",
"cv-autoinstall-cuda": "build-opencv --version 4.5.5 --flags=\"-DWITH_CUDA=ON -DWITH_CUDNN=ON -DOPENCV_DNN_CUDA=ON -DCUDA_FAST_MATH=ON\" build",
"cv-autoinstall": "build-opencv build"
},
"engines": {
"node": ">=16"
@@ -30,6 +36,8 @@
"dependencies": {
"@awaitjs/express": "^0.8.0",
"@googleapis/youtube": "^2.0.0",
"@influxdata/influxdb-client": "^1.27.0",
"@influxdata/influxdb-client-apis": "^1.27.0",
"@nlpjs/core": "^4.23.4",
"@nlpjs/lang-de": "^4.23.4",
"@nlpjs/lang-en": "^4.23.4",
@@ -38,6 +46,7 @@
"@nlpjs/language": "^4.22.7",
"@nlpjs/nlp": "^4.23.5",
"@stdlib/regexp-regexp": "^0.0.6",
"@u4/opencv4nodejs": "^6.2.1",
"ajv": "^7.2.4",
"ansi-regex": ">=5.0.1",
"async": "^3.2.0",
@@ -47,7 +56,7 @@
"cache-manager-redis-store": "^2.0.0",
"commander": "^8.0.0",
"comment-json": "^4.1.1",
"connect-typeorm": "github:FoxxMD/connect-typeorm#typeormBump",
"connect-typeorm": "^2.0.0",
"cookie-parser": "^1.3.5",
"dayjs": "^1.10.5",
"deepmerge": "^4.2.2",
@@ -91,8 +100,9 @@
"string-similarity": "^4.0.4",
"tcp-port-used": "^1.0.2",
"triple-beam": "^1.3.0",
"typeorm": "^0.3.4",
"typeorm": "^0.3.7",
"typeorm-logger-adaptor": "^1.1.0",
"unique-names-generator": "^4.7.1",
"vader-sentiment": "^1.1.3",
"webhook-discord": "^3.7.7",
"wink-sentiment": "^5.0.2",
@@ -155,6 +165,7 @@
"better-sqlite3": "^7.5.0",
"mongo": "^3.6.0",
"mysql": "^2.18.1",
"opencv-build": "^0.1.9",
"pg": "^8.7.1",
"sharp": "^0.29.1"
}

View File

@@ -18,12 +18,15 @@ import {CancelDispatchAction, CancelDispatchActionJson} from "./CancelDispatchAc
import ContributorAction, {ContributorActionJson} from "./ContributorAction";
import {StructuredFilter} from "../Common/Infrastructure/Filters/FilterShapes";
import {ModNoteAction, ModNoteActionJson} from "./ModNoteAction";
import {SubmissionAction, SubmissionActionJson} from "./SubmissionAction";
export function actionFactory
(config: StructuredActionJson, logger: Logger, subredditName: string, resources: SubredditResources, client: ExtendedSnoowrap, emitter: EventEmitter): Action {
switch (config.kind) {
case 'comment':
return new CommentAction({...config as StructuredFilter<CommentActionJson>, logger, subredditName, resources, client, emitter});
case 'submission':
return new SubmissionAction({...config as StructuredFilter<SubmissionActionJson>, logger, subredditName, resources, client, emitter});
case 'lock':
return new LockAction({...config as StructuredFilter<LockActionJson>, logger, subredditName, resources, client, emitter});
case 'remove':

View File

@@ -6,6 +6,10 @@ import {ActionProcessResult, Footer, RuleResult} from "../Common/interfaces";
import {RuleResultEntity} from "../Common/Entities/RuleResultEntity";
import {runCheckOptions} from "../Subreddit/Manager";
import {ActionTypes} from "../Common/Infrastructure/Atomic";
import {truncateStringToLength} from "../util";
const truncate = truncateStringToLength(100);
const truncateLongMessage = truncateStringToLength(200);
export class BanAction extends Action {
@@ -37,15 +41,17 @@ export class BanAction extends Action {
async process(item: Comment | Submission, ruleResults: RuleResultEntity[], options: runCheckOptions): Promise<ActionProcessResult> {
const dryRun = this.getRuntimeAwareDryrun(options);
const content = this.message === undefined ? undefined : await this.resources.getContent(this.message, item.subreddit);
const renderedBody = content === undefined ? undefined : await renderContent(content, item, ruleResults, this.resources.userNotes);
const renderedBody = this.message === undefined ? undefined : await this.resources.renderContent(this.message, item, ruleResults);
const renderedContent = renderedBody === undefined ? undefined : `${renderedBody}${await this.resources.generateFooter(item, this.footer)}`;
const renderedReason = this.reason === undefined ? undefined : truncate(await this.resources.renderContent(this.reason, item, ruleResults));
const renderedNote = this.note === undefined ? undefined : truncate(await this.resources.renderContent(this.note, item, ruleResults));
const touchedEntities = [];
let banPieces = [];
banPieces.push(`Message: ${renderedContent === undefined ? 'None' : `${renderedContent.length > 100 ? `\r\n${renderedContent}` : renderedContent}`}`);
banPieces.push(`Reason: ${this.reason || 'None'}`);
banPieces.push(`Note: ${this.note || 'None'}`);
banPieces.push(`Message: ${renderedContent === undefined ? 'None' : `${renderedContent.length > 100 ? `\r\n${truncateLongMessage(renderedContent)}` : renderedContent}`}`);
banPieces.push(`Reason: ${renderedReason || 'None'}`);
banPieces.push(`Note: ${renderedNote || 'None'}`);
const durText = this.duration === undefined ? 'permanently' : `for ${this.duration} days`;
this.logger.info(`Banning ${item.author.name} ${durText}${this.reason !== undefined ? ` (${this.reason})` : ''}`);
this.logger.verbose(`\r\n${banPieces.join('\r\n')}`);
@@ -56,8 +62,8 @@ export class BanAction extends Action {
const bannedUser = await fetchedSub.banUser({
name: fetchedName,
banMessage: renderedContent === undefined ? undefined : renderedContent,
banReason: this.reason,
banNote: this.note,
banReason: renderedReason,
banNote: renderedNote,
duration: this.duration
});
touchedEntities.push(bannedUser);
@@ -65,7 +71,7 @@ export class BanAction extends Action {
return {
dryRun,
success: true,
result: `Banned ${item.author.name} ${durText}${this.reason !== undefined ? ` (${this.reason})` : ''}`,
result: `Banned ${item.author.name} ${durText}${renderedReason !== undefined ? ` (${renderedReason})` : ''}`,
touchedEntities
};
}
@@ -97,8 +103,10 @@ export interface BanActionConfig extends ActionConfig, Footer {
* */
message?: string
/**
* Reason for ban.
* @maxLength 100
* Reason for ban. Can use Templating.
*
* If the length expands to more than 100 characters it will truncated with "..."
*
* @examples ["repeat spam"]
* */
reason?: string
@@ -110,8 +118,10 @@ export interface BanActionConfig extends ActionConfig, Footer {
* */
duration?: number
/**
* A mod note for this ban
* @maxLength 100
* A mod note for this ban. Can use Templating.
*
* If the length expands to more than 100 characters it will truncated with "..."
*
* @examples ["Sock puppet for u/AnotherUser"]
* */
note?: string

View File

@@ -1,12 +1,14 @@
import Action, {ActionJson, ActionOptions} from "./index";
import {Comment} from "snoowrap";
import {Comment, VoteableContent} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {renderContent} from "../Utils/SnoowrapUtils";
import {ActionProcessResult, Footer, RequiredRichContent, RichContent, RuleResult} from "../Common/interfaces";
import {truncateStringToLength} from "../util";
import {asComment, asSubmission, parseRedditThingsFromLink, truncateStringToLength} from "../util";
import {RuleResultEntity} from "../Common/Entities/RuleResultEntity";
import {runCheckOptions} from "../Subreddit/Manager";
import {ActionTypes} from "../Common/Infrastructure/Atomic";
import {ActionTarget, ActionTypes, ArbitraryActionTarget} from "../Common/Infrastructure/Atomic";
import {CMError} from "../Utils/Errors";
import {SnoowrapActivity} from "../Common/Infrastructure/Reddit";
export class CommentAction extends Action {
content: string;
@@ -14,6 +16,7 @@ export class CommentAction extends Action {
sticky: boolean = false;
distinguish: boolean = false;
footer?: false | string;
targets: ArbitraryActionTarget[]
constructor(options: CommentActionOptions) {
super(options);
@@ -23,12 +26,18 @@ export class CommentAction extends Action {
sticky = false,
distinguish = false,
footer,
targets = ['self']
} = options;
this.footer = footer;
this.content = content;
this.lock = lock;
this.sticky = sticky;
this.distinguish = distinguish;
if (!Array.isArray(targets)) {
this.targets = [targets];
} else {
this.targets = targets;
}
}
getKind(): ActionTypes {
@@ -45,48 +54,105 @@ export class CommentAction extends Action {
const renderedContent = `${body}${footer}`;
this.logger.verbose(`Contents:\r\n${renderedContent.length > 100 ? `\r\n${renderedContent}` : renderedContent}`);
if(item.archived) {
this.logger.warn('Cannot comment because Item is archived');
return {
dryRun,
success: false,
result: 'Cannot comment because Item is archived'
};
}
let allErrors = true;
const targetResults: string[] = [];
const touchedEntities = [];
let modifiers = [];
let reply: Comment;
if(!dryRun) {
// @ts-ignore
reply = await item.reply(renderedContent);
// add to recent so we ignore activity when/if it is discovered by polling
await this.resources.setRecentSelf(reply);
touchedEntities.push(reply);
}
if (this.lock) {
modifiers.push('Locked');
for (const target of this.targets) {
let targetItem = item;
let targetIdentifier = target;
if (target === 'parent') {
if (asSubmission(item)) {
const noParent = `[Parent] Submission ${item.name} does not have a parent`;
this.logger.warn(noParent);
targetResults.push(noParent);
continue;
}
targetItem = await this.resources.getActivity(this.client.getSubmission(item.link_id));
} else if (target !== 'self') {
const redditThings = parseRedditThingsFromLink(target);
let id = '';
try {
if (redditThings.comment !== undefined) {
id = redditThings.comment.id;
targetIdentifier = `Permalink Comment ${id}`
// @ts-ignore
await this.resources.getActivity(this.client.getSubmission(redditThings.submission.id));
targetItem = await this.resources.getActivity(this.client.getComment(redditThings.comment.id));
} else if (redditThings.submission !== undefined) {
id = redditThings.submission.id;
targetIdentifier = `Permalink Submission ${id}`
targetItem = await this.resources.getActivity(this.client.getSubmission(redditThings.submission.id));
} else {
targetResults.push(`[Permalink] Could not parse ${target} as a reddit permalink`);
continue;
}
} catch (err: any) {
targetResults.push(`[${targetIdentifier}] error occurred while fetching activity: ${err.message}`);
this.logger.warn(new CMError(`[${targetIdentifier}] error occurred while fetching activity`, {cause: err}));
continue;
}
}
if (targetItem.archived) {
const archived = `[${targetIdentifier}] Cannot comment because Item is archived`;
this.logger.warn(archived);
targetResults.push(archived);
continue;
}
let modifiers = [];
let reply: Comment;
if (!dryRun) {
// snoopwrap typing issue, thinks comments can't be locked
// @ts-ignore
await reply.lock();
reply = await targetItem.reply(renderedContent);
// add to recent so we ignore activity when/if it is discovered by polling
await this.resources.setRecentSelf(reply);
touchedEntities.push(reply);
}
}
if (this.distinguish && !dryRun) {
modifiers.push('Distinguished');
if(this.sticky) {
modifiers.push('Stickied');
if (this.lock && targetItem.can_mod_post) {
if (!targetItem.can_mod_post) {
this.logger.warn(`[${targetIdentifier}] Cannot lock because bot is not a moderator`);
} else {
modifiers.push('Locked');
if (!dryRun) {
// snoopwrap typing issue, thinks comments can't be locked
// @ts-ignore
await reply.lock();
}
}
}
if(!dryRun) {
// @ts-ignore
await reply.distinguish({sticky: this.sticky});
if (this.distinguish) {
if (!targetItem.can_mod_post) {
this.logger.warn(`[${targetIdentifier}] Cannot lock Distinguish/Sticky because bot is not a moderator`);
} else {
modifiers.push('Distinguished');
if (this.sticky) {
modifiers.push('Stickied');
}
if (!dryRun) {
// @ts-ignore
await reply.distinguish({sticky: this.sticky});
}
}
}
const modifierStr = modifiers.length === 0 ? '' : ` == ${modifiers.join(' | ')} == =>`;
// @ts-ignore
targetResults.push(`${targetIdentifier}${modifierStr} created Comment ${dryRun ? 'DRYRUN' : (reply as SnoowrapActivity).name}`)
allErrors = false;
}
const modifierStr = modifiers.length === 0 ? '' : `[${modifiers.join(' | ')}]`;
return {
dryRun,
success: true,
result: `${modifierStr}${truncateStringToLength(100)(body)}`,
success: !allErrors,
result: `${targetResults.join('\n')}${truncateStringToLength(100)(body)}`,
touchedEntities,
};
}
@@ -97,7 +163,8 @@ export class CommentAction extends Action {
lock: this.lock,
sticky: this.sticky,
distinguish: this.distinguish,
footer: this.footer
footer: this.footer,
targets: this.targets,
}
}
}
@@ -115,6 +182,21 @@ export interface CommentActionConfig extends RequiredRichContent, Footer {
* Distinguish the comment after creation?
* */
distinguish?: boolean,
/**
* Specify where this comment should be made
*
* Valid values: 'self' | 'parent' | [reddit permalink]
*
* 'self' and 'parent' are special targets that are relative to the Activity being processed:
* * When Activity is Submission => 'parent' does nothing
* * When Activity is Comment
* * 'self' => reply to Activity
* * 'parent' => make a top-level comment in the Submission the Comment is in
*
* If target is not self/parent then CM assumes the value is a reddit permalink and will attempt to make a comment to that Activity
* */
targets?: ArbitraryActionTarget | ArbitraryActionTarget[]
}
export interface CommentActionOptions extends CommentActionConfig, ActionOptions {
@@ -124,5 +206,5 @@ export interface CommentActionOptions extends CommentActionConfig, ActionOptions
* Reply to the Activity. For a submission the reply will be a top-level comment.
* */
export interface CommentActionJson extends CommentActionConfig, ActionJson {
kind: 'comment'
kind: 'comment'
}

View File

@@ -50,8 +50,9 @@ export class MessageAction extends Action {
async process(item: Comment | Submission, ruleResults: RuleResultEntity[], options: runCheckOptions): Promise<ActionProcessResult> {
const dryRun = this.getRuntimeAwareDryrun(options);
const content = await this.resources.getContent(this.content);
const body = await renderContent(content, item, ruleResults, this.resources.userNotes);
const body = await this.resources.renderContent(this.content, item, ruleResults);
const subject = this.title === undefined ? `Concerning your ${isSubmission(item) ? 'Submission' : 'Comment'}` : await this.resources.renderContent(this.title, item, ruleResults);
const footer = await this.resources.generateFooter(item, this.footer);
@@ -80,7 +81,7 @@ export class MessageAction extends Action {
text: renderedContent,
// @ts-ignore
fromSubreddit: this.asSubreddit ? await item.subreddit.fetch() : undefined,
subject: this.title || `Concerning your ${isSubmission(item) ? 'Submission' : 'Comment'}`,
subject: subject,
};
const msgPreview = `\r\n

View File

@@ -4,13 +4,16 @@ import Snoowrap, {Comment, Submission} from "snoowrap";
import {activityIsRemoved} from "../Utils/SnoowrapUtils";
import {ActionProcessResult, RuleResult} from "../Common/interfaces";
import dayjs from "dayjs";
import {isSubmission} from "../util";
import {isSubmission, truncateStringToLength} from "../util";
import {RuleResultEntity} from "../Common/Entities/RuleResultEntity";
import {runCheckOptions} from "../Subreddit/Manager";
import {ActionTypes} from "../Common/Infrastructure/Atomic";
const truncate = truncateStringToLength(100);
export class RemoveAction extends Action {
spam: boolean;
note?: string;
reasonId?: string;
getKind(): ActionTypes {
return 'remove';
@@ -20,21 +23,54 @@ export class RemoveAction extends Action {
super(options);
const {
spam = false,
note,
reasonId,
} = options;
this.spam = spam;
this.note = note;
this.reasonId = reasonId;
}
async process(item: Comment | Submission, ruleResults: RuleResultEntity[], options: runCheckOptions): Promise<ActionProcessResult> {
const dryRun = this.getRuntimeAwareDryrun(options);
const touchedEntities = [];
let removeSummary = [];
// issue with snoowrap typings, doesn't think prop exists on Submission
// @ts-ignore
if (activityIsRemoved(item)) {
this.logger.warn('It looks like this Item is already removed!');
}
if (this.spam) {
removeSummary.push('Marked as SPAM');
this.logger.verbose('Marking as spam on removal');
}
const renderedNote = this.note === undefined ? undefined : await this.resources.renderContent(this.note, item, ruleResults);
let foundReasonId: string | undefined;
let foundReason: string | undefined;
if(this.reasonId !== undefined) {
const reason = await this.resources.getSubredditRemovalReasonById(this.reasonId);
if(reason === undefined) {
const reasonWarn = [`Could not find any Removal Reason with the ID ${this.reasonId}!`];
if(renderedNote === undefined) {
reasonWarn.push('Cannot add any Removal Reason because note is also empty!');
} else {
reasonWarn.push('Will add Removal Reason but only with note.');
}
this.logger.warn(reasonWarn.join(''));
} else {
foundReason = truncate(reason.title);
foundReasonId = reason.id;
removeSummary.push(`Reason: ${truncate(foundReason)} (${foundReasonId})`);
}
}
if(renderedNote !== undefined) {
removeSummary.push(`Note: ${truncate(renderedNote)}`);
}
this.logger.verbose(removeSummary.join(' | '));
if (!dryRun) {
// @ts-ignore
await item.remove({spam: this.spam});
@@ -44,6 +80,18 @@ export class RemoveAction extends Action {
// @ts-ignore
item.removed = true;
}
if(foundReasonId !== undefined || renderedNote !== undefined) {
await this.client.addRemovalReason(item, renderedNote, foundReasonId);
item.mod_reason_by = this.resources.botAccount as string;
if(renderedNote !== undefined) {
item.removal_reason = renderedNote;
}
if(foundReason !== undefined) {
item.mod_reason_title = foundReason;
}
}
await this.resources.resetCacheForItem(item);
touchedEntities.push(item);
}
@@ -66,7 +114,22 @@ export interface RemoveOptions extends Omit<RemoveActionConfig, 'authorIs' | 'it
}
export interface RemoveActionConfig extends ActionConfig {
/** (Optional) Mark Activity as spam */
spam?: boolean
/** (Optional) A mod-readable note added to the removal reason for this Activity. Can use Templating.
*
* This note (and removal reasons) are only visible on New Reddit
* */
note?: string
/** (Optional) The ID of the Removal Reason to use
*
* Removal reasons are only visible on New Reddit
*
* To find IDs for removal reasons check the "Removal Reasons" popup located in the CM dashboard config editor for your subreddit
*
* More info on Removal Reasons: https://mods.reddithelp.com/hc/en-us/articles/360010094892-Removal-Reasons
* */
reasonId?: string
}
/**

View File

@@ -0,0 +1,323 @@
import Action, {ActionJson, ActionOptions} from "./index";
import {Comment, SubmitLinkOptions, SubmitSelfPostOptions, VoteableContent} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {renderContent} from "../Utils/SnoowrapUtils";
import {ActionProcessResult, Footer, RequiredRichContent, RichContent, RuleResult} from "../Common/interfaces";
import {asComment, asSubmission, parseRedditEntity, parseRedditThingsFromLink, sleep, truncateStringToLength} from "../util";
import {RuleResultEntity} from "../Common/Entities/RuleResultEntity";
import {runCheckOptions} from "../Subreddit/Manager";
import {ActionTarget, ActionTypes, ArbitraryActionTarget} from "../Common/Infrastructure/Atomic";
import {CMError} from "../Utils/Errors";
import {SnoowrapActivity} from "../Common/Infrastructure/Reddit";
import Subreddit from "snoowrap/dist/objects/Subreddit";
export class SubmissionAction extends Action {
content?: string;
lock: boolean = false;
sticky: boolean = false;
distinguish: boolean = false;
spoiler: boolean = false;
nsfw: boolean = false;
flairId?: string
flairText?: string
url?: string
title: string
footer?: false | string;
targets: ('self' | string)[]
constructor(options: SubmissionActionOptions) {
super(options);
const {
content,
lock = false,
sticky = false,
spoiler = false,
distinguish = false,
nsfw = false,
flairText,
flairId,
footer,
url,
title,
targets = ['self']
} = options;
this.footer = footer;
this.content = content;
this.lock = lock;
this.sticky = sticky;
if(this.sticky) {
this.distinguish = sticky;
} else {
this.distinguish = distinguish;
}
this.spoiler = spoiler;
this.nsfw = nsfw;
this.flairText = flairText;
this.flairId = flairId;
this.url = url;
this.title = title;
if (!Array.isArray(targets)) {
this.targets = [targets];
} else {
this.targets = targets;
}
}
getKind(): ActionTypes {
return 'submission';
}
async process(item: Comment | Submission, ruleResults: RuleResultEntity[], options: runCheckOptions): Promise<ActionProcessResult> {
const dryRun = this.getRuntimeAwareDryrun(options);
const title = await this.resources.renderContent(this.title, item, ruleResults);
this.logger.verbose(`Title: ${title}`);
const url = this.url !== undefined ? await this.resources.renderContent(this.url, item, ruleResults) : undefined;
this.logger.verbose(`URL: ${url !== undefined ? url : '[No URL]'}`);
const body = this.content !== undefined ? await this.resources.renderContent(this.content, item, ruleResults) : undefined;
let renderedContent: string | undefined = undefined;
if(body !== undefined) {
const footer = await this.resources.generateFooter(item, this.footer);
renderedContent = `${body}${footer}`;
this.logger.verbose(`Contents:\r\n${renderedContent.length > 100 ? `\r\n${renderedContent}` : renderedContent}`);
} else {
this.logger.verbose(`Contents: [No Body]`);
}
let allErrors = true;
const targetResults: string[] = [];
const touchedEntities = [];
let submittedOnce = false;
for (const targetVal of this.targets) {
//
if(submittedOnce) {
// delay submissions by 3 seconds (on previous successful call)
// to try to spread out load
await sleep(3000);
}
let target: Subreddit = item.subreddit;
let targetIdentifier = targetVal;
if (targetVal !== 'self') {
const subredditVal = parseRedditEntity(targetVal);
try {
target = await this.resources.getSubreddit(subredditVal.name);
targetIdentifier = `[Subreddit ${target.display_name}]`;
} catch (err: any) {
targetResults.push(`[${targetIdentifier}] error occurred while fetching subreddit: ${err.message}`);
if(!err.logged) {
this.logger.warn(new CMError(`[${targetIdentifier}] error occurred while fetching subreddit`, {cause: err}));
}
continue;
}
}
// TODO check if we can post in subreddit
let modifiers = [];
let post: Submission | undefined;
if (!dryRun) {
let opts: SubmitLinkOptions | SubmitSelfPostOptions;
let type: 'self' | 'link';
const genericOpts = {
title,
subredditName: target.display_name,
nsfw: this.nsfw,
spoiler: this.spoiler,
flairId: this.flairId,
flairText: this.flairText,
};
if(url !== undefined) {
type = 'link';
opts = {
...genericOpts,
url,
};
if(renderedContent !== undefined) {
// @ts-ignore
linkOpts.text = renderedContent;
}
} else {
type = 'self';
opts = {
...genericOpts,
text: renderedContent,
}
}
// @ts-ignore
post = await this.tryPost(type, target, opts);
await this.resources.setRecentSelf(post as Submission);
if(post !== undefined) {
touchedEntities.push(post);
}
}
if (this.lock) {
if (post !== undefined && !post.can_mod_post) {
this.logger.warn(`[${targetIdentifier}] Cannot lock because bot is not a moderator`);
} else {
modifiers.push('Locked');
if (!dryRun && post !== undefined) {
// snoopwrap typing issue, thinks comments can't be locked
// @ts-ignore
await post.lock();
}
}
}
if (this.distinguish) {
if (post !== undefined && !post.can_mod_post) {
this.logger.warn(`[${targetIdentifier}] Cannot Distinguish/Sticky because bot is not a moderator`);
} else {
modifiers.push('Distinguished');
if (this.sticky) {
modifiers.push('Stickied');
}
if (!dryRun && post !== undefined) {
// @ts-ignore
await post.distinguish({sticky: this.sticky});
}
}
}
const modifierStr = modifiers.length === 0 ? '' : ` == ${modifiers.join(' | ')} == =>`;
const targetSummary = `${targetIdentifier} ${modifierStr} created Submission ${dryRun ? 'DRYRUN' : (post as SnoowrapActivity).name}`;
// @ts-ignore
targetResults.push(targetSummary)
this.logger.verbose(targetSummary);
allErrors = false;
}
return {
dryRun,
success: !allErrors,
result: `${targetResults.join('\n')}${this.url !== undefined ? `\nURL: ${this.url}` : ''}${body !== undefined ? truncateStringToLength(100)(body) : ''}`,
touchedEntities,
};
}
// @ts-ignore
protected async tryPost(type: 'self' | 'link', target: Subreddit, data: SubmitLinkOptions | SubmitSelfPostOptions, maxAttempts = 2): Promise<Submission> {
let post: Submission | undefined;
let error: any;
for (let i = 0; i <= maxAttempts; i++) {
try {
if (type === 'self') {
// @ts-ignore
post = await target.submitSelfpost(data as SubmitSelfPostOptions);
} else {
// @ts-ignore
post = await target.submitLink(data as SubmitLinkOptions);
}
break;
} catch (e: any) {
if (e.message.includes('RATELIMIT')) {
// Looks like you've been doing that a lot. Take a break for 5 seconds before trying again
await sleep(5000);
error = e;
} else {
throw e;
}
}
}
if (error !== undefined) {
throw error;
}
// @ts-ignore
return post;
}
protected getSpecificPremise(): object {
return {
content: this.content,
lock: this.lock,
sticky: this.sticky,
spoiler: this.spoiler,
distinguish: this.distinguish,
nsfw: this.nsfw,
flairId: this.flairId,
flairText: this.flairText,
url: this.url,
text: this.content !== undefined ? truncateStringToLength(50)(this.content) : undefined,
footer: this.footer,
targets: this.targets,
}
}
}
export interface SubmissionActionConfig extends RichContent, Footer {
/**
* Lock the Submission after creation?
* */
lock?: boolean,
/**
* Sticky the Submission after creation?
* */
sticky?: boolean,
nsfw?: boolean
spoiler?: boolean
/**
* The title of this Submission.
*
* Templated the same as **content**
* */
title: string
/**
* If Submission should be a Link, the URL to use
*
* Templated the same as **content**
*
* PROTIP: To make a Link Submission pointing to the Activity being processed use `{{item.permalink}}` as the URL value
* */
url?: string
/**
* Flair template to apply to this Submission
* */
flairId?: string
/**
* Flair text to apply to this Submission
* */
flairText?: string
/**
* Distinguish as Mod after creation?
* */
distinguish?: boolean
/**
* Specify where this Submission should be made
*
* Valid values: 'self' | [subreddit]
*
* * 'self' -- DEFAULT. Post Submission to same subreddit of Activity being processed
* * [subreddit] -- The name of a subreddit to post Submission to. EX mealtimevideos
* */
targets?: 'self' | string
}
export interface SubmissionActionOptions extends SubmissionActionConfig, ActionOptions {
}
/**
* Reply to the Activity. For a submission the reply will be a top-level comment.
* */
export interface SubmissionActionJson extends SubmissionActionConfig, ActionJson {
kind: 'submission'
}

View File

@@ -7,12 +7,12 @@ import Submission from "snoowrap/dist/objects/Submission";
import {ActionProcessResult, RuleResult} from "../Common/interfaces";
import {RuleResultEntity} from "../Common/Entities/RuleResultEntity";
import {runCheckOptions} from "../Subreddit/Manager";
import {ActionTypes} from "../Common/Infrastructure/Atomic";
import {ActionTypes, UserNoteType} from "../Common/Infrastructure/Atomic";
export class UserNoteAction extends Action {
content: string;
type: string;
type: UserNoteType;
allowDuplicate: boolean;
constructor(options: UserNoteActionOptions) {

View File

@@ -4,24 +4,27 @@ import {getLogger} from "./Utils/loggerFactory";
import {DatabaseMigrationOptions, OperatorConfig, OperatorConfigWithFileContext, OperatorFileConfig} from "./Common/interfaces";
import Bot from "./Bot";
import LoggedError from "./Utils/LoggedError";
import {mergeArr, sleep} from "./util";
import {copyFile} from "fs/promises";
import {generateRandomName, mergeArr, sleep} from "./util";
import {copyFile, open} from "fs/promises";
import {constants} from "fs";
import {Connection} from "typeorm";
import {Connection, DataSource, Repository} from "typeorm";
import {ErrorWithCause} from "pony-cause";
import {MigrationService} from "./Common/MigrationService";
import {Invokee} from "./Common/Infrastructure/Atomic";
import {DatabaseConfig} from "./Common/Infrastructure/Database";
import {InviteData} from "./Web/Common/interfaces";
import {BotInvite} from "./Common/Entities/BotInvite";
export class App {
bots: Bot[] = [];
logger: Logger;
dbLogger: Logger;
database: Connection
database: DataSource
startedAt: Dayjs = dayjs();
ranMigrations: boolean = false;
migrationBlocker?: string;
friendly?: string;
config: OperatorConfig;
@@ -30,6 +33,7 @@ export class App {
fileConfig: OperatorFileConfig;
migrationService: MigrationService;
inviteRepo: Repository<BotInvite>;
constructor(config: OperatorConfigWithFileContext) {
const {
@@ -49,6 +53,8 @@ export class App {
this.logger = getLogger(config.logging);
this.dbLogger = this.logger.child({labels: ['Database']}, mergeArr);
this.database = database;
this.inviteRepo = this.database.getRepository(BotInvite);
this.friendly = this.config.api.friendly;
this.logger.info(`Operators: ${name.length === 0 ? 'None Specified' : name.join(', ')}`)
@@ -114,6 +120,8 @@ export class App {
return;
}
await this.checkFriendlyName();
if(this.bots.length > 0) {
this.logger.info('Bots already exist, will stop and destroy these before building new ones.');
await this.destroy(causedBy);
@@ -133,7 +141,7 @@ export class App {
for (const b of this.bots) {
if (b.error === undefined) {
try {
await b.testClient();
await b.init();
await b.buildManagers();
await sleep(2000);
b.runManagers(causedBy).catch((err) => {
@@ -161,4 +169,54 @@ export class App {
await b.destroy(causedBy);
}
}
async checkFriendlyName() {
if(this.friendly === undefined) {
let randFriendly: string = generateRandomName();
this.logger.verbose(`No friendly name set for Server. Generated: ${randFriendly}`);
const exists = async (name: string) => {
const existing = await this.inviteRepo.findBy({instance: name});
return existing.length > 0;
}
while (await exists(randFriendly)) {
let oldFriendly = randFriendly;
randFriendly = generateRandomName();
this.logger.verbose(`${oldFriendly} already exists! Generated: ${randFriendly}`);
}
this.friendly = randFriendly;
this.fileConfig.document.setFriendlyName(this.friendly);
const handle = await open(this.fileConfig.document.location as string, 'w');
await handle.writeFile(this.fileConfig.document.toString());
await handle.close();
this.logger.verbose(`Wrote ${randFriendly} as friendly server name to config.`);
}
}
async getInviteById(id: string): Promise<BotInvite | undefined> {
const invite = await this.inviteRepo.findOne({where: {id, instance: this.friendly}});
if(invite === null) {
return undefined;
}
return invite;
}
async getInviteIds(): Promise<string[]> {
if(!this.ranMigrations) {
// not ready!
return [];
}
const invites = await this.inviteRepo.findBy({instance: this.friendly});
return invites.map(x => x.id);
}
async addInvite(data: InviteData): Promise<InviteData> {
return await this.inviteRepo.save(new BotInvite(data));
}
async deleteInvite(id: string): Promise<void> {
await this.inviteRepo.delete({ id });
}
}

View File

@@ -13,13 +13,13 @@ import {
USER
} from "../Common/interfaces";
import {
createRetryHandler, difference,
createRetryHandler, symmetricalDifference,
formatNumber, getExceptionMessage, getUserAgent,
mergeArr,
parseBool,
parseDuration, parseMatchMessage, parseRedditEntity,
parseSubredditName, RetryOptions,
sleep
parseSubredditName, partition, RetryOptions,
sleep, intersect
} from "../util";
import {Manager} from "../Subreddit/Manager";
import {ExtendedSnoowrap, ProxiedSnoowrap} from "../Utils/SnoowrapClients";
@@ -41,14 +41,21 @@ import {ManagerRunState} from "../Common/Entities/EntityRunState/ManagerRunState
import {Invokee, PollOn} from "../Common/Infrastructure/Atomic";
import {FilterCriteriaDefaults} from "../Common/Infrastructure/Filters/FilterShapes";
import {snooLogWrapper} from "../Utils/loggerFactory";
import {InfluxClient} from "../Common/Influx/InfluxClient";
import {Point} from "@influxdata/influxdb-client";
import {BotInstanceFunctions, NormalizedManagerResponse} from "../Web/Common/interfaces";
import {AuthorEntity} from "../Common/Entities/AuthorEntity";
import {Guest, GuestEntityData} from "../Common/Entities/Guest/GuestInterfaces";
import {guestEntitiesToAll, guestEntityToApiGuest} from "../Common/Entities/Guest/GuestEntity";
class Bot {
class Bot implements BotInstanceFunctions {
client!: ExtendedSnoowrap;
logger!: Logger;
logs: LogInfo[] = [];
wikiLocation: string;
dryRun?: true | undefined;
inited: boolean = false;
running: boolean = false;
subreddits: string[];
excludeSubreddits: string[];
@@ -71,6 +78,7 @@ class Bot {
botName?: string;
botLink?: string;
botAccount?: string;
botUser?: RedditUser;
maxWorkers: number;
startedAt: Dayjs = dayjs();
sharedStreams: PollOn[] = [];
@@ -90,9 +98,13 @@ class Bot {
config: BotInstanceConfig;
influxClients: InfluxClient[] = [];
database: DataSource
invokeeRepo: Repository<InvokeeType>;
runTypeRepo: Repository<RunStateType>;
managerRepo: Repository<ManagerEntity>;
authorRepo: Repository<AuthorEntity>;
botEntity!: BotEntity
getBotName = () => {
@@ -154,6 +166,8 @@ class Bot {
this.database = database;
this.invokeeRepo = this.database.getRepository(InvokeeType);
this.runTypeRepo = this.database.getRepository(RunStateType);
this.managerRepo = this.database.getRepository(ManagerEntity);
this.authorRepo = this.database.getRepository(AuthorEntity);
this.config = config;
this.dryRun = parseBool(dryRun) === true ? true : undefined;
this.softLimit = softLimit;
@@ -177,8 +191,11 @@ class Bot {
this.logger.stream().on('log', (log: LogInfo) => {
if(log.bot !== undefined && log.bot === this.getBotName() && log.subreddit === undefined) {
const combinedLogs = [log, ...this.logs];
this.logs = combinedLogs.slice(0, 301);
this.logs.unshift(log);
if(this.logs.length > 300) {
// remove all elements starting from the 300th index (301st item)
this.logs.splice(300);
}
}
});
@@ -311,33 +328,22 @@ class Bot {
}
}
async testClient(initial = true) {
try {
// @ts-ignore
await this.client.getMe();
this.logger.info('Test API call successful');
} catch (err: any) {
if (initial) {
this.logger.error('An error occurred while trying to initialize the Reddit API Client which would prevent the entire application from running.');
}
const hint = getExceptionMessage(err, {
401: 'Likely a credential is missing or incorrect. Check clientId, clientSecret, refreshToken, and accessToken',
400: 'Credentials may have been invalidated manually or by reddit due to behavior',
});
let msg = `Error occurred while testing Reddit API client${hint !== undefined ? `: ${hint}` : ''}`;
this.error = msg;
const clientError = new CMError(msg, {cause: err});
clientError.logged = true;
this.logger.error(clientError);
throw clientError;
}
}
async init() {
if(this.inited) {
return;
}
let user: RedditUser;
try {
user = await this.testClient();
} catch(err: any) {
this.logger.error('An error occurred while trying to initialize the Reddit API Client which would prevent the Bot from running.');
throw err;
}
async buildManagers(subreddits: string[] = []) {
let availSubs = [];
// @ts-ignore
const user = await this.client.getMe().fetch();
this.cacheManager.botName = user.name;
this.botUser = user;
this.botLink = `https://reddit.com/user/${user.name}`;
this.botAccount = `u/${user.name}`;
this.logger.info(`Reddit API Limit Remaining: ${this.client.ratelimitRemaining}`);
@@ -364,35 +370,78 @@ class Bot {
this.botEntity = b;
}
if(this.config.opInflux !== undefined) {
this.influxClients.push(this.config.opInflux.childClient(this.logger, {bot: user.name}));
if(this.config.influxConfig !== undefined) {
const iClient = new InfluxClient(this.config.influxConfig, this.logger, {bot: user.name});
await iClient.isReady();
this.influxClients.push(iClient);
}
}
this.inited = true;
}
// @ts-ignore
async testClient(initial = true) {
try {
// @ts-ignore
const user = this.client.getMe().fetch();
this.logger.info('Test API call successful');
return user;
} catch (err: any) {
if (initial) {
this.logger.error('An error occurred while trying to initialize the Reddit API Client which would prevent the entire application from running.');
}
const hint = getExceptionMessage(err, {
401: 'Likely a credential is missing or incorrect. Check clientId, clientSecret, refreshToken, and accessToken',
400: 'Credentials may have been invalidated manually or by reddit due to behavior',
});
let msg = `Error occurred while testing Reddit API client${hint !== undefined ? `: ${hint}` : ''}`;
this.error = msg;
const clientError = new CMError(msg, {cause: err});
clientError.logged = true;
this.logger.error(clientError);
throw clientError;
}
}
async buildManagers(subreddits: string[] = []) {
await this.init();
this.logger.verbose('Syncing subreddits to moderate with managers...');
let availSubs: Subreddit[] = [];
let subListing = await this.client.getModeratedSubreddits({count: 100});
while(!subListing.isFinished) {
subListing = await subListing.fetchMore({amount: 100});
}
availSubs = subListing.filter(x => x.display_name !== `u_${user.name}`);
availSubs = subListing.filter(x => x.display_name !== `u_${this.botUser?.name}`);
this.logger.info(`u/${user.name} is a moderator of these subreddits: ${availSubs.map(x => x.display_name_prefixed).join(', ')}`);
this.logger.verbose(`${this.botAccount} is a moderator of these subreddits: ${availSubs.map(x => x.display_name_prefixed).join(', ')}`);
let subsToRun: Subreddit[] = [];
const subsToUse = subreddits.length > 0 ? subreddits.map(parseSubredditName) : this.subreddits;
if (subsToUse.length > 0) {
this.logger.info(`Operator-defined subreddit constraints detected (CLI argument or environmental variable), will try to run on: ${subsToUse.join(', ')}`);
for (const sub of subsToUse) {
this.logger.info(`Operator-specified subreddit constraints detected, will only use these: ${subsToUse.join(', ')}`);
const availSubsCI = availSubs.map(x => x.display_name.toLowerCase());
const [foundSubs, notFoundSubs] = partition(subsToUse, (aSub) => availSubsCI.includes(aSub.toLowerCase()));
if(notFoundSubs.length > 0) {
this.logger.warn(`Will not run some operator-specified subreddits because they are not modded by, or do not have appropriate mod permissions for, this bot: ${notFoundSubs.join(', ')}`);
}
for (const sub of foundSubs) {
const asub = availSubs.find(x => x.display_name.toLowerCase() === sub.toLowerCase())
if (asub === undefined) {
this.logger.warn(`Will not run on ${sub} because is not modded by, or does not have appropriate permissions to mod with, for this client.`);
} else {
// @ts-ignore
const fetchedSub = await asub.fetch();
subsToRun.push(fetchedSub);
}
subsToRun.push(asub as Subreddit);
}
} else {
if(this.excludeSubreddits.length > 0) {
this.logger.info(`Will run on all moderated subreddits but own profile and user-defined excluded: ${this.excludeSubreddits.join(', ')}`);
this.logger.info(`Will run on all moderated subreddits EXCEPT own profile and operator-defined excluded: ${this.excludeSubreddits.join(', ')}`);
const normalExcludes = this.excludeSubreddits.map(x => x.toLowerCase());
subsToRun = availSubs.filter(x => !normalExcludes.includes(x.display_name.toLowerCase()));
} else {
this.logger.info(`No user-defined subreddit constraints detected, will run on all moderated subreddits EXCEPT own profile (${this.botAccount})`);
this.logger.info(`No operator-defined subreddit constraints detected, will run on all moderated subreddits EXCEPT own profile (${this.botAccount})`);
subsToRun = availSubs;
}
}
@@ -415,30 +464,66 @@ class Bot {
return acc;
}
}, []);
const notMatched = difference(normalizedOverrideNames, subsToRunNames);
const notMatched = symmetricalDifference(normalizedOverrideNames, subsToRunNames);
if(notMatched.length > 0) {
this.logger.warn(`There are overrides defined for subreddits the bot is not running. Check your spelling! Overrides not matched: ${notMatched.join(', ')}`);
}
}
let subManagersChanged = false;
// get configs for subs we want to run on and build/validate them
const subsToRunNames = subsToRun.map(x => x.display_name.toLowerCase());
// first stop and remove any managers with subreddits not in subsToRun
// -- this covers scenario where bot is running and mods of a subreddit de-mod the bot
// -- or where the include/exclude subs list changed from operator (not yet implemented)
if(this.subManagers.length > 0) {
let index = 0;
for(const manager of this.subManagers) {
if(!subsToRunNames.includes(manager.subreddit.display_name.toLowerCase())) {
subManagersChanged = true;
// determine if bot was de-modded
const deModded = !availSubs.some(x => x.display_name.toLowerCase() === manager.subreddit.display_name.toLowerCase());
this.logger.warn(`Stopping and removing manager for ${manager.subreddit.display_name.toLowerCase()} because it is ${deModded ? 'no longer moderated by this bot' : 'not in the list of subreddits to moderate'}`);
await manager.destroy('system', {reason: deModded ? 'No longer moderated by this bot' : 'Subreddit is not in moderated list'});
this.subManagers.splice(index, 1);
}
index++;
}
}
// then create any managers that don't already exist
// -- covers init scenario
// -- and in-situ adding subreddits IE bot is modded to a new subreddit while CM is running
const subsToInit: string[] = [];
for (const sub of subsToRun) {
try {
this.subManagers.push(await this.createManager(sub));
} catch (err: any) {
if(!this.subManagers.some(x => x.subreddit.display_name === sub.display_name)) {
subManagersChanged = true;
this.logger.info(`Manager for ${sub.display_name_prefixed} not found in existing managers. Creating now...`);
subsToInit.push(sub.display_name);
try {
this.subManagers.push(await this.createManager(sub));
} catch (err: any) {
}
}
}
for(const m of this.subManagers) {
for(const subName of subsToInit) {
try {
await this.initManager(m);
const m = this.subManagers.find(x => x.subreddit.display_name === subName);
await this.initManager(m as Manager);
} catch (err: any) {
}
}
this.parseSharedStreams();
if(!subManagersChanged) {
this.logger.verbose('All managers were already synced!');
} else {
this.parseSharedStreams();
}
return subManagersChanged;
}
parseSharedStreams() {
@@ -559,7 +644,7 @@ class Bot {
}
}
async createManager(sub: Subreddit): Promise<Manager> {
async createManager(subVal: Subreddit): Promise<Manager> {
const {
flowControlDefaults: {
maxGotoDepth: botMaxDefault
@@ -570,6 +655,15 @@ class Bot {
} = {}
} = this.config;
let sub = subVal;
// make sure the subreddit is fully fetched
// @ts-ignore
if(subVal._hasFetched === false) {
// @ts-ignore
sub = await subVal.fetch();
}
const override = overrides.find(x => {
const configName = parseRedditEntity(x.name).name;
if(configName !== undefined) {
@@ -586,15 +680,15 @@ class Bot {
databaseConfig: {
retention = undefined
} = {},
wikiConfig = this.wikiLocation,
} = override || {};
const managerRepo = this.database.getRepository(ManagerEntity);
const subRepo = this.database.getRepository(SubredditEntity)
let subreddit = await subRepo.findOne({where: {id: sub.name}});
if(subreddit === null) {
subreddit = await subRepo.save(new SubredditEntity({id: sub.name, name: sub.display_name}))
}
let managerEntity = await managerRepo.findOne({
let managerEntity = await this.managerRepo.findOne({
where: {
bot: {
id: this.botEntity.id
@@ -603,12 +697,15 @@ class Bot {
id: subreddit.id
}
},
relations: {
guests: true
}
});
if(managerEntity === undefined || managerEntity === null) {
const invokee = await this.invokeeRepo.findOneBy({name: SYSTEM}) as InvokeeType;
const runType = await this.runTypeRepo.findOneBy({name: STOPPED}) as RunStateType;
managerEntity = await managerRepo.save(new ManagerEntity({
managerEntity = await this.managerRepo.save(new ManagerEntity({
name: sub.display_name,
bot: this.botEntity,
subreddit: subreddit as SubredditEntity,
@@ -621,7 +718,7 @@ class Bot {
const manager = new Manager(sub, this.client, this.logger, this.cacheManager, {
dryRun: this.dryRun,
sharedStreams: this.sharedStreams,
wikiLocation: this.wikiLocation,
wikiLocation: wikiConfig,
botName: this.botName as string,
maxWorkers: this.maxWorkers,
filterCriteriaDefaults: this.filterCriteriaDefaults,
@@ -630,6 +727,7 @@ class Bot {
managerEntity: managerEntity as ManagerEntity,
statDefaults: (statDefaultsFromOverride ?? databaseStatisticsDefaults) as DatabaseStatisticsOperatorConfig,
retention,
influxClients: this.influxClients,
});
// all errors from managers will count towards bot-level retry count
manager.on('error', async (err) => await this.panicOnRetries(err));
@@ -669,21 +767,6 @@ class Bot {
await this.client.getSubreddit(name).acceptModeratorInvite();
this.logger.info(`Accepted moderator invite for r/${name}!`);
await this.cacheManager.deletePendingSubredditInvite(name);
// @ts-ignore
const sub = await this.client.getSubreddit(name);
this.logger.info(`Attempting to add manager for r/${name}`);
try {
const manager = await this.createManager(sub);
this.logger.info(`Starting manager for r/${name}`);
this.subManagers.push(manager);
await this.initManager(manager);
await manager.start('system', {reason: 'Caused by creation due to moderator invite'});
await this.runSharedStreams();
} catch (err: any) {
if (!(err instanceof LoggedError)) {
this.logger.error(err);
}
}
} catch (err: any) {
if (err.message.includes('NO_INVITE_FOUND')) {
this.logger.warn(`No pending moderation invite for r/${name} was found`);
@@ -742,9 +825,15 @@ class Bot {
async healthLoop() {
while (this.running) {
await sleep(5000);
const time = dayjs().valueOf()
await this.apiHealthCheck(time);
await this.guestModCleanup();
if (!this.running) {
break;
}
for(const m of this.subManagers) {
await m.writeHealthMetrics(time);
}
const now = dayjs();
if (now.isSameOrAfter(this.nextNannyCheck)) {
try {
@@ -757,8 +846,17 @@ class Bot {
}
if(now.isSameOrAfter(this.nextHeartbeat)) {
try {
await this.heartbeat();
// run sanity check to see if there is a service issue
try {
await this.testClient(false);
} catch (err: any) {
throw new SimpleError(`Something isn't right! This could be a Reddit API issue (service is down? buggy??) or an issue with the Bot account. Will not run heartbeat operations and will wait until next heartbeat (${dayjs.duration(this.nextHeartbeat.diff(dayjs())).humanize()}) to try again`);
}
await this.checkModInvites();
await this.buildManagers();
await this.heartbeat();
} catch (err: any) {
this.logger.error(`Error occurred during heartbeat check: ${err.message}`);
}
@@ -770,6 +868,73 @@ class Bot {
this.emitter.emit('healthStopped');
}
getApiUsageSummary() {
const depletion = this.apiEstDepletion === undefined ? 'Not Calculated' : this.apiEstDepletion.humanize();
return`API Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${depletion} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`;
}
async apiHealthCheck(time?: number) {
const rollingSample = this.apiSample.slice(0, 7)
rollingSample.unshift(this.client.ratelimitRemaining);
this.apiSample = rollingSample;
const diff = this.apiSample.reduceRight((acc: number[], curr, index) => {
if (this.apiSample[index + 1] !== undefined) {
const d = Math.abs(curr - this.apiSample[index + 1]);
if (d === 0) {
return [...acc, 0];
}
return [...acc, d / 10];
}
return acc;
}, []);
const diffTotal = diff.reduce((acc, curr) => acc + curr, 0);
if(diffTotal === 0 || diff.length === 0) {
this.apiRollingAvg = 0;
} else {
this.apiRollingAvg = diffTotal / diff.length; // api requests per second
}
this.depletedInSecs = this.apiRollingAvg === 0 ? Number.POSITIVE_INFINITY : this.client.ratelimitRemaining / this.apiRollingAvg; // number of seconds until current remaining limit is 0
// if depletion/api usage is 0 we need a sane value to use here for both displaying in logs as well as for api nanny. 10 years seems reasonable
this.apiEstDepletion = dayjs.duration((this.depletedInSecs === Number.POSITIVE_INFINITY ? {years: 10} : {seconds: this.depletedInSecs}));
if(this.influxClients.length > 0) {
const apiMeasure = new Point('apiHealth')
.intField('remaining', this.client.ratelimitRemaining)
.stringField('nannyMod', this.nannyMode ?? 'none');
if(time !== undefined) {
apiMeasure.timestamp(time);
}
if(this.apiSample.length > 1) {
const curr = this.apiSample[0];
const last = this.apiSample[1];
if(curr <= last) {
apiMeasure.intField('used', last - curr);
}
}
for(const iclient of this.influxClients) {
await iclient.writePoint(apiMeasure);
}
}
}
async guestModCleanup() {
const now = dayjs();
for(const m of this.subManagers) {
const expiredGuests = m.managerEntity.getGuests().filter(x => x.expiresAt.isBefore(now));
if(expiredGuests.length > 0) {
m.managerEntity.removeGuestById(expiredGuests.map(x => x.id));
m.logger.info(`Removed expired Guest Mods: ${expiredGuests.map(x => x.author.name).join(', ')}`);
await this.managerRepo.save(m.managerEntity);
}
}
}
async retentionCleanup() {
const now = dayjs();
if(now.isSameOrAfter(this.nextRetentionCheck)) {
@@ -783,15 +948,8 @@ class Bot {
}
async heartbeat() {
const heartbeat = `HEARTBEAT -- API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ~${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion === undefined ? 'N/A' : this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`
this.logger.info(heartbeat);
this.logger.info(`HEARTBEAT -- ${this.getApiUsageSummary()}`);
// run sanity check to see if there is a service issue
try {
await this.testClient(false);
} catch (err: any) {
throw new SimpleError(`Something isn't right! This could be a Reddit API issue (service is down? buggy??) or an issue with the Bot account. Will not run heartbeat operations and will wait until next heartbeat (${dayjs.duration(this.nextHeartbeat.diff(dayjs())).humanize()}) to try again`);
}
let startedAny = false;
for (const s of this.subManagers) {
@@ -844,6 +1002,7 @@ class Bot {
async runApiNanny() {
try {
this.logger.debug(this.getApiUsageSummary());
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
const nowish = dayjs().add(10, 'second');
if (nowish.isAfter(this.nextExpiration)) {
@@ -867,30 +1026,12 @@ class Bot {
}
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
}
const rollingSample = this.apiSample.slice(0, 7)
rollingSample.unshift(this.client.ratelimitRemaining);
this.apiSample = rollingSample;
const diff = this.apiSample.reduceRight((acc: number[], curr, index) => {
if (this.apiSample[index + 1] !== undefined) {
const d = Math.abs(curr - this.apiSample[index + 1]);
if (d === 0) {
return [...acc, 0];
}
return [...acc, d / 10];
}
return acc;
}, []);
this.apiRollingAvg = diff.reduce((acc, curr) => acc + curr, 0) / diff.length; // api requests per second
this.depletedInSecs = this.client.ratelimitRemaining / this.apiRollingAvg; // number of seconds until current remaining limit is 0
this.apiEstDepletion = dayjs.duration({seconds: this.depletedInSecs});
this.logger.debug(`API Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`);
let hardLimitHit = false;
if (typeof this.hardLimit === 'string') {
if (typeof this.hardLimit === 'string' && this.apiEstDepletion !== undefined) {
const hardDur = parseDuration(this.hardLimit);
hardLimitHit = hardDur.asSeconds() > this.apiEstDepletion.asSeconds();
} else {
} else if(typeof this.hardLimit === 'number') {
hardLimitHit = this.hardLimit > this.client.ratelimitRemaining;
}
@@ -899,7 +1040,6 @@ class Bot {
return;
}
this.logger.info(`Detected HARD LIMIT of ${this.hardLimit} remaining`, {leaf: 'Api Nanny'});
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${this.apiRollingAvg}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
this.logger.info(`All subreddit event polling has been paused`, {leaf: 'Api Nanny'});
for (const m of this.subManagers) {
@@ -916,10 +1056,10 @@ class Bot {
}
let softLimitHit = false;
if (typeof this.softLimit === 'string') {
if (typeof this.softLimit === 'string' && this.apiEstDepletion !== undefined) {
const softDur = parseDuration(this.softLimit);
softLimitHit = softDur.asSeconds() > this.apiEstDepletion.asSeconds();
} else {
} else if(typeof this.softLimit === 'number') {
softLimitHit = this.softLimit > this.client.ratelimitRemaining;
}
@@ -928,7 +1068,6 @@ class Bot {
return;
}
this.logger.info(`Detected SOFT LIMIT of ${this.softLimit} remaining`, {leaf: 'Api Nanny'});
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
this.logger.info('Trying to detect heavy usage subreddits...', {leaf: 'Api Nanny'});
let threshold = 0.5;
let offenders = this.subManagers.filter(x => {
@@ -986,6 +1125,117 @@ class Bot {
throw err;
}
}
getManagerNames(): string[] {
return this.subManagers.map(x => x.displayLabel);
}
getSubreddits(normalized = true): string[] {
return normalized ? this.subManagers.map(x => parseRedditEntity(x.subreddit.display_name).name) : this.subManagers.map(x => x.subreddit.display_name);
}
getGuestManagers(user: string): NormalizedManagerResponse[] {
return this.subManagers.filter(x => x.managerEntity.getGuests().map(y => y.author.name).includes(user)).map(x => x.toNormalizedManager());
}
getGuestSubreddits(user: string): string[] {
return this.getGuestManagers(user).map(x => x.subredditNormal);
}
getAccessibleSubreddits(user: string, subreddits: string[] = []): string[] {
const normalSubs = subreddits.map(x => parseRedditEntity(x).name);
const moderatedSubs = intersect(normalSubs, this.getSubreddits());
const guestSubs = this.getGuestSubreddits(user);
return Array.from(new Set([...guestSubs, ...moderatedSubs]));
}
canUserAccessBot(user: string, subreddits: string[] = []) {
return this.getAccessibleSubreddits(user, subreddits).length > 0;
}
canUserAccessSubreddit(subreddit: string, user: string, subreddits: string[] = []): boolean {
return this.getAccessibleSubreddits(user, subreddits).includes(parseRedditEntity(subreddit).name);
}
async addGuest(userVal: string | string[], expiresAt: Dayjs, managerVal?: string | string[]) {
let managerNames: string[];
if(typeof managerVal === 'string') {
managerNames = [managerVal];
} else if(Array.isArray(managerVal)) {
managerNames = managerVal;
} else {
managerNames = this.subManagers.map(x => x.subreddit.display_name);
}
const cleanSubredditNames = managerNames.map(x => parseRedditEntity(x).name);
const userNames = typeof userVal === 'string' ? [userVal] : userVal;
const cleanUsers = userNames.map(x => parseRedditEntity(x.trim(), 'user').name);
const users: AuthorEntity[] = [];
for(const uName of cleanUsers) {
let user = await this.authorRepo.findOne({
where: {
name: uName,
}
});
if(user === null) {
users.push(await this.authorRepo.save(new AuthorEntity({name: uName})));
} else {
users.push(user);
}
}
const newGuestData = users.map(x => ({author: x, expiresAt})) as GuestEntityData[];
let newGuests = new Map<string, Guest[]>();
const updatedManagerEntities: ManagerEntity[] = [];
for(const m of this.subManagers) {
if(!cleanSubredditNames.includes(m.subreddit.display_name)) {
continue;
}
const filteredGuests = m.managerEntity.addGuest(newGuestData);
updatedManagerEntities.push(m.managerEntity);
newGuests.set(m.displayLabel, filteredGuests.map(x => guestEntityToApiGuest(x)));
m.logger.info(`Added ${cleanUsers.join(', ')} as Guest`);
}
await this.managerRepo.save(updatedManagerEntities);
return newGuests;
}
async removeGuest(userVal: string | string[], managerVal?: string | string[]) {
let managerNames: string[];
if(typeof managerVal === 'string') {
managerNames = [managerVal];
} else if(Array.isArray(managerVal)) {
managerNames = managerVal;
} else {
managerNames = this.subManagers.map(x => x.subreddit.display_name);
}
const cleanSubredditNames = managerNames.map(x => parseRedditEntity(x).name);
const userNames = typeof userVal === 'string' ? [userVal] : userVal;
const cleanUsers = userNames.map(x => parseRedditEntity(x.trim(), 'user').name);
let newGuests = new Map<string, Guest[]>();
const updatedManagerEntities: ManagerEntity[] = [];
for(const m of this.subManagers) {
if(!cleanSubredditNames.includes(m.subreddit.display_name)) {
continue;
}
const filteredGuests = m.managerEntity.removeGuestByUser(cleanUsers);
updatedManagerEntities.push(m.managerEntity);
newGuests.set(m.displayLabel, filteredGuests.map(x => guestEntityToApiGuest(x)));
m.logger.info(`Removed ${cleanUsers.join(', ')} from Guests`);
}
await this.managerRepo.save(updatedManagerEntities);
return newGuests;
}
}
export default Bot;

View File

@@ -1,19 +1,18 @@
import {RuleSet, IRuleSet, RuleSetJson, RuleSetObjectJson, isRuleSetJSON} from "../Rule/RuleSet";
import {IRule, Rule, RuleJSONConfig} from "../Rule";
import Action, {ActionConfig, ActionJson, StructuredActionJson} from "../Action";
import {RuleSet, RuleSetConfigData, RuleSetConfigHydratedData, RuleSetConfigObject} from "../Rule/RuleSet";
import {Rule} from "../Rule";
import Action, {ActionConfig} from "../Action";
import {Logger} from "winston";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {Comment, Submission} from "snoowrap";
import {actionFactory} from "../Action/ActionFactory";
import {ruleFactory} from "../Rule/RuleFactory";
import {
asPostBehaviorOptionConfig,
boolToString,
createAjvFactory, determineNewResults,
FAIL, isRuleSetResult,
createAjvFactory,
FAIL,
isRuleSetResult,
mergeArr,
PASS,
resultsSummary,
ruleNamesFromResults,
truncateStringToLength
} from "../util";
import {
@@ -22,19 +21,17 @@ import {
CheckSummary,
JoinCondition,
NotificationEventPayload,
PostBehavior, PostBehaviorOptionConfig, PostBehaviorOptionConfigStrong, PostBehaviorStrong,
RuleResult,
RuleSetResult, UserResultCache
PostBehavior,
PostBehaviorOptionConfigStrong,
PostBehaviorStrong,
RuleSetResult
} from "../Common/interfaces";
import * as RuleSchema from '../Schema/Rule.json';
import * as RuleSetSchema from '../Schema/RuleSet.json';
import * as ActionSchema from '../Schema/Action.json';
import {
ActionJson as ActionTypeJson
} from "../Common/types";
import {SubredditResources} from "../Subreddit/SubredditResources";
import {SubredditResources} from "../Subreddit/SubredditResources";
import {ExtendedSnoowrap} from '../Utils/SnoowrapClients';
import {ActionProcessingError, CheckProcessingError, isRateLimitError} from "../Utils/Errors";
import {ActionProcessingError, CheckProcessingError} from "../Utils/Errors";
import {ErrorWithCause, stackWithCauses} from "pony-cause";
import {runCheckOptions} from "../Subreddit/Manager";
import EventEmitter from "events";
@@ -47,24 +44,28 @@ import {RunnableBase} from "../Common/RunnableBase";
import {ActionResultEntity} from "../Common/Entities/ActionResultEntity";
import {RuleSetResultEntity} from "../Common/Entities/RuleSetResultEntity";
import {CheckToRuleResultEntity} from "../Common/Entities/RunnableAssociation/CheckToRuleResultEntity";
import {
JoinOperands,
PostBehaviorType,
RecordOutputType,
recordOutputTypes
} from "../Common/Infrastructure/Atomic";
import {
MinimalOrFullFilter,
MinimalOrFullFilterJson
} from "../Common/Infrastructure/Filters/FilterShapes";
import {
CommentState,
SubmissionState,
} from "../Common/Infrastructure/Filters/FilterCriteria";
import {JoinOperands, PostBehaviorType, RecordOutputType, recordOutputTypes} from "../Common/Infrastructure/Atomic";
import {CommentState, SubmissionState,} from "../Common/Infrastructure/Filters/FilterCriteria";
import {ActivityType} from "../Common/Infrastructure/Reddit";
import {RunnableBaseJson, RunnableBaseOptions, StructuredRunnableBase} from "../Common/Infrastructure/Runnable";
import {RuleJson, StructuredRuleObjectJson, StructuredRuleSetObjectJson} from "../Common/Infrastructure/RuleShapes";
import {ActionObjectJson, StructuredActionObjectJson} from "../Common/Infrastructure/ActionShapes";
import {
RunnableBaseJson,
RunnableBaseOptions,
StructuredRunnableBase,
TypedRunnableBaseData, TypedStructuredRunnableBase
} from "../Common/Infrastructure/Runnable";
import {
RuleConfigData, RuleConfigHydratedData,
RuleConfigObject,
StructuredRuleConfigObject,
StructuredRuleSetConfigObject
} from "../Common/Infrastructure/RuleShapes";
import {
ActionConfigData,
ActionConfigHydratedData,
ActionConfigObject,
StructuredActionObjectJson
} from "../Common/Infrastructure/ActionShapes";
import {IncludesData} from "../Common/Infrastructure/Includes";
const checkLogName = truncateStringToLength(25);
@@ -155,7 +156,7 @@ export abstract class Check extends RunnableBase implements Omit<ICheck, 'postTr
if(asPostBehaviorOptionConfig(postFail)) {
const {
behavior = 'next',
recordTo = false
recordTo = ['influx']
} = postFail;
let recordStrong: RecordOutputType[] = [];
if(typeof recordTo === 'boolean') {
@@ -174,7 +175,7 @@ export abstract class Check extends RunnableBase implements Omit<ICheck, 'postTr
} else {
this.postFail = {
behavior: postFail,
recordTo: []
recordTo: ['influx']
}
}
@@ -192,12 +193,12 @@ export abstract class Check extends RunnableBase implements Omit<ICheck, 'postTr
let ruleErrors: any = [];
if (valid) {
const ruleConfig = r;
this.rules.push(new RuleSet({...ruleConfig as StructuredRuleSetObjectJson, logger: this.logger, subredditName, resources: this.resources, client: this.client}));
this.rules.push(new RuleSet({...ruleConfig as StructuredRuleSetConfigObject, logger: this.logger, subredditName, resources: this.resources, client: this.client}));
} else {
setErrors = ajv.errors;
valid = ajv.validate(RuleSchema, r);
if (valid) {
this.rules.push(ruleFactory(r as StructuredRuleObjectJson, this.logger, subredditName, this.resources, this.client));
this.rules.push(ruleFactory(r as StructuredRuleConfigObject, this.logger, subredditName, this.resources, this.client));
} else {
ruleErrors = ajv.errors;
const leastErrorType = setErrors.length < ruleErrors ? 'RuleSet' : 'Rule';
@@ -605,7 +606,7 @@ export interface ICheck extends JoinCondition, PostBehavior, RunnableBaseJson {
}
export interface CheckOptions extends Omit<ICheck, 'authorIs' | 'itemIs'>, RunnableBaseOptions {
rules: Array<StructuredRuleSetObjectJson | StructuredRuleObjectJson>;
rules: Array<RuleConfigObject | RuleSetConfigObject>;
actions: ActionConfig[];
logger: Logger;
subredditName: string;
@@ -616,7 +617,15 @@ export interface CheckOptions extends Omit<ICheck, 'authorIs' | 'itemIs'>, Runna
emitter: EventEmitter
}
export interface CheckJson extends ICheck {
/*
* Can contain actions/rules as:
* - full objects
* - string to hydrate IE "url:fsdfd"
* - named string IE "namedRule"
*
* Also can contain itemIs/authorIs as full object or named filter
* */
export interface CheckConfigData extends ICheck, RunnableBaseJson {
/**
* The type of event (new submission or new comment) this check should be run against
* @examples ["submission", "comment"]
@@ -631,7 +640,7 @@ export interface CheckJson extends ICheck {
*
* **If `rules` is an empty array or not present then `actions` are performed immediately.**
* */
rules?: Array<RuleSetJson | RuleJson>
rules?: (RuleSetConfigData | RuleConfigData | string | IncludesData)[]
/**
* The `Actions` to run after the check is successfully triggered. ALL `Actions` will run in the order they are listed
*
@@ -639,7 +648,7 @@ export interface CheckJson extends ICheck {
*
* @examples [[{"kind": "comment", "content": "this is the content of the comment", "distinguish": true}, {"kind": "lock"}]]
* */
actions?: Array<ActionTypeJson>
actions?: ActionConfigData[]
/**
* If notifications are configured and this is `true` then an `eventActioned` event will be sent when this check is triggered.
@@ -651,9 +660,49 @@ export interface CheckJson extends ICheck {
cacheUserResult?: UserResultCacheOptions;
}
export interface SubmissionCheckJson extends CheckJson {
export interface SubmissionCheckConfigData extends CheckConfigData, TypedRunnableBaseData<SubmissionState> {
kind: 'submission'
itemIs?: MinimalOrFullFilterJson<SubmissionState>
}
export interface CommentCheckConfigData extends CheckConfigData, TypedRunnableBaseData<CommentState> {
kind: 'comment'
}
/*
* Can contain actions/rules as:
* - full objects
* - named string IE "namedRule"
*
* Also can contain itemIs/authorIs as full object or named filter
* */
export interface CheckConfigHydratedData extends CheckConfigData {
rules?: (RuleSetConfigHydratedData | RuleConfigHydratedData)[]
actions?: ActionConfigHydratedData[]
}
export interface SubmissionCheckConfigHydratedData extends CheckConfigHydratedData, TypedRunnableBaseData<SubmissionState> {
kind: 'submission'
}
export interface CommentCheckConfigHydratedData extends CheckConfigHydratedData, TypedRunnableBaseData<CommentState> {
kind: 'comment'
}
/*
* All actions/rules/filters should now be full objects
* */
export interface CheckConfigObject extends Omit<CheckConfigHydratedData, 'itemIs' | 'authorIs'>, StructuredRunnableBase {
rules: Array<RuleSetConfigObject | RuleConfigObject>
actions: Array<ActionConfigObject>
}
export interface SubmissionCheckConfigObject extends Omit<CheckConfigObject, 'itemIs' | 'author'>, TypedStructuredRunnableBase<SubmissionState> {
kind: 'submission'
}
export interface CommentCheckConfigObject extends Omit<CheckConfigObject, 'itemIs' | 'author'>, TypedStructuredRunnableBase<CommentState> {
kind: 'comment'
}
/**
@@ -691,33 +740,18 @@ export const userResultCacheDefault: Required<UserResultCacheOptions> = {
runActions: true,
}
export interface CommentCheckJson extends CheckJson {
kind: 'comment'
itemIs?: MinimalOrFullFilterJson<CommentState>
}
export const asStructuredCommentCheckJson = (val: any): val is CommentCheckStructuredJson => {
export const asStructuredCommentCheckJson = (val: any): val is CommentCheckConfigObject => {
return val.kind === 'comment';
}
export const asStructuredSubmissionCheckJson = (val: any): val is SubmissionCheckStructuredJson => {
export const asStructuredSubmissionCheckJson = (val: any): val is SubmissionCheckConfigObject => {
return val.kind === 'submission';
}
export type CheckStructuredJson = SubmissionCheckStructuredJson | CommentCheckStructuredJson;
// export interface CheckStructuredJson extends CheckJson {
// rules: Array<RuleSetObjectJson | RuleObjectJson>
// actions: Array<ActionObjectJson>
// }
export type ActivityCheckConfigValue = string | IncludesData | SubmissionCheckConfigData | CommentCheckConfigData;
export interface SubmissionCheckStructuredJson extends Omit<SubmissionCheckJson, 'authorIs' | 'itemIs' | 'rules'>, StructuredRunnableBase {
rules: Array<StructuredRuleSetObjectJson | StructuredRuleObjectJson>
actions: Array<ActionObjectJson>
itemIs?: MinimalOrFullFilter<SubmissionState>
}
export type ActivityCheckConfigData = Exclude<ActivityCheckConfigValue, IncludesData>;
export interface CommentCheckStructuredJson extends Omit<CommentCheckJson, 'authorIs' | 'itemIs' | 'rules'>, StructuredRunnableBase {
rules: Array<StructuredRuleSetObjectJson | StructuredRuleObjectJson>
actions: Array<ActionObjectJson>
itemIs?: MinimalOrFullFilter<CommentState>
}
export type ActivityCheckConfigHydratedData = SubmissionCheckConfigHydratedData | CommentCheckConfigHydratedData;
export type ActivityCheckObject = SubmissionCheckConfigObject | CommentCheckConfigObject;

View File

@@ -6,4 +6,5 @@ export interface ConfigToObjectOptions {
location?: string,
jsonDocFunc?: (content: string, location?: string) => AbstractConfigDocument<OperatorJsonConfig>,
yamlDocFunc?: (content: string, location?: string) => AbstractConfigDocument<YamlDocument>
allowArrays?: boolean
}

View File

@@ -19,6 +19,7 @@ export const parseFromJsonOrYamlToObject = (content: string, options?: ConfigToO
location,
jsonDocFunc = (content: string, location?: string) => new JsonConfigDocument(content, location),
yamlDocFunc = (content: string, location?: string) => new YamlConfigDocument(content, location),
allowArrays = false,
} = options || {};
try {

View File

@@ -1,11 +1,14 @@
import YamlConfigDocument from "../YamlConfigDocument";
import JsonConfigDocument from "../JsonConfigDocument";
import {YAMLMap, YAMLSeq, Pair, Scalar} from "yaml";
import {BotInstanceJsonConfig, OperatorJsonConfig} from "../../interfaces";
import {BotInstanceJsonConfig, OperatorJsonConfig, WebCredentials} from "../../interfaces";
import {assign} from 'comment-json';
// Contract shared by the YAML- and JSON-backed operator config documents so the
// first-time-setup flow can mutate either format through one interface.
export interface OperatorConfigDocumentInterface {
addBot(botData: BotInstanceJsonConfig): void;
setFriendlyName(name: string): void;
setWebCredentials(data: Required<WebCredentials>): void;
setOperator(name: string): void;
toJS(): OperatorJsonConfig;
}
@@ -29,6 +32,18 @@ export class YamlOperatorConfigDocument extends YamlConfigDocument implements Op
}
}
// Writes the friendly display name at api.friendly in the YAML document.
// NOTE(review): stored under 'api' rather than top level — confirm intended; the JSON sibling does the same.
setFriendlyName(name: string) {
this.parsed.addIn(['api', 'friendly'], name);
}
// Stores the complete set of web OAuth credentials at web.credentials in the YAML document.
setWebCredentials(data: Required<WebCredentials>) {
this.parsed.addIn(['web', 'credentials'], data);
}
// Records the operator's reddit username at operator.name in the YAML document.
setOperator(name: string) {
this.parsed.addIn(['operator', 'name'], name);
}
// Plain-object view of the parsed YAML document (delegates to the base document class).
toJS(): OperatorJsonConfig {
return super.toJS();
}
@@ -68,6 +83,23 @@ export class JsonOperatorConfigDocument extends JsonConfigDocument implements Op
}
}
// Merges the friendly name into any existing api section (preserves sibling keys).
setFriendlyName(name: string) {
const api = this.parsed.api || {};
this.parsed.api = {...api, friendly: name};
}
/**
 * Merge the web OAuth credentials into the config, keeping any existing `web` keys intact.
 */
setWebCredentials(data: Required<WebCredentials>) {
    const existingWeb = this.parsed.web !== undefined ? this.parsed.web : {};
    this.parsed.web = {...existingWeb, credentials: data};
}
/**
 * Record the operator's reddit username.
 *
 * Merges into any existing operator section rather than replacing the whole
 * object, matching how setFriendlyName/setWebCredentials preserve sibling keys.
 */
setOperator(name: string) {
    const operator = this.parsed.operator || {};
    this.parsed.operator = {...operator, name};
}
// Plain-object view of the parsed JSON document (delegates to the base document class).
toJS(): OperatorJsonConfig {
return super.toJS();
}

View File

@@ -1,11 +1,11 @@
import {Entity, Column, PrimaryGeneratedColumn, ManyToOne, PrimaryColumn, OneToMany, OneToOne, Index} from "typeorm";
import {Entity, Column, ManyToOne, PrimaryColumn, OneToMany, Index} from "typeorm";
import {AuthorEntity} from "./AuthorEntity";
import {Subreddit} from "./Subreddit";
import {CMEvent} from "./CMEvent";
import Submission from "snoowrap/dist/objects/Submission";
import {Comment} from "snoowrap";
import {asComment, getActivityAuthorName, parseRedditFullname, redditThingTypeToPrefix} from "../../util";
import {ActivityType} from "../Infrastructure/Reddit";
import {activityReports, ActivityType, Report, SnoowrapActivity} from "../Infrastructure/Reddit";
import {ActivityReport} from "./ActivityReport";
import dayjs, {Dayjs} from "dayjs";
export interface ActivityEntityOptions {
id: string
@@ -15,6 +15,7 @@ export interface ActivityEntityOptions {
permalink: string
author: AuthorEntity
submission?: Activity
reports?: ActivityReport[]
}
@Entity()
@@ -69,6 +70,9 @@ export class Activity {
@OneToMany(type => Activity, obj => obj.submission, {nullable: true})
comments!: Activity[];
@OneToMany(type => ActivityReport, act => act.activity, {cascade: ['insert'], eager: true})
reports: ActivityReport[] | undefined
constructor(data?: ActivityEntityOptions) {
if(data !== undefined) {
this.type = data.type;
@@ -78,10 +82,76 @@ export class Activity {
this.permalink = data.permalink;
this.author = data.author;
this.submission = data.submission;
this.reports = data.reports !== undefined ? data.reports : undefined;
}
}
static fromSnoowrapActivity(subreddit: Subreddit, activity: (Submission | Comment)) {
/**
 * Reconcile the persisted report rows with the live report state on a reddit activity.
 *
 * New (unsynced) reports are appended with synthetic createdAt timestamps spread
 * between NOW and the last known-good time so that time-window report filters
 * ("> N reports in X minutes") behave sensibly even for back-filled reports.
 * Returns true when at least one new report was appended, false otherwise.
 *
 * @param {SnoowrapActivity} activity
 * @param {Dayjs|undefined} lastKnownStateTimestamp Override the last good state (useful when tracked through polling)
 * */
syncReports(activity: SnoowrapActivity, lastKnownStateTimestamp?: Dayjs) {
// only do work when reddit reports more than we have recorded
if(activity.num_reports > 0 && (this.reports === undefined || activity.num_reports !== this.reports.length)) {
if(this.reports === undefined) {
this.reports = [];
}
const reports = activityReports(activity);
// match up existing reports
const usedReportEntities: string[] = [];
const unsyncedReports: Report[] = [];
for(const r of reports) {
// pair each live report with at most one stored row; usedReportEntities prevents
// one stored row from matching multiple identical live reports
const matchedEntity = this.reports.find(x => !usedReportEntities.includes(x.id) && x.matchReport(r));
if(matchedEntity !== undefined) {
usedReportEntities.push(matchedEntity.id);
} else {
// found an unsynced report
unsyncedReports.push(r);
}
}
// ideally we only have one report but it's possible (probable) there are more
//
// to simplify tracking over time we will spread out the "create time" for each report to be between NOW
// and the last recorded report, or if no reports then the create time of the activity
// -- the assumptions about tracking should be good enough for most users because:
// * default poll interval is 30 seconds so even if there are more than one reports in that time the resolution is high enough for accurate usage (most mods will use "> 1 report in 1 minute" or larger timescales)
// * for populating existing reports (CM has not been tracking since activity creation) we don't want to bunch up all reports at the timestamp which could create false positives,
// it's more likely that reports would be spread out than all occurring at the same time.
// TODO additionally, will allow users to specify minimum required granularity to use when filtering by reports over time
let lastRecordedTime = lastKnownStateTimestamp;
if(lastKnownStateTimestamp === undefined) {
lastRecordedTime = this.reports.length > 0 ?
// get the latest create date for existing reports
this.reports.reduce((acc, curr) => curr.createdAt.isAfter(acc) ? curr.createdAt : acc, dayjs('2000-1-1'))
// if no reports then use activity create date
: dayjs(activity.created_utc * 1000);
}
// find the amount of time between now and last good timestamp
const missingTimespan = dayjs.duration(dayjs().diff(lastRecordedTime));
const granularity = Math.floor(missingTimespan.asSeconds());
// each report will have its create date spaced out (mostly) equally between now and the last good timestamp
//
// if only one report stick it in exact middle
// if more than one than decrease span by 1/4 so that we don't end up having reports dead-on the last timestamp
const increment = Math.floor(unsyncedReports.length === 1 ? (granularity / 2) : ((granularity / 1.25) / unsyncedReports.length));
for(let i = 0; i < unsyncedReports.length; i++) {
const r = new ActivityReport({...unsyncedReports[i], activity: this, granularity});
r.createdAt = dayjs().subtract(increment * (i + 1), 'seconds');
this.reports.push(r);
}
return true;
}
return false;
}
static fromSnoowrapActivity(subreddit: Subreddit, activity: SnoowrapActivity, lastKnownStateTimestamp?: dayjs.Dayjs | undefined) {
let submission: Activity | undefined;
let type: ActivityType = 'submission';
let content: string;
@@ -99,7 +169,7 @@ export class Activity {
const author = new AuthorEntity();
author.name = getActivityAuthorName(activity.author);
return new Activity({
const entity = new Activity({
id: activity.name,
subreddit,
type,
@@ -107,6 +177,10 @@ export class Activity {
permalink: activity.permalink,
author,
submission
})
});
entity.syncReports(activity, lastKnownStateTimestamp);
return entity;
}
}

View File

@@ -0,0 +1,57 @@
import {
Entity,
Column,
ManyToOne, JoinColumn, AfterLoad,
} from "typeorm";
import {Activity} from "./Activity";
import {ManagerEntity} from "./ManagerEntity";
import {TimeAwareRandomBaseEntity} from "./Base/TimeAwareRandomBaseEntity";
import {Report, ReportType} from "../Infrastructure/Reddit";
// A single report (mod or user) attached to a reddit activity, persisted so report
// counts can be filtered over time windows.
@Entity()
export class ActivityReport extends TimeAwareRandomBaseEntity {

// report reason text as supplied by the reporter
@Column({nullable: false, length: 500})
reason!: string

// 'mod' or 'user' style report classification
@Column({nullable: false, length: 20})
type!: ReportType

// reporting user, when known (null in DB when anonymous/unavailable)
@Column({nullable: true, length: 100})
author?: string

// seconds of timespan this report's synthetic createdAt was spread across
// (see Activity.syncReports); 0 when tracked live
@Column("int", {nullable: false})
granularity: number = 0;

@ManyToOne(type => Activity, act => act.reports, {cascade: ['update']})
@JoinColumn({name: 'activityId'})
activity!: Activity;

@Column({nullable: false, name: 'activityId'})
activityId!: string

constructor(data?: Report & { activity: Activity, granularity: number }) {
super();
if (data !== undefined) {
this.reason = data.reason;
this.type = data.type;
this.author = data.author;
this.activity = data.activity;
this.activityId = data.activity.id;
this.granularity = data.granularity
}
}

// Whether a live reddit report corresponds to this stored row (reason + type + author)
matchReport(report: Report): boolean {
return this.reason === report.reason
&& this.type === report.type
&& this.author === report.author;
}

// Normalize DB NULL back to undefined so matchReport compares correctly against live data
@AfterLoad()
convertPrimitives() {
if(this.author === null) {
this.author = undefined;
}
}
}

View File

@@ -12,4 +12,10 @@ export class AuthorEntity {
@OneToMany(type => Activity, act => act.author)
activities!: Activity[]
// Partial-data constructor: TypeORM also instantiates entities with no args during hydration
constructor(data?: any) {
if(data !== undefined) {
this.name = data.name;
}
}
}

View File

@@ -1,13 +1,68 @@
import {Entity, Column, PrimaryColumn, OneToMany, PrimaryGeneratedColumn} from "typeorm";
import {ManagerEntity} from "./ManagerEntity";
import {RandomIdBaseEntity} from "./Base/RandomIdBaseEntity";
import {BotGuestEntity, ManagerGuestEntity} from "./Guest/GuestEntity";
import {Guest, GuestEntityData, HasGuests} from "./Guest/GuestInterfaces";
import {SubredditInvite} from "./SubredditInvite";
@Entity()
export class Bot extends RandomIdBaseEntity {
export class Bot extends RandomIdBaseEntity implements HasGuests {
@Column("varchar", {length: 200})
name!: string;
@OneToMany(type => ManagerEntity, obj => obj.bot)
managers!: Promise<ManagerEntity[]>
@OneToMany(type => BotGuestEntity, obj => obj.guestOf, {eager: true, cascade: ['insert', 'remove', 'update']})
guests!: BotGuestEntity[]
@OneToMany(type => SubredditInvite, obj => obj.bot, {eager: true, cascade: ['insert', 'remove', 'update']})
subredditInvites!: SubredditInvite[]
getGuests() {
const g = this.guests;
if (g === undefined) {
return [];
}
//return g.map(x => ({id: x.id, name: x.author.name, expiresAt: x.expiresAt})) as Guest[];
return g;
}
addGuest(val: GuestEntityData | GuestEntityData[]) {
const reqGuests = Array.isArray(val) ? val : [val];
const guests = this.guests;
for (const g of reqGuests) {
const existing = guests.find(x => x.author.name.toLowerCase() === g.author.name.toLowerCase());
if (existing !== undefined) {
// update existing guest expiresAt
existing.expiresAt = g.expiresAt;
} else {
guests.push(new BotGuestEntity({...g, guestOf: this}));
}
}
this.guests = guests
return guests;
}
removeGuestById(val: string | string[]) {
const reqGuests = Array.isArray(val) ? val : [val];
const guests = this.guests;
const filteredGuests = guests.filter(x => reqGuests.includes(x.id));
this.guests = filteredGuests;
return filteredGuests;
}
removeGuestByUser(val: string | string[]) {
const reqGuests = (Array.isArray(val) ? val : [val]).map(x => x.trim().toLowerCase());
const guests = this.guests;
const filteredGuests = guests.filter(x => reqGuests.includes(x.author.name.toLowerCase()));
this.guests =filteredGuests;
return filteredGuests;
}
removeGuests() {
this.guests = []
return [];
}
}

View File

@@ -1,13 +1,12 @@
import {Column, Entity, PrimaryColumn} from "typeorm";
import {TimeAwareBaseEntity} from "../Entities/Base/TimeAwareBaseEntity";
import {TimeAwareBaseEntity} from "./Base/TimeAwareBaseEntity";
import {InviteData} from "../../Web/Common/interfaces";
import dayjs, {Dayjs} from "dayjs";
import {TimeAwareRandomBaseEntity} from "./Base/TimeAwareRandomBaseEntity";
import {parseRedditEntity} from "../../util";
@Entity()
export class Invite extends TimeAwareBaseEntity implements InviteData {
@PrimaryColumn('varchar', {length: 255})
id!: string
@Entity({name: 'BotInvite'})
export class BotInvite extends TimeAwareRandomBaseEntity implements InviteData {
@Column("varchar", {length: 50})
clientId!: string;
@@ -30,6 +29,12 @@ export class Invite extends TimeAwareBaseEntity implements InviteData {
@Column()
overwrite?: boolean;
@Column("simple-json")
guests?: string[]
@Column("text")
initialConfig?: string
@Column("simple-json", {nullable: true})
subreddits?: string[];
@@ -51,10 +56,9 @@ export class Invite extends TimeAwareBaseEntity implements InviteData {
}
}
constructor(data?: InviteData & { id: string, expiresIn?: number }) {
constructor(data?: InviteData) {
super();
if (data !== undefined) {
this.id = data.id;
this.permissions = data.permissions;
this.subreddits = data.subreddits;
this.instance = data.instance;
@@ -63,9 +67,16 @@ export class Invite extends TimeAwareBaseEntity implements InviteData {
this.redirectUri = data.redirectUri;
this.creator = data.creator;
this.overwrite = data.overwrite;
this.initialConfig = data.initialConfig;
if(data.guests !== undefined && data.guests !== null && data.guests.length > 0) {
const cleanGuests = data.guests.filter(x => x !== '').map(x => parseRedditEntity(x, 'user').name);
if(cleanGuests.length > 0) {
this.guests = cleanGuests;
}
}
if (data.expiresIn !== undefined && data.expiresIn !== 0) {
this.expiresAt = dayjs().add(data.expiresIn, 'seconds');
if (data.expiresAt !== undefined && data.expiresAt !== 0) {
this.expiresAt = dayjs(data.expiresAt);
}
}
}

View File

@@ -0,0 +1,119 @@
import {ChildEntity, Column, Entity, JoinColumn, ManyToOne, TableInheritance} from "typeorm";
import {AuthorEntity} from "../AuthorEntity";
import { ManagerEntity } from "../ManagerEntity";
import { Bot } from "../Bot";
import {TimeAwareRandomBaseEntity} from "../Base/TimeAwareRandomBaseEntity";
import dayjs, {Dayjs} from "dayjs";
import {Guest, GuestAll, GuestEntityData} from "./GuestInterfaces";
// Constructor payload for a guest row: the guest data plus the entity it is a guest of
export interface GuestOptions<T extends ManagerEntity | Bot> extends GuestEntityData {
guestOf: T
}
// Single-table inheritance: ManagerGuestEntity and BotGuestEntity share the 'Guests'
// table, discriminated by the varchar 'type' column.
@Entity({name: 'Guests'})
@TableInheritance({ column: { type: "varchar", name: "type" } })
export abstract class GuestEntity<T extends ManagerEntity | Bot> extends TimeAwareRandomBaseEntity {

@ManyToOne(type => AuthorEntity, undefined, {cascade: ['insert'], eager: true})
@JoinColumn({name: 'authorName'})
author!: AuthorEntity;

// Raw stored expiration; nullable in DB, defaults to "now" at construction time
@Column({ name: 'expiresAt', nullable: true })
_expiresAt?: Date = new Date();

// NOTE(review): when _expiresAt is undefined this returns dayjs(undefined), i.e. "now" —
// callers expecting "no expiration" should use expiresAtTimestamp() which guards; confirm intended.
public get expiresAt(): Dayjs {
return dayjs(this._expiresAt);
}

public set expiresAt(d: Dayjs | undefined) {
if(d === undefined) {
this._expiresAt = d;
} else {
this._expiresAt = d.utc().toDate();
}
}

// Epoch ms of expiration, or undefined when the guest has no stored expiration
expiresAtTimestamp(): number | undefined {
if(this._expiresAt !== undefined) {
return this.expiresAt.valueOf();
}
return undefined;
}

protected constructor(data?: GuestOptions<T>) {
super();
if(data !== undefined) {
this.author = data.author;
this.expiresAt = data.expiresAt;
}
}
}
// Guest scoped to a single subreddit manager
@ChildEntity('manager')
export class ManagerGuestEntity extends GuestEntity<ManagerEntity> {
// discriminator value persisted in the shared 'type' column
type: string = 'manager';

@ManyToOne(type => ManagerEntity, act => act.guests, {nullable: false, orphanedRowAction: 'delete'})
@JoinColumn({name: 'guestOfId', referencedColumnName: 'id'})
guestOf!: ManagerEntity

constructor(data?: GuestOptions<ManagerEntity>) {
super(data);
if(data !== undefined) {
this.guestOf = data.guestOf;
}
}
}
// Guest scoped to an entire bot
@ChildEntity('bot')
export class BotGuestEntity extends GuestEntity<Bot> {
    // discriminator value persisted in the shared 'type' column
    type: string = 'bot';

    @ManyToOne(type => Bot, act => act.guests, {nullable: false, orphanedRowAction: 'delete'})
    @JoinColumn({name: 'guestOfId', referencedColumnName: 'id'})
    guestOf!: Bot

    constructor(data?: GuestOptions<Bot>) {
        super(data);
        if(data !== undefined) {
            this.guestOf = data.guestOf;
            // author is already assigned by the GuestEntity constructor;
            // the redundant re-assignment (absent from ManagerGuestEntity) was removed
        }
    }
}
/**
 * Map a persisted guest entity to its lean API representation.
 */
export const guestEntityToApiGuest = (val: GuestEntity<any>): Guest => ({
    id: val.id,
    name: val.author.name,
    expiresAt: val.expiresAtTimestamp(),
});
// A guest annotated with the subreddit that granted access
interface ContextualGuest extends Guest {
    subreddit: string
}

/**
 * Collapse per-subreddit guest lists into one entry per user.
 *
 * Each user's entry lists every subreddit they are a guest of and carries the
 * earliest defined expiration seen across those subreddits.
 */
export const guestEntitiesToAll = (val: Map<string, Guest[]>): GuestAll[] => {
    const byName = new Map<string, GuestAll>();
    for (const [subreddit, guests] of val.entries()) {
        for (const guest of guests) {
            const existing = byName.get(guest.name);
            if (existing === undefined) {
                byName.set(guest.name, {name: guest.name, expiresAt: guest.expiresAt, subreddits: [subreddit]});
                continue;
            }
            if (!existing.subreddits.includes(subreddit)) {
                existing.subreddits.push(subreddit);
            }
            // keep the earliest defined expiration
            const replaceExpiry = (existing.expiresAt === undefined && guest.expiresAt !== undefined)
                || (existing.expiresAt !== undefined && guest.expiresAt !== undefined && guest.expiresAt < existing.expiresAt);
            if (replaceExpiry) {
                existing.expiresAt = guest.expiresAt;
            }
        }
    }
    return Array.from(byName.values());
}

View File

@@ -0,0 +1,28 @@
import { Dayjs } from "dayjs"
import {AuthorEntity} from "../AuthorEntity";
// API-facing representation of a single guest grant
export interface Guest {
id: string
name: string
// epoch ms; absent when the grant does not expire
expiresAt?: number
}
// A user's guest access aggregated across every subreddit that granted it
export interface GuestAll {
name: string
expiresAt?: number
subreddits: string[]
}
// Data required to create or update a guest row
export interface GuestEntityData {
expiresAt?: Dayjs
author: AuthorEntity
}
// Common guest-management surface implemented by Bot and ManagerEntity
export interface HasGuests {
getGuests: () => GuestEntityData[]
addGuest: (val: GuestEntityData | GuestEntityData[]) => GuestEntityData[]
removeGuestById: (val: string | string[]) => GuestEntityData[]
removeGuestByUser: (val: string | string[]) => GuestEntityData[]
removeGuests: () => GuestEntityData[]
}

View File

@@ -15,12 +15,14 @@ import {RunEntity} from "./RunEntity";
import {Bot} from "./Bot";
import {RandomIdBaseEntity} from "./Base/RandomIdBaseEntity";
import {ManagerRunState} from "./EntityRunState/ManagerRunState";
import { QueueRunState } from "./EntityRunState/QueueRunState";
import {QueueRunState} from "./EntityRunState/QueueRunState";
import {EventsRunState} from "./EntityRunState/EventsRunState";
import {RulePremise} from "./RulePremise";
import {ActionPremise} from "./ActionPremise";
import { RunningStateTypes } from "../../Subreddit/Manager";
import {RunningStateTypes} from "../../Subreddit/Manager";
import {EntityRunState} from "./EntityRunState/EntityRunState";
import {GuestEntity, ManagerGuestEntity} from "./Guest/GuestEntity";
import {Guest, GuestEntityData, HasGuests} from "./Guest/GuestInterfaces";
export interface ManagerEntityOptions {
name: string
@@ -36,7 +38,7 @@ export type RunningStateEntities = {
};
@Entity({name: 'Manager'})
export class ManagerEntity extends RandomIdBaseEntity implements RunningStateEntities {
export class ManagerEntity extends RandomIdBaseEntity implements RunningStateEntities, HasGuests {
@Column("varchar", {length: 200})
name!: string;
@@ -56,12 +58,15 @@ export class ManagerEntity extends RandomIdBaseEntity implements RunningStateEnt
@OneToMany(type => ActionPremise, obj => obj.manager)
actions!: Promise<ActionPremise[]>
@OneToMany(type => CheckEntity, obj => obj.manager) // note: we will create author property in the Photo class below
@OneToMany(type => CheckEntity, obj => obj.manager)
checks!: Promise<CheckEntity[]>
@OneToMany(type => RunEntity, obj => obj.manager) // note: we will create author property in the Photo class below
@OneToMany(type => RunEntity, obj => obj.manager)
runs!: Promise<RunEntity[]>
@OneToMany(type => ManagerGuestEntity, obj => obj.guestOf, {eager: true, cascade: ['insert', 'remove', 'update']})
guests!: ManagerGuestEntity[]
@OneToOne(() => EventsRunState, {cascade: ['insert', 'update'], eager: true})
@JoinColumn()
eventsState!: EventsRunState
@@ -85,4 +90,50 @@ export class ManagerEntity extends RandomIdBaseEntity implements RunningStateEnt
this.managerState = data.managerState;
}
}
/** All current guests, or an empty list when the eager relation was not loaded. */
getGuests(): ManagerGuestEntity[] {
const g = this.guests;
if (g === undefined) {
return [];
}
//return g.map(x => ({id: x.id, name: x.author.name, expiresAt: x.expiresAt})) as Guest[];
return g;
}
/**
 * Add guest(s) to this subreddit manager. An existing guest (matched by author
 * name, case-insensitive) has its expiration updated instead of being duplicated.
 */
addGuest(val: GuestEntityData | GuestEntityData[]) {
const reqGuests = Array.isArray(val) ? val : [val];
const guests = this.getGuests();
for (const g of reqGuests) {
const existing = guests.find(x => x.author.name.toLowerCase() === g.author.name.toLowerCase());
if (existing !== undefined) {
// update existing guest expiresAt
existing.expiresAt = g.expiresAt;
} else {
guests.push(new ManagerGuestEntity({...g, guestOf: this}));
}
}
this.guests = guests;
return guests;
}
/** Remove guests whose row id matches any of the provided id(s); returns remaining guests. */
removeGuestById(val: string | string[]) {
    const idsToRemove = Array.isArray(val) ? val : [val];
    const remaining = this.getGuests().filter(guest => !idsToRemove.includes(guest.id));
    this.guests = remaining;
    return remaining;
}
/** Remove guests by author name (trimmed, case-insensitive); returns remaining guests. */
removeGuestByUser(val: string | string[]) {
    const namesToRemove = (Array.isArray(val) ? val : [val]).map(name => name.trim().toLowerCase());
    const remaining = this.getGuests().filter(guest => !namesToRemove.includes(guest.author.name.toLowerCase()));
    this.guests = remaining;
    return remaining;
}
/** Remove all guests from this manager. */
removeGuests() {
this.guests = [];
return [];
}
}

View File

@@ -0,0 +1,54 @@
import {Column, Entity, JoinColumn, ManyToOne, PrimaryColumn} from "typeorm";
import {InviteData, SubredditInviteData} from "../../Web/Common/interfaces";
import dayjs, {Dayjs} from "dayjs";
import {TimeAwareRandomBaseEntity} from "./Base/TimeAwareRandomBaseEntity";
import {AuthorEntity} from "./AuthorEntity";
import {Bot} from "./Bot";
// An invitation for a bot to moderate a subreddit, optionally seeding guests and an initial config
@Entity()
export class SubredditInvite extends TimeAwareRandomBaseEntity implements SubredditInviteData {

// NOTE(review): no @Column decorator here — as written TypeORM will not persist
// 'subreddit'; confirm whether a decorator was dropped.
subreddit!: string;

// usernames to grant guest access once the invite is accepted
@Column("simple-json", {nullable: true})
guests?: string[]

// wiki/config content to seed the subreddit with
@Column("text")
initialConfig?: string

@ManyToOne(type => Bot, bot => bot.subredditInvites, {nullable: false, orphanedRowAction: 'delete'})
@JoinColumn({name: 'botId', referencedColumnName: 'id'})
bot!: Bot;

// Raw stored expiration (nullable); exposed through the Dayjs accessors below
@Column({name: 'expiresAt', nullable: true})
_expiresAt?: Date;

public get expiresAt(): Dayjs | undefined {
if (this._expiresAt === undefined) {
return undefined;
}
return dayjs(this._expiresAt);
}

public set expiresAt(d: Dayjs | undefined) {
if (d === undefined) {
this._expiresAt = d;
} else {
this._expiresAt = d.utc().toDate();
}
}

// expiresIn is a relative TTL in seconds; 0 (or undefined) means the invite never expires
constructor(data?: SubredditInviteData & { expiresIn?: number }) {
super();
if (data !== undefined) {
this.subreddit = data.subreddit;
this.initialConfig = data.initialConfig;
this.guests = data.guests;
if (data.expiresIn !== undefined && data.expiresIn !== 0) {
this.expiresAt = dayjs().add(data.expiresIn, 'seconds');
}
}
}
}

View File

@@ -1,15 +1,17 @@
import fetch from "node-fetch";
import {Submission} from "snoowrap/dist/objects";
import {URL} from "url";
import {absPercentDifference, getSharpAsync, isValidImageURL} from "../util";
import {absPercentDifference, getExtension, getSharpAsync, isValidImageURL} from "../util";
import {Sharp} from "sharp";
import {blockhash} from "./blockhash/blockhash";
import {SimpleError} from "../Utils/Errors";
import {blockhashAndFlipped} from "./blockhash/blockhash";
import {CMError, SimpleError} from "../Utils/Errors";
import {FileHandle, open} from "fs/promises";
import {ImageHashCacheData} from "./Infrastructure/Atomic";
export interface ImageDataOptions {
width?: number,
height?: number,
url: string,
path: URL,
variants?: ImageData[]
}
@@ -17,19 +19,20 @@ class ImageData {
width?: number
height?: number
url: URL
path: URL
variants: ImageData[] = []
preferredResolution?: [number, number]
sharpImg!: Sharp
hashResult!: string
hashResult?: string
hashResultFlipped?: string
actualResolution?: [number, number]
constructor(data: ImageDataOptions, aggressive = false) {
this.width = data.width;
this.height = data.height;
this.url = new URL(data.url);
if (!aggressive && !isValidImageURL(`${this.url.origin}${this.url.pathname}`)) {
throw new Error('URL did not end with a valid image extension');
this.path = data.path;
if (!aggressive && !isValidImageURL(`${this.path.origin}${this.path.pathname}`)) {
throw new Error('Path did not end with a valid image extension');
}
this.variants = data.variants || [];
}
@@ -39,55 +42,90 @@ class ImageData {
return await (await this.sharp()).clone().toFormat(format).toBuffer();
}
async hash(bits: number, useVariantIfPossible = true): Promise<string> {
if(this.hashResult === undefined) {
async hash(bits: number = 16, useVariantIfPossible = true): Promise<Required<ImageHashCacheData>> {
if (this.hashResult === undefined || this.hashResultFlipped === undefined) {
let ref: ImageData | undefined;
if(useVariantIfPossible && this.preferredResolution !== undefined) {
if (useVariantIfPossible && this.preferredResolution !== undefined) {
ref = this.getSimilarResolutionVariant(this.preferredResolution[0], this.preferredResolution[1]);
}
if(ref === undefined) {
if (ref === undefined) {
ref = this;
}
this.hashResult = await blockhash((await ref.sharp()).clone(), bits);
const [hash, hashFlipped] = await blockhashAndFlipped((await ref.sharp()).clone(), bits);
this.hashResult = hash;
this.hashResultFlipped = hashFlipped;
}
return this.hashResult;
return {original: this.hashResult, flipped: this.hashResultFlipped};
}
async sharp(): Promise<Sharp> {
if (this.sharpImg === undefined) {
let animated = false;
let getBuffer: () => Promise<Buffer>;
let fileHandle: FileHandle | undefined;
try {
const response = await fetch(this.url.toString())
if (response.ok) {
const ct = response.headers.get('Content-Type');
if (ct !== null && ct.includes('image')) {
const sFunc = await getSharpAsync();
// if image is animated then we want to extract the first frame and convert it to a regular image
// so we can compare two static images later (also because sharp can't use resize() on animated images)
if(['gif','webp'].some(x => ct.includes(x))) {
this.sharpImg = await sFunc(await (await sFunc(await response.buffer(), {pages: 1, animated: false})).png().toBuffer());
} else {
this.sharpImg = await sFunc(await response.buffer());
}
const meta = await this.sharpImg.metadata();
if (this.width === undefined || this.height === undefined) {
this.width = meta.width;
this.height = meta.height;
}
this.actualResolution = [meta.width as number, meta.height as number];
} else {
throw new SimpleError(`Content-Type for fetched URL ${this.url} did not contain "image"`);
if (this.path.protocol === 'file:') {
try {
animated = ['gif', 'webp'].includes(getExtension(this.path.pathname));
fileHandle = await open(this.path, 'r');
getBuffer = async () => await (fileHandle as FileHandle).readFile();
} catch (err: any) {
throw new CMError(`Unable to retrieve local file ${this.path.toString()}`, {cause: err});
}
} else {
throw new SimpleError(`URL response was not OK: (${response.status})${response.statusText}`);
try {
const response = await fetch(this.path.toString())
if (response.ok) {
const ct = response.headers.get('Content-Type');
if (ct !== null && ct.includes('image')) {
animated = ['gif', 'webp'].some(x => ct.includes(x));
getBuffer = async () => await response.buffer();
} else {
throw new SimpleError(`Content-Type for fetched URL ${this.path.toString()} did not contain "image"`);
}
} else {
throw new SimpleError(`Fetching ${this.path.toString()} => URL response was not OK: (${response.status})${response.statusText}`);
}
} catch (err: any) {
if (!(err instanceof SimpleError)) {
throw new CMError(`Error occurred while fetching response from URL ${this.path.toString()}`, {cause: err});
} else {
throw err;
}
}
}
} catch (err: any) {
throw new CMError('Unable to fetch image resource', {cause: err, isSerious: false});
}
try {
const sFunc = await getSharpAsync();
// if image is animated then we want to extract the first frame and convert it to a regular image
// so we can compare two static images later (also because sharp can't use resize() on animated images)
if (animated) {
this.sharpImg = await sFunc(await (await sFunc(await getBuffer(), {
pages: 1,
animated: false
}).trim().greyscale()).png().withMetadata().toBuffer());
} else {
this.sharpImg = await sFunc(await sFunc(await getBuffer()).trim().greyscale().withMetadata().toBuffer());
}
if(fileHandle !== undefined) {
await fileHandle.close();
}
const meta = await this.sharpImg.metadata();
if (this.width === undefined || this.height === undefined) {
this.width = meta.width;
this.height = meta.height;
}
this.actualResolution = [meta.width as number, meta.height as number];
} catch (err: any) {
if(!(err instanceof SimpleError)) {
throw new Error(`Error occurred while fetching response from URL: ${err.message}`);
} else {
throw err;
}
throw new CMError('Error occurred while converting image buffer to Sharp object', {cause: err});
}
}
return this.sharpImg;
@@ -107,8 +145,8 @@ class ImageData {
return this.width !== undefined && this.height !== undefined;
}
get baseUrl() {
return `${this.url.origin}${this.url.pathname}`;
get basePath() {
return `${this.path.origin}${this.path.pathname}`;
}
setPreferredResolutionByWidth(prefWidth: number) {
@@ -225,10 +263,23 @@ class ImageData {
return [refSharp, compareSharp, width, height];
}
// Snapshot the computed perceptual hashes for external caching
// (values are undefined when hash() has not been run yet)
toHashCache(): ImageHashCacheData {
return {
original: this.hashResult,
flipped: this.hashResultFlipped
}
}
// Restore previously-computed hashes from cache, avoiding a re-download and re-hash
setFromHashCache(data: ImageHashCacheData) {
const {original, flipped} = data;
this.hashResult = original;
this.hashResultFlipped = flipped;
}
static fromSubmission(sub: Submission, aggressive = false): ImageData {
const url = new URL(sub.url);
const data: any = {
url,
path: url,
};
let variants = [];
if (sub.preview !== undefined && sub.preview.enabled && sub.preview.images.length > 0) {
@@ -237,7 +288,7 @@ class ImageData {
data.width = ref.width;
data.height = ref.height;
variants = firstImg.resolutions.map(x => new ImageData(x));
variants = firstImg.resolutions.map(x => new ImageData({...x, path: new URL(x.url)}));
data.variants = variants;
}
return new ImageData(data, aggressive);

View File

@@ -0,0 +1,115 @@
import {InfluxConfig} from "./interfaces";
import {InfluxDB, Point, WriteApi, setLogger} from "@influxdata/influxdb-client";
import {HealthAPI} from "@influxdata/influxdb-client-apis";
import dayjs, {Dayjs} from "dayjs";
import {Logger} from "winston";
import {mergeArr} from "../../util";
import {CMError} from "../../Utils/Errors";
// InfluxConfig plus an optional pre-built client/ready state, used when spawning child clients
export interface InfluxClientConfig extends InfluxConfig {
client?: InfluxDB
ready?: boolean
}
/**
 * Thin wrapper around the InfluxDB v2 client: lazily health-checks the endpoint,
 * drops writes until it is reachable, and supports child clients that share the
 * underlying connection with extra default tags.
 */
export class InfluxClient {

    config: InfluxConfig;
    client: InfluxDB;
    write: WriteApi;
    health: HealthAPI;
    tags: Record<string, string>;
    logger: Logger;
    // true once a health check has succeeded; writes are silently skipped until then
    ready: boolean;
    // timestamp of the most recent health-check attempt, used to throttle retries to every 10s
    lastReadyAttempt: Dayjs | undefined;

    constructor(config: InfluxClientConfig, logger: Logger, tags: Record<string, string> = {}) {
        const {client, ready = false, ...rest} = config;
        this.logger = logger.child({
            labels: ['Influx']
        }, mergeArr);
        this.config = rest;
        this.ready = ready;
        if(client !== undefined) {
            // child client: reuse the shared connection
            this.client = client;
        } else {
            this.client = InfluxClient.createClient(this.config);
            setLogger(this.logger);
        }
        this.write = this.client.getWriteApi(config.credentials.org, config.credentials.bucket, 'ms');
        this.tags = tags;
        this.write.useDefaultTags(tags);
        this.health = new HealthAPI(this.client);
    }

    /**
     * Lazily verify the endpoint is reachable, retrying at most once every 10 seconds.
     *
     * BUGFIX: lastReadyAttempt was never assigned, so the 10-second throttle never
     * engaged and every call re-tested the connection; it is now recorded on each attempt.
     */
    async isReady() {
        if (this.ready) {
            return true;
        }
        if (this.lastReadyAttempt === undefined || dayjs().diff(this.lastReadyAttempt, 's') >= 10) {
            this.lastReadyAttempt = dayjs();
            if (!(await this.testConnection())) {
                this.logger.warn('Influx endpoint is not ready');
            } else {
                this.ready = true;
            }
        } else {
            this.logger.debug(`Influx endpoint testing throttled. Waiting another ${10 - dayjs().diff(this.lastReadyAttempt, 's')} seconds`);
        }
        return this.ready;
    }

    /** One-shot health check; logs (non-serious) and returns false on any failure. */
    async testConnection() {
        try {
            const result = await this.health.getHealth();
            if (result.status === 'fail') {
                return false;
            }
            return true;
        } catch (e: any) {
            this.logger.error(new CMError(`Testing health of Influx endpoint failed`, {cause: e, isSerious: false}));
            return false;
        }
    }

    /** Buffer point(s) for writing; silently dropped when the endpoint is not ready. */
    async writePoint(data: Point | Point[]) {
        if (await this.isReady()) {
            if (Array.isArray(data)) {
                this.write.writePoints(data);
            } else {
                this.write.writePoint(data);
            }
        }
    }

    /** Flush buffered points to the endpoint, logging (not throwing) on failure. */
    async flush() {
        if (await this.isReady()) {
            try {
                await this.write.flush(true);
            } catch (e: any) {
                this.logger.error(new CMError('Failed to flush data to Influx', {cause: e}));
            }
        }
    }

    static createClient(config: InfluxConfig): InfluxDB {
        return new InfluxDB({
            url: config.credentials.url,
            token: config.credentials.token,
            writeOptions: {
                defaultTags: config.defaultTags
            }
        });
    }

    /** New client sharing this connection and ready state, with merged default tags. */
    childClient(logger: Logger, tags: Record<string, string> = {}) {
        return new InfluxClient({
            ...this.config,
            client: this.client,
            ready: this.ready
        }, logger, {...this.tags, ...tags});
    }
}

View File

@@ -0,0 +1,13 @@
import {InfluxDB, WriteApi} from "@influxdata/influxdb-client/dist";
// NOTE(review): the top-of-file import of InfluxDB/WriteApi (from the package's
// 'dist' path) appears unused by these interfaces -- confirm and remove, and avoid
// deep 'dist' imports which are fragile across package versions
/**
 * Connection settings for the Influx integration
 * */
export interface InfluxConfig {
credentials: InfluxCredentials
// tags applied to every written data point
defaultTags?: Record<string, string>
}
/**
 * Credentials/location for an InfluxDB v2 endpoint
 * */
export interface InfluxCredentials {
url: string
token: string
// organization name
org: string
// bucket that data points are written to
bucket: string
}

View File

@@ -1,5 +1,8 @@
import {StructuredRunnableBase} from "./Runnable";
import {ActionJson} from "../types";
import {IncludesData} from "./Includes";
export type ActionObjectJson = Exclude<ActionJson, string>;
export type StructuredActionObjectJson = Omit<ActionObjectJson, 'authorIs' | 'itemIs'> & StructuredRunnableBase
export type ActionConfigData = ActionJson;
export type ActionConfigHydratedData = Exclude<ActionConfigData, IncludesData>;
export type ActionConfigObject = Exclude<ActionConfigHydratedData, string>;
export type StructuredActionObjectJson = Omit<ActionConfigObject, 'authorIs' | 'itemIs'> & StructuredRunnableBase

View File

@@ -148,6 +148,7 @@ export type RecordOutputOption = boolean | RecordOutputType | RecordOutputType[]
export type PostBehaviorType = 'next' | 'stop' | 'nextRun' | string;
export type onExistingFoundBehavior = 'replace' | 'skip' | 'ignore';
export type ActionTarget = 'self' | 'parent';
export type ArbitraryActionTarget = ActionTarget | string;
export type InclusiveActionTarget = ActionTarget | 'any';
export type DispatchSource = 'dispatch' | `dispatch:${string}`;
export type NonDispatchActivitySource = 'poll' | `poll:${PollOn}` | 'user' | `user:${string}`;
@@ -174,6 +175,7 @@ export type ActivitySource = NonDispatchActivitySource | DispatchSource;
export type ConfigFormat = 'json' | 'yaml';
export type ActionTypes =
'comment'
| 'submission'
| 'lock'
| 'remove'
| 'report'
@@ -249,3 +251,36 @@ export type ModActionType =
'REMOVAL' |
'SPAM' |
'APPROVAL';
/**
 * Known usernote types -- open-ended (string) since subreddits may define their own
 * */
export type UserNoteType =
'gooduser' |
'spamwatch' |
'spamwarn' |
'abusewarn' |
'ban' |
'permban' |
'botban' |
string;
// the default, well-known usernote types from the union above
export const userNoteTypes = ['gooduser', 'spamwatch', 'spamwarn', 'abusewarn', 'ban', 'permban', 'botban'];
// validates a config fragment; `fetched` indicates whether the data came from a remote fetch
export type ConfigFragmentValidationFunc = (data: object, fetched: boolean) => boolean;
// reference to a subreddit wiki page (subreddit defaults to the current one when absent)
export interface WikiContext {
wiki: string
subreddit?: string
}
// reference to an arbitrary external URL
export interface ExternalUrlContext {
url: string
}
// a raw config location value together with its parsed context
export interface UrlContext {
value: string
context: WikiContext | ExternalUrlContext
}
// cached image hashes for the original and flipped orientations
// (presumably a horizontal flip -- TODO confirm against ImageData hashing code)
export interface ImageHashCacheData {
original?: string
flipped?: string
}

View File

@@ -2,8 +2,9 @@ import {StringOperator} from "./Atomic";
import {Duration} from "dayjs/plugin/duration";
import InvalidRegexError from "../../Utils/InvalidRegexError";
import dayjs, {Dayjs, OpUnitType} from "dayjs";
import {SimpleError} from "../../Utils/Errors";
import { parseDuration } from "../../util";
import {CMError, SimpleError} from "../../Utils/Errors";
import {escapeRegex, parseDuration, parseDurationFromString, parseStringToRegex} from "../../util";
import {ReportType} from "./Reddit";
export interface DurationComparison {
operator: StringOperator,
@@ -15,8 +16,10 @@ export interface GenericComparison extends HasDisplayText {
value: number,
isPercent: boolean,
extra?: string,
groups?: Record<string, string>
displayText: string,
duration?: Duration
durationText?: string
}
export interface HasDisplayText {
@@ -53,18 +56,20 @@ export const parseGenericValueComparison = (val: string, options?: {
const groups = matches.groups as any;
let duration: Duration | undefined;
let durationText: string | undefined;
if(typeof groups.extra === 'string' && groups.extra.trim() !== '') {
try {
duration = parseDuration(groups.extra, false);
} catch (e) {
// if it returns an invalid regex just means they didn't
if (requireDuration || !(e instanceof InvalidRegexError)) {
throw e;
}
try {
const durationResult = parseDurationFromString(val, false);
if(durationResult.length > 1) {
throw new SimpleError(`Must only have one Duration value, found ${durationResult.length} in: ${val}`);
}
duration = durationResult[0].duration;
durationText = durationResult[0].original;
} catch (e) {
// if it returns an invalid regex just means they didn't
if (requireDuration || !(e instanceof InvalidRegexError)) {
throw e;
}
} else if(requireDuration) {
throw new SimpleError(`Comparison must contain a duration value but none was found. Given: ${val}`);
}
const displayParts = [`${groups.opStr} ${groups.value}`];
@@ -73,13 +78,33 @@ export const parseGenericValueComparison = (val: string, options?: {
displayParts.push('%');
}
const {
opStr,
value,
percent,
extra,
...rest
} = matches.groups || {};
const extraGroups: Record<string,string> = {};
let hasExtraGroups = false;
for(const [k,v] of Object.entries(rest)) {
if(typeof v === 'string' && v.trim() !== '') {
extraGroups[k] = v;
hasExtraGroups = true;
}
}
return {
operator: groups.opStr as StringOperator,
value: Number.parseFloat(groups.value),
isPercent: hasPercent,
extra: groups.extra,
groups: hasExtraGroups ? extraGroups : undefined,
displayText: displayParts.join(''),
duration
duration,
durationText,
}
}
const GENERIC_VALUE_PERCENT_COMPARISON = /^\s*(?<opStr>>|>=|<|<=)\s*(?<value>\d+)\s*(?<percent>%)?(?<extra>.*)$/
@@ -93,18 +118,16 @@ export const parseGenericValueOrPercentComparison = (val: string, options?: {req
const DURATION_COMPARISON_REGEX: RegExp = /^\s*(?<opStr>>|>=|<|<=)\s*(?<time>\d+)\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\s*$/;
const DURATION_COMPARISON_REGEX_URL = 'https://regexr.com/609n8';
export const parseDurationComparison = (val: string): DurationComparison => {
const matches = val.match(DURATION_COMPARISON_REGEX);
if (matches === null) {
throw new InvalidRegexError(DURATION_COMPARISON_REGEX, val, DURATION_COMPARISON_REGEX_URL)
const result = parseGenericValueComparison(val, {requireDuration: true});
if(result.isPercent) {
throw new InvalidRegexError(DURATION_COMPARISON_REGEX, val, DURATION_COMPARISON_REGEX_URL, 'Duration comparison value cannot be a percentage');
}
const groups = matches.groups as any;
const dur: Duration = dayjs.duration(groups.time, groups.unit);
if (!dayjs.isDuration(dur)) {
throw new SimpleError(`Parsed value '${val}' did not result in a valid Dayjs Duration`);
if(result.value < 0) {
throw new InvalidRegexError(DURATION_COMPARISON_REGEX, val, DURATION_COMPARISON_REGEX_URL,'Duration value cannot be negative');
}
return {
operator: groups.opStr as StringOperator,
duration: dur
operator: result.operator as StringOperator,
duration: result.duration as Duration
}
}
export const dateComparisonTextOp = (val1: Dayjs, strOp: StringOperator, val2: Dayjs, granularity?: OpUnitType): boolean => {
@@ -139,3 +162,51 @@ export const comparisonTextOp = (val1: number, strOp: string, val2: number): boo
throw new Error(`${strOp} was not a recognized operator`);
}
}
/**
 * A parsed report comparison: count comparison plus optional report-type and reason filters
 * */
export interface ReportComparison extends Omit<GenericComparison, 'groups'> {
// restrict the comparison to mod or user reports
reportType?: ReportType
// compiled regex a report reason must match
reasonRegex?: RegExp
// the raw reason qualifier text as written by the user
reasonMatch?: string
}
// captures: operator, count, optional %, optional mod/user type, optional quoted-or-/regex/ reason,
// optional trailing duration (time + unit)
const REPORT_COMPARISON = /^\s*(?<opStr>>|>=|<|<=)\s*(?<value>\d+)(?<percent>\s*%)?(?:\s+(?<reportType>mods?|users?))?(?:\s+(?<reasonMatch>["'].*["']|\/.*\/))?.*(?<time>\d+)?\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\s*$/i
// extracts the inner text of a quoted ('...' or "...") reason literal
const REPORT_REASON_LITERAL = /["'](.*)["']/i
/**
 * Parse a report comparison string, e.g. `> 2 user "misinformation" in 2 hours`.
 *
 * A quoted reason becomes a case-insensitive substring regex; a /slash-delimited/
 * reason is used as a regex directly.
 *
 * Throws CMError when a reason qualifier cannot be converted to a usable regex.
 * */
export const parseReportComparison = (str: string): ReportComparison => {
const generic = parseGenericValueComparison(str, {reg: REPORT_COMPARISON});
const {
groups: {
reportType,
reasonMatch
} = {},
...rest
} = generic;
const result: ReportComparison = {...rest, reasonMatch};
if(reportType !== undefined) {
// accepts singular/plural (mods?/users?) so a substring check is sufficient
if(reportType.toLocaleLowerCase().includes('mod')) {
result.reportType = 'mod' as ReportType;
} else if (reportType.toLocaleLowerCase().includes('user')) {
result.reportType = 'user' as ReportType;
}
}
if(reasonMatch !== undefined) {
const literalMatch = reasonMatch.match(REPORT_REASON_LITERAL);
if(literalMatch !== null) {
// quoted literal => escape it and wrap in .* for substring matching
const cleanLiteralMatch = `/.*${escapeRegex(literalMatch[1].trim())}.*/`;
result.reasonRegex = parseStringToRegex(cleanLiteralMatch, 'i');
if(result.reasonRegex === undefined) {
throw new CMError(`Could not convert reason match value to Regex: ${cleanLiteralMatch}`, {isSerious: false})
}
} else {
// not quoted => treat the qualifier itself as a regex
result.reasonRegex = parseStringToRegex(reasonMatch, 'i');
if(result.reasonRegex === undefined) {
throw new CMError(`Could not convert reason match value to Regex: ${reasonMatch}`, {isSerious: false})
}
}
}
return result;
}

View File

@@ -405,20 +405,40 @@ export interface ActivityState {
approved?: boolean | ModeratorNames | ModeratorNames[] | ModeratorNameCriteria
score?: CompareValue
/**
* A string containing a comparison operator and a value to compare against
* A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint
*
* The syntax is `(< OR > OR <= OR >=) <number>`
* The syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`
*
* If only comparison and number is given then defaults to TOTAL reports on an Activity.
*
* * EX `> 2` => greater than 2 total reports
*
* Defaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:
* Type (optional) determines which type of reports to look at:
*
* * EX `> 3 mod` => greater than 3 mod reports
* * EX `>= 1 user` => greater than 1 user report
* * `mod` -- mod reports
* * EX `> 3 mod` => greater than 3 mod reports
* * `user` -- user reports
* * EX `> 3 user` => greater than 3 user reports
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
* Report reason qualifiers can be:
*
* * enclosed double or single quotes -- report reason contains
* * EX `> 1 "misinformation" => greater than 1 report with reason containing "misinformation"
* * enclosed in backslashes -- match regex
* * EX `> 1 \harassment towards .*\` => greater than 1 report with reason matching regex \harassment towards .*\
*
* Type and reason qualifiers can be used together:
*
* EX `> 2 user "misinformation" => greater than 2 user reports with reasons containing "misinformation"
*
* The time constraint filter reports created between NOW and [timeConstraint] in the past:
*
* * `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago
* * `> 2 user "misinformation" in 2 hours` => more than 2 user reports containing "misinformation" created between NOW and 2 hours ago
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)(\s*%)?(\s+(?:mods?|users?))?(\s+(?:["'].*["']|\/.*\/))?.*(\d+)?\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\s*$
* */
reports?: CompareValue
reports?: string
age?: DurationComparor
/**
* Test whether the activity is present in dispatched/delayed activities
@@ -434,7 +454,7 @@ export interface ActivityState {
dispatched?: boolean | string | string[]
// can use ActivitySource | ActivitySource[] here because of issues with generating json schema, see ActivitySource comments
// cant use ActivitySource | ActivitySource[] here because of issues with generating json schema, see ActivitySource comments
/**
* Test where the current activity was sourced from.
*
@@ -487,6 +507,18 @@ export interface SubmissionState extends ActivityState {
* Is the submission a reddit-hosted image or video?
* */
isRedditMediaDomain?: boolean
/**
* Compare the upvote ratio for this Submission, expressed as a whole number
*
* Can be either a comparison string or a number. If a number then CM assumes upvote ratio must be greater than or equal to this.
*
* Example:
*
* * `< 90` => less than 90% upvoted
* * 45 => greater than or equal to 45% upvoted
* */
upvoteRatio?: number | CompareValue
}
export const cmActivityProperties = ['submissionState', 'score', 'reports', 'removed', 'deleted', 'filtered', 'age', 'title'];

View File

@@ -0,0 +1,45 @@
import {ConfigFormat} from "./Atomic";
/**
 * Configuration for including an external config fragment (subreddit wiki page or URL)
 * */
export interface IncludesData {
/**
 * The special-form path to the config fragment to retrieve.
 *
 * If value starts with `wiki:` then the proceeding value will be used to get a wiki page from the current subreddit
 *
 * * EX `wiki:botconfig/mybot` tries to get `https://reddit.com/r/currentSubreddit/wiki/botconfig/mybot`
 *
 * If the value starts with `wiki:` and ends with `|someValue` then `someValue` will be used as the base subreddit for the wiki page
 *
 * * EX `wiki:replytemplates/test|ContextModBot` tries to get `https://reddit.com/r/ContextModBot/wiki/replytemplates/test`
 *
 * If the value starts with `url:` then the value is fetched as an external url and expects raw text returned
 *
 * * EX `url:https://pastebin.com/raw/38qfL7mL` tries to get the text response of `https://pastebin.com/raw/38qfL7mL`
 * */
path: string
/**
 * An unused hint about the content type. Not implemented yet
 * */
type?: ConfigFormat
/**
 * Control caching for the config fragment.
 *
 * If not specified the value for `wikiTTL` will be used
 *
 * * If true then value is cached forever
 * * If false then value is never cached
 * * If a number then the number of seconds to cache value
 * * If 'response' then CM will attempt to use Cache-Control or Expires headers from the response to determine how long to cache the value
 * */
ttl?: number | boolean | 'response'
}
// template-literal forms of the special-form path strings described above
export type IncludesUrl = `url:${string}`;
export type IncludesWiki = `wiki:${string}`;
export type IncludesString = IncludesUrl | IncludesWiki;
// an include may be a bare path string or a full IncludesData object
export type IncludesType = string | IncludesData;
/**
 * Type guard: true when the value is an object carrying a `path` property
 * (the minimal shape of IncludesData).
 * */
export const asIncludesData = (val: any): val is IncludesData => {
    if (val === null || typeof val !== 'object') {
        return false;
    }
    return 'path' in val;
}

View File

@@ -30,3 +30,61 @@ export interface CachedFetchedActivitiesResult {
export interface FetchedActivitiesResult extends CachedFetchedActivitiesResult {
post: SnoowrapActivity[]
}
export type ReportType = 'mod' | 'user';
/**
 * A normalized view of a single report on an Activity
 * */
export interface Report {
    reason: string
    type: ReportType
    // username of the reporting moderator (mod reports only)
    author?: string
    snoozed: boolean
    canSnooze: boolean
}
// raw user report tuple as returned by the reddit API
export type RawRedditUserReport = [
    string, // reason
    number, // number of reports with this reason
    boolean, // is report snoozed
    boolean // can the reports be snoozed
];
// [reason, moderator name]
export type RawRedditModReport = [string, string];
/**
 * Expand an Activity's raw user/mod report data into a flat list of Report objects.
 *
 * User reports arrive aggregated by reason with a count; the count is expanded into
 * that many individual Report entries.
 * */
export const activityReports = (activity: SnoowrapActivity): Report[] => {
    const reports: Report[] = [];
    for(const r of (activity.user_reports as unknown as RawRedditUserReport[])) {
        for(let i = 0; i < r[1]; i++) {
            // BUG FIX: create a fresh object per entry. Previously the SAME object
            // reference was pushed r[1] times, so mutating one expanded report
            // silently mutated all of its siblings.
            reports.push({
                reason: r[0],
                type: 'user' as ReportType,
                snoozed: r[2],
                canSnooze: r[3]
            });
        }
    }
    for(const r of (activity.mod_reports as unknown as RawRedditModReport[])) {
        reports.push({
            reason: r[0],
            type: 'mod' as ReportType,
            author: r[1],
            snoozed: false,
            canSnooze: false
        })
    }
    return reports;
}
/**
 * Raw response shape for a subreddit's removal reasons endpoint
 * */
export interface RawSubredditRemovalReasonData {
// removal reasons keyed by id
data: {
[key: string]: SubredditRemovalReason
},
// removal reason ids in display order
// NOTE(review): `[string]` is a ONE-element tuple; presumably `string[]` was intended -- confirm
order: [string]
}
export interface SubredditRemovalReason {
message: string
id: string,
title: string
}

View File

@@ -1,10 +1,12 @@
import {StructuredRunnableBase} from "./Runnable";
import {RuleSetObjectJson} from "../../Rule/RuleSet";
import {RuleSetConfigObject} from "../../Rule/RuleSet";
import {RuleObjectJsonTypes} from "../types";
import {IncludesData} from "./Includes";
export type RuleJson = RuleObjectJsonTypes | string;
export type RuleObjectJson = Exclude<RuleJson, string>
export type StructuredRuleObjectJson = Omit<RuleObjectJson, 'authorIs' | 'itemIs'> & StructuredRunnableBase
export type StructuredRuleSetObjectJson = Omit<RuleSetObjectJson, 'rules'> & {
rules: StructuredRuleObjectJson[]
export type RuleConfigData = RuleObjectJsonTypes | string | IncludesData;
export type RuleConfigHydratedData = Exclude<RuleConfigData, IncludesData>
export type RuleConfigObject = Exclude<RuleConfigHydratedData, string>
export type StructuredRuleConfigObject = Omit<RuleConfigObject, 'authorIs' | 'itemIs'> & StructuredRunnableBase
export type StructuredRuleSetConfigObject = Omit<RuleSetConfigObject, 'rules'> & {
rules: StructuredRuleConfigObject[]
}

View File

@@ -10,11 +10,16 @@ export interface RunnableBaseOptions extends Omit<RunnableBaseJson, 'itemIs' | '
authorIs?: MinimalOrFullFilter<AuthorCriteria>
}
export interface StructuredRunnableBase {
export interface StructuredRunnableBase extends RunnableBaseJson {
itemIs?: MinimalOrFullFilter<TypedActivityState>
authorIs?: MinimalOrFullFilter<AuthorCriteria>
}
export interface TypedStructuredRunnableBase<T> extends TypedRunnableBaseData<T> {
itemIs?: MinimalOrFullFilter<T>
authorIs?: MinimalOrFullFilter<AuthorCriteria>
}
export interface RunnableBaseJson {
/**
* A list of criteria to test the state of the `Activity` against before running the check.
@@ -31,3 +36,10 @@ export interface RunnableBaseJson {
* */
authorIs?: MinimalOrFullFilterJson<AuthorCriteria>
}
export interface TypedRunnableBaseData<T extends TypedActivityState> extends RunnableBaseJson {
/**
* If present then these Author criteria are checked before running the Check. If criteria fails then the Check will fail.
* */
authorIs?: MinimalOrFullFilterJson<AuthorCriteria>
}

View File

@@ -5,6 +5,7 @@ import {DatabaseMigrationOptions} from "./interfaces";
import {copyFile} from "fs/promises";
import {constants} from "fs";
import {ErrorWithCause} from "pony-cause";
import {CMError} from "../Utils/Errors";
export interface ExistingTable {
table: Table
@@ -118,9 +119,10 @@ export class MigrationService {
try {
await this.backupDatabase();
continueBCBackedup = true;
} catch (err) {
// @ts-ignore
this.dbLogger.error(err, {leaf: 'Backup'});
} catch (err: any) {
if(!(err instanceof CMError) || !err.logged) {
this.dbLogger.error(err, {leaf: 'Backup'});
}
}
} else {
this.dbLogger.info('Configuration DID NOT specify migrations may be executed if automated backup is successful. Will not try to create a backup.');
@@ -154,25 +156,34 @@ YOU SHOULD BACKUP YOUR EXISTING DATABASE BEFORE CONTINUING WITH MIGRATIONS.`);
async backupDatabase() {
try {
if (this.database.options.type === 'sqljs' && this.database.options.location !== undefined) {
let location: string | undefined;
const canBackup = ['sqljs','better-sqlite3'].includes(this.database.options.type);
if(canBackup) {
if(this.database.options.type === 'sqljs') {
location = this.database.options.location === ':memory:' ? undefined : this.database.options.location;
} else {
location = this.database.options.database === ':memory:' || (typeof this.database.options.database !== 'string') ? undefined : this.database.options.database;
}
}
if (canBackup && location !== undefined) {
try {
const ts = Date.now();
const backupLocation = `${this.database.options.location}.${ts}.bak`
const backupLocation = `${location}.${ts}.bak`
this.dbLogger.info(`Detected sqljs (sqlite) database. Will try to make a backup at ${backupLocation}`, {leaf: 'Backup'});
await copyFile(this.database.options.location, backupLocation, constants.COPYFILE_EXCL);
await copyFile(location, backupLocation, constants.COPYFILE_EXCL);
this.dbLogger.info('Successfully created backup!', {leaf: 'Backup'});
} catch (err: any) {
throw new ErrorWithCause('Cannot make an automated backup of your configured database.', {cause: err});
}
} else {
let msg = 'Cannot make an automated backup of your configured database.';
if (this.database.options.type !== 'sqljs') {
msg += ' Only SQlite (sqljs database type) is implemented for automated backups right now, sorry :( You will need to manually backup your database.';
if (!canBackup) {
msg += ' Only SQlite (sqljs or better-sqlite3 database type) is implemented for automated backups right now, sorry :( You will need to manually backup your database.';
} else {
// TODO don't throw for this??
msg += ' Database location is not defined (probably in-memory).';
}
throw new Error(msg);
throw new CMError(msg, {logged: true});
}
} catch (e: any) {
this.dbLogger.error(e, {leaf: 'Backup'});

View File

@@ -0,0 +1,92 @@
import {QueryRunner, TableIndex} from "typeorm";
/**
* Boilerplate for creating generic index
* */
/**
 * Boilerplate for creating a generic (optionally unique) table index.
 * Resulting name format: IDX_[UN_]{prefix}_{col1-col2...}
 * */
export const index = (prefix: string, columns: string[], unique = true) => {
    const uniqueMarker = unique ? 'UN_' : '';
    return new TableIndex({
        name: `IDX_${uniqueMarker}${prefix}_${columns.join('-')}`,
        columnNames: columns,
        isUnique: unique,
    });
};
/**
 * Create an index on the id column
 * */
export const idIndex = (prefix: string, unique: boolean) => index(prefix, ['id'], unique);
/**
* Boilerplate primary key column for random ID
* */
export const randomIdColumn = () => ({
name: 'id',
type: 'varchar',
length: '20',
isPrimary: true,
isUnique: true,
});
/**
* Create a time data column based on database type
* */
export const timeAtColumn = (columnName: string, dbType: string, nullable?: boolean) => {
const dbSpecifics = dbType === 'postgres' ? {
type: 'timestamptz'
} : {
type: 'datetime',
// required to get millisecond precision on mysql/mariadb
// https://mariadb.com/kb/en/datetime/
// https://dev.mysql.com/doc/refman/8.0/en/fractional-seconds.html
length: '3',
}
return {
name: columnName,
isNullable: nullable ?? false,
...dbSpecifics
}
}
// createdAt timestamp column for the given db type
export const createdAtColumn = (type: string) => timeAtColumn('createdAt', type);
// updatedAt timestamp column for the given db type
const updatedAtColumn = (type: string) => timeAtColumn('updatedAt', type);
// both audit timestamp columns together
const createdUpdatedAtColumns = (type: string) => [createdAtColumn(type), updatedAtColumn(type)];
// non-unique index on createdAt
export const createdAtIndex = (prefix: string) => index(prefix, ['createdAt'], false);
// non-unique index on updatedAt
const updatedAtIndex = (prefix: string) => index(prefix, ['updatedAt'], false);
// both audit timestamp indices together
const createdUpdatedAtIndices = (prefix: string) => [createdAtIndex(prefix), updatedAtIndex(prefix)];
/**
 * Boilerplate for a filter (itemIs, authorIs) FK column -- the FK is the filter ID
 * */
const filterColumn = (name: string) => {
    return {
        name,
        type: 'varchar',
        length: '20',
        isNullable: true
    };
};
const authorIsColumn = () => filterColumn('authorIs');
const itemIsColumn = () => filterColumn('itemIs');
// both filter FK columns together
export const filterColumns = () => [authorIsColumn(), itemIsColumn()];
const authorIsIndex = (prefix: string) => index(prefix, ['authorIs']);
const itemIsIndex = (prefix: string) => index(prefix, ['itemIs']);
// unique indices for both filter FK columns
export const filterIndices = (prefix: string) => [authorIsIndex(prefix), itemIsIndex(prefix)];
/**
 * Best-effort check for whether a table contains any rows.
 *
 * Returns true/false when a row count could be determined, or null when the
 * driver's result shape was not recognized.
 * */
export const tableHasData = async (runner: QueryRunner, name: string): Promise<boolean | null> => {
    const countRes = await runner.query(`select count(*) from ${name}`);
    let hasRows = null;
    if (Array.isArray(countRes) && countRes[0] !== null && typeof countRes[0] === 'object') {
        // BUG FIX: the count column name differs by driver -- sqlite/mysql return
        // 'count(*)' while postgres returns 'count' (as a STRING). Destructuring only
        // 'count(*)' left `count` undefined on postgres, and `undefined !== 0` made
        // this report true for empty tables. Read the first column value regardless
        // of key and coerce it to a number instead.
        const values = Object.values(countRes[0] as Record<string, any>);
        if (values.length > 0 && values[0] !== null && values[0] !== undefined) {
            const count = Number(values[0]);
            if (!Number.isNaN(count)) {
                hasRows = count !== 0;
            }
        }
    }
    return hasRows;
}

View File

@@ -1,86 +1,12 @@
import {MigrationInterface, QueryRunner, Table, TableIndex, TableColumn, TableForeignKey} from "typeorm";
const randomIdColumn = () => ({
name: 'id',
type: 'varchar',
length: '20',
isPrimary: true,
isUnique: true,
});
const timeAtColumn = (columnName: string, dbType: string, nullable?: boolean) => {
const dbSpecifics = dbType === 'postgres' ? {
type: 'timestamptz'
} : {
type: 'datetime',
// required to get millisecond precision on mysql/mariadb
// https://mariadb.com/kb/en/datetime/
// https://dev.mysql.com/doc/refman/8.0/en/fractional-seconds.html
length: '3',
}
return {
name: columnName,
isNullable: nullable ?? false,
...dbSpecifics
}
}
const createdAtColumn = (type: string) => timeAtColumn('createdAt', type);
const updatedAtColumn = (type: string) => timeAtColumn('updatedAt', type);
const createdUpdatedAtColumns = (type: string) => [
timeAtColumn('createdAt', type),
timeAtColumn('updatedAt', type)
];
const createdAtIndex = (prefix: string) => new TableIndex({
name: `IDX_${prefix}_createdAt`,
columnNames: ['createdAt']
});
const updatedAtIndex = (prefix: string) => new TableIndex({
name: `IDX_${prefix}_updatedAt`,
columnNames: ['updatedAt']
})
const createdUpdatedAtIndices = (prefix: string) => {
return [
createdAtIndex(prefix),
updatedAtIndex(prefix)
]
}
const filterColumn = (name: string) => ({
name,
type: 'varchar',
length: '20',
isNullable: true
});
const authorIsColumn = () => filterColumn('authorIs');
const itemIsColumn = () => filterColumn('itemIs');
const filterColumns = () => ([authorIsColumn(), itemIsColumn()]);
const authorIsIndex = (prefix: string) => new TableIndex({
name: `IDX_${prefix}_authorIs`,
columnNames: ['authorIs'],
isUnique: true,
});
const itemIsIndex = (prefix: string) => new TableIndex({
name: `IDX_${prefix}_itemIs`,
columnNames: ['itemIs'],
isUnique: true
});
const filterIndices = (prefix: string) => {
return [
authorIsIndex(prefix),
itemIsIndex(prefix)
]
}
import {MigrationInterface, QueryRunner, Table, TableIndex} from "typeorm";
import {
createdAtColumn,
createdAtIndex,
filterColumns,
filterIndices,
randomIdColumn,
timeAtColumn
} from "../MigrationUtil";
export class initApi1642180264563 implements MigrationInterface {
name = 'initApi1642180264563'

View File

@@ -1,12 +1,5 @@
import {MigrationInterface, QueryRunner, Table, TableIndex} from "typeorm"
const index = (prefix: string, columns: string[], unique = true) => new TableIndex({
name: `IDX_${unique ? 'UN_' : ''}${prefix}_${columns.join('-')}_MIG`,
columnNames: columns,
isUnique: unique,
});
const idIndex = (prefix: string, unique: boolean) => index(prefix, ['id'], unique);
import {MigrationInterface, QueryRunner, Table} from "typeorm"
import {idIndex, index} from "../MigrationUtil";
export class indexes1653586738904 implements MigrationInterface {

View File

@@ -0,0 +1,62 @@
import {MigrationInterface, QueryRunner, Table, TableIndex} from "typeorm"
import {createdAtColumn, createdAtIndex, idIndex, index, randomIdColumn} from "../MigrationUtil";
/**
 * Migration: adds the ActivityReport table for tracking individual reports on
 * activities over time (see the report comparison time-constraint feature).
 * */
export class reportTracking1657632517934 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
// timestamp column flavor depends on the active database driver
const dbType = queryRunner.connection.driver.options.type;
await queryRunner.createTable(
new Table({
name: 'ActivityReport',
columns: [
randomIdColumn(),
{
name: 'activityId',
type: 'varchar',
length: '20',
isNullable: false
},
{
name: 'reason',
type: 'varchar',
length: '500',
isNullable: false
},
{
name: 'type',
type: 'varchar',
length: '200',
isNullable: false
},
{
// reporting moderator name; null for user reports -- TODO confirm against entity
name: 'author',
type: 'varchar',
length: '100',
isNullable: true
},
{
name: 'granularity',
type: 'int',
isNullable: false
},
createdAtColumn(dbType),
],
// NOTE(review): index prefixes are inconsistent ('ActivityReport',
// 'ActivityReportReason', 'report') -- confirm the resulting IDX_* names are intended
indices: [
idIndex('ActivityReport', true),
index('ActivityReport', ['activityId'], false),
index('ActivityReportReason', ['reason'], false),
createdAtIndex('report'),
]
}),
true,
true,
true
);
}
// irreversible: no down migration implemented
public async down(queryRunner: QueryRunner): Promise<void> {
}
}

View File

@@ -0,0 +1,50 @@
import {MigrationInterface, QueryRunner, Table} from "typeorm"
import {createdAtColumn, createdAtIndex, idIndex, index, randomIdColumn, timeAtColumn} from "../MigrationUtil";
/**
 * Migration: adds the Guests table storing guest-access grants (author, grant type,
 * what the grant belongs to, and optional expiration).
 * */
export class Guests1658930394548 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
// timestamp column flavor depends on the active database driver
const dbType = queryRunner.connection.driver.options.type;
await queryRunner.createTable(
new Table({
name: 'Guests',
columns: [
randomIdColumn(),
{
name: 'authorName',
type: 'varchar',
length: '200',
isNullable: false,
},
{
name: 'type',
type: 'varchar',
isNullable: false,
length: '50'
},
{
// id of the entity this guest belongs to -- TODO confirm FK target
name: 'guestOfId',
type: 'varchar',
length: '20',
isNullable: true
},
// nullable expiration => grant does not expire when null
timeAtColumn('expiresAt', dbType, true),
createdAtColumn(dbType),
],
// NOTE(review): index prefixes are inconsistent ('Guests', 'guests', 'guest') --
// confirm the resulting IDX_* names are intended
indices: [
idIndex('Guests', true),
createdAtIndex('guests'),
index('guest', ['expiresAt'], false)
]
}),
true,
true,
true
);
}
// irreversible: no down migration implemented
public async down(queryRunner: QueryRunner): Promise<void> {
}
}

View File

@@ -0,0 +1,145 @@
import {MigrationInterface, QueryRunner, Table, TableColumn} from "typeorm"
import {createdAtColumn, createdAtIndex, idIndex, index, randomIdColumn, tableHasData, timeAtColumn} from "../MigrationUtil";
/**
 * Migration: adds the SubredditInvite table and migrates/creates the BotInvite table.
 *
 * When a legacy 'Invite' table exists it is renamed to 'BotInvite' and extended with
 * the new columns; otherwise a fresh 'BotInvite' table is created from scratch.
 * */
export class invites1660228987769 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
// timestamp column flavor depends on the active database driver
const dbType = queryRunner.connection.driver.options.type;
await queryRunner.createTable(
new Table({
name: 'SubredditInvite',
columns: [
{
name: 'id',
type: 'varchar',
length: '255',
isPrimary: true,
},
{
name: 'botId',
type: 'varchar',
length: '20',
isNullable: false
},
{
name: 'subreddit',
type: 'varchar',
length: '255',
isNullable: false
},
{
name: 'guests',
type: 'text',
isNullable: true
},
{
name: 'initialConfig',
type: 'text',
isNullable: true
},
createdAtColumn(dbType),
// nullable expiration => invite does not expire when null
timeAtColumn('expiresAt', dbType, true)
],
}),
true,
true,
true
);
if (await queryRunner.hasTable('Invite')) {
// legacy path: rename the old table and add only the columns new to this release
await queryRunner.renameTable('Invite', 'BotInvite');
const table = await queryRunner.getTable('BotInvite') as Table;
await queryRunner.addColumns(table, [
new TableColumn({
name: 'initialConfig',
type: 'text',
isNullable: true
}),
new TableColumn({
name: 'guests',
type: 'text',
isNullable: true
})
]);
queryRunner.connection.logger.logSchemaBuild(`Table 'Invite' has been renamed 'BotInvite'. If there are existing rows on this table they will need to be recreated.`);
} else {
// fresh path: create the full BotInvite table
await queryRunner.createTable(
new Table({
name: 'BotInvite',
columns: [
{
name: 'id',
type: 'varchar',
length: '255',
isPrimary: true,
},
{
name: 'clientId',
type: 'varchar',
length: '255',
},
{
name: 'clientSecret',
type: 'varchar',
length: '255',
},
{
name: 'redirectUri',
type: 'text',
},
{
name: 'creator',
type: 'varchar',
length: '255',
},
{
name: 'permissions',
type: 'text'
},
{
name: 'instance',
type: 'varchar',
length: '255',
isNullable: true
},
{
name: 'overwrite',
type: 'boolean',
isNullable: true,
},
{
name: 'subreddits',
type: 'text',
isNullable: true
},
{
name: 'guests',
type: 'text',
isNullable: true
},
{
name: 'initialConfig',
type: 'text',
isNullable: true
},
createdAtColumn(dbType),
timeAtColumn('expiresAt', dbType, true)
],
}),
true,
true,
true
);
}
}
// irreversible: no down migration implemented
public async down(queryRunner: QueryRunner): Promise<void> {
}
}

View File

@@ -0,0 +1,15 @@
import { MigrationInterface, QueryRunner } from "typeorm"
import {ActionType} from "../../../Entities/ActionType";
/**
 * Seed migration: registers the 'submission' ActionType added by the
 * create-submission feature.
 * */
export class submission1661183583080 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.manager.getRepository(ActionType).save([
new ActionType('submission')
]);
}
// irreversible: seeded row is not removed
public async down(queryRunner: QueryRunner): Promise<void> {
}
}

View File

@@ -0,0 +1,33 @@
import {MigrationInterface, QueryRunner} from "typeorm"
import {tableHasData} from "../MigrationUtil";
/**
 * Migration: drops the legacy 'Invite' table from the SQL.js 'web' database when it
 * is safe to do so.
 *
 * 'Invite' is replaced by the 'BotInvite' table in the 'app' database, so the old
 * table is only dropped when it is confirmed empty; otherwise operators are warned
 * that existing invites must be recreated.
 * */
export class removeInvites1660588028346 implements MigrationInterface {

    public async up(queryRunner: QueryRunner): Promise<void> {
        const dbType = queryRunner.connection.driver.options.type;
        if (dbType === 'sqljs' && await queryRunner.hasTable('Invite')) {
            // (dead commented-out inline count logic removed -- superseded by the
            // tableHasData helper, which returns null when the count is indeterminate)
            const hasRows = await tableHasData(queryRunner, 'Invite');
            if (hasRows === false) {
                await queryRunner.dropTable('Invite');
            } else {
                let prefix = hasRows === null ? `Could not determine if SQL.js 'web' database had the table 'Invite' --` : `SQL.js 'web' database had the table 'Invite' and it is not empty --`
                queryRunner.connection.logger.logSchemaBuild(`${prefix} This table is being replaced by 'BotInvite' table in 'app' database. If you have existing invites you will need to recreate them.`);
            }
        }
    }

    // irreversible: the legacy table is not recreated
    public async down(queryRunner: QueryRunner): Promise<void> {
    }
}

242
src/Common/OpenCVService.ts Normal file
View File

@@ -0,0 +1,242 @@
import winston, {Logger} from "winston";
import {CMError} from "../Utils/Errors";
import {formatNumber, mergeArr, resolvePath} from "../util";
import * as cvTypes from '@u4/opencv4nodejs'
import ImageData from "./ImageData";
import {pathToFileURL} from "url";
// Module-level cache for the opencv4nodejs bindings; populated on first load
let cv: any;

/**
 * Lazily loads the opencv4nodejs bindings on first call and returns the
 * cached module on subsequent calls.
 *
 * @throws CMError when opencv4nodejs is not installed or fails to load
 * */
export const getCV = async (): Promise<typeof cvTypes.cv> => {
    if (cv !== undefined) {
        // Already initialized -- reuse cached bindings
        return cv as typeof cvTypes.cv;
    }
    try {
        const imported = await import('@u4/opencv4nodejs');
        if (imported === undefined) {
            throw new CMError('Could not initialize openCV because opencv4nodejs is not installed');
        }
        cv = imported.default;
    } catch (e: any) {
        // Wrap both "not installed" and native-binding load failures with context
        throw new CMError('Could not initialize openCV', {cause: e});
    }
    return cv as typeof cvTypes.cv;
}
export class OpenCVService {
logger: Logger;
constructor(logger?: Logger) {
const parentLogger = logger ?? winston.loggers.get('app');
this.logger = parentLogger.child({labels: ['OpenCV']}, mergeArr)
}
async cv() {
if (cv === undefined) {
try {
const cvImport = await import('@u4/opencv4nodejs');
if (cvImport === undefined) {
throw new CMError('Could not initialize openCV because opencv4nodejs is not installed');
}
cv = cvImport.default;
} catch (e: any) {
throw new CMError('Could not initialize openCV', {cause: e});
}
}
return cv as typeof cvTypes.cv;
}
}
// Best template-match candidate seen so far across scale iterations
interface CurrentMaxData {
    // Highest matchTemplate() confidence value encountered
    confidence: number,
    // Top-left location of that best match in the searched image
    loc: cvTypes.Point2,
    // Downscale ratio in effect when the match was found (only set when the
    // source image, rather than the template, is being downscaled)
    ratio?: number
}

// Result of a template match: bounding rect of the match (in original
// source-image coordinates) and the confidence it was found with; both are
// undefined when no candidate match was located at all
export interface MatchResult {matchRec?: cvTypes.Rect, matchedConfidence?: number}
/**
 * Use openCV matchTemplate() to find images within images
 *
 * The majority of these code concepts are based on https://pyimagesearch.com/2015/01/26/multi-scale-template-matching-using-python-opencv/
 * and examples/usage of opencv.js is from https://github.com/UrielCh/opencv4nodejs/tree/master/examples/src/templateMatch
 *
 * */
export class TemplateCompare {
    cv: typeof cvTypes.cv;
    logger: Logger;
    // Greyscale template Mat set via setTemplate()
    template?: cvTypes.Mat;
    // Pre-computed, edge-detected downscaled variants of the template (lazy)
    downscaledTemplates: cvTypes.Mat[] = [];

    constructor(cv: typeof cvTypes.cv, logger: Logger) {
        this.cv = cv;
        this.logger = logger.child({labels: ['OpenCV', 'Template Match']}, mergeArr)
    }

    // Decode image bytes into a greyscale openCV Mat
    protected async normalizeImage(image: ImageData) {
        return this.cv.imdecode(await ((await image.sharp()).clone().greyscale().toBuffer()));
    }

    async setTemplate(image: ImageData) {
        this.template = await this.normalizeImage(image);
    }

    // Returns an edge-detected (canny) copy of the template
    protected getTemplate() {
        if (this.template === undefined) {
            throw new Error('Template is not defined, use setTemplate() first');
        }
        return this.template.copy().canny(50, 200);
    }

    /**
     * Generate edge-detected variants of the template at 90% down to 20% of
     * its original size, cached for reuse across many source images.
     */
    downscaleTemplates() {
        if (this.template === undefined) {
            throw new Error('Template is not defined, use setTemplate() first');
        }
        // Mat.sizes is [rows, cols] -> [height, width]
        const [tH, tW] = this.template.sizes;
        for (let i = 10; i <= 80; i += 10) {
            const templateRatio = (100 - i) / 100;
            // for debugging
            // const scaled = this.template.copy().resize(new cv.Size(Math.floor(templateRatio * tW), Math.floor(templateRatio * tH))).canny(50, 200);
            // const path = pathToFileURL(resolvePath(`./tests/assets/star/starTemplateScaled-${Math.floor(templateRatio * 100)}.jpg`, './')).pathname;
            // cv.imwrite(path, scaled);
            // Use the injected bindings (this.cv) rather than the module-level
            // cache, which may not have been initialized via getCV()
            this.downscaledTemplates.push(this.template.copy().resize(new this.cv.Size(Math.floor(templateRatio * tW), Math.floor(templateRatio * tH))).canny(50, 200))
        }
    }

    /**
     * Search for the template within a source image at multiple scales.
     *
     * @param sourceImageData image to search within
     * @param downscaleWhich 'template' shrinks the template against a fixed
     *        source (template expected larger than its occurrence); 'image'
     *        shrinks the source against a fixed template (occurrence expected
     *        larger than the template)
     * @param confidence threshold at which a match is accepted and the scale
     *        search stops early
     * @returns tuple of [met-threshold, best match rect + confidence]
     */
    async matchImage(sourceImageData: ImageData, downscaleWhich: 'template' | 'image', confidence = 0.5): Promise<[boolean, MatchResult]> {
        if (this.template === undefined) {
            throw new Error('Template is not defined, use setTemplate() first');
        }
        let currMax: CurrentMaxData | undefined;
        let matchRec: cvTypes.Rect | undefined;
        let matchedConfidence: number | undefined;
        if (downscaleWhich === 'template') {
            // in this scenario we assume our template is a significant fraction of the size of the source
            // so we want to scale down the template size incrementally
            // because we are assuming the template in the image is smaller than our source template

            // generate scaled templates and save for later use!
            // its likely this class is in use in Recent/Repeat rules which means we will probably be comparing this template against many images
            if (this.downscaledTemplates.length === 0) {
                this.downscaleTemplates();
            }
            let currMaxTemplateSize: number[] | undefined;
            const src = (await this.normalizeImage(sourceImageData)).canny(50, 200);
            // getTemplate() is synchronous; no await needed
            const edgedTemplate = this.getTemplate();
            for (const scaledTemplate of [edgedTemplate].concat(this.downscaledTemplates)) {
                // more information on methods...
                // https://docs.opencv.org/4.x/d4/dc6/tutorial_py_template_matching.html
                // https://stackoverflow.com/questions/58158129/understanding-and-evaluating-template-matching-methods
                // https://stackoverflow.com/questions/48799711/explain-difference-between-opencvs-template-matching-methods-in-non-mathematica
                // https://datahacker.rs/014-template-matching-using-opencv-in-python/
                // ...may want to try with TM_SQDIFF but will need to use minimum values instead of max
                const result = src.matchTemplate(scaledTemplate, this.cv.TM_CCOEFF_NORMED);
                const minMax = result.minMaxLoc();
                const {maxVal, maxLoc} = minMax;
                if (currMax === undefined || maxVal > currMax.confidence) {
                    currMaxTemplateSize = scaledTemplate.sizes;
                    currMax = {confidence: maxVal, loc: maxLoc};
                    // was console.log -- use the scoped logger instead
                    this.logger.debug(`New Best Max Confidence: ${formatNumber(maxVal, {toFixed: 4})}`)
                }
                if (maxVal >= confidence) {
                    this.logger.verbose(`Match with confidence ${formatNumber(maxVal, {toFixed: 4})} met threshold of ${confidence}`);
                    break;
                }
            }
            if (currMax !== undefined) {
                matchedConfidence = currMax.confidence;
                if (currMaxTemplateSize !== undefined) {
                    const startX = currMax.loc.x;
                    const startY = currMax.loc.y;
                    // sizes is [rows, cols] so width is index 1, height index 0
                    matchRec = new this.cv.Rect(startX, startY, currMaxTemplateSize[1], currMaxTemplateSize[0]);
                }
            }
        } else {
            // in this scenario we assume our template is small, compared to the source image
            // and the template found in the source is likely larger than the template
            // so we scale down the source incrementally to try to get them to match
            const normalSrc = (await this.normalizeImage(sourceImageData));
            let src = normalSrc.copy();
            // FIX: Mat.sizes is [rows, cols] -> [height, width]; the previous
            // destructuring had these swapped, transposing the resized image
            const [height, width] = src.sizes;
            const edgedTemplate = this.getTemplate();
            const [tH, tW] = edgedTemplate.sizes;
            let ratio = 1;
            for (let i = 0; i <= 80; i += 5) {
                ratio = (100 - i) / 100;
                // FIX: skip the no-op resize on the first pass (ratio === 1);
                // the previous `i !== 100` check was always true since i <= 80
                if (i !== 0) {
                    const resizedWidth = Math.floor(width * ratio);
                    const resizedHeight = Math.floor(height * ratio);
                    // cv.Size takes (width, height)
                    src = src.resize(new this.cv.Size(resizedWidth, resizedHeight));
                }
                const [sH, sW] = src.sizes;
                if (sH < tH || sW < tW) {
                    // scaled source is smaller than template
                    this.logger.debug(`Template matching ended early due to downscaled image being smaller than template`);
                    break;
                }
                const edged = src.canny(50, 200);
                const result = edged.matchTemplate(edgedTemplate, this.cv.TM_CCOEFF_NORMED);
                const minMax = result.minMaxLoc();
                const {maxVal, maxLoc} = minMax;
                if (currMax === undefined || maxVal > currMax.confidence) {
                    currMax = {confidence: maxVal, loc: maxLoc, ratio};
                    // was console.log -- use the scoped logger instead
                    this.logger.debug(`New Best Confidence: ${formatNumber(maxVal, {toFixed: 4})}`)
                }
                if (maxVal >= confidence) {
                    this.logger.verbose(`Match with confidence ${formatNumber(maxVal, {toFixed: 4})} met threshold of ${confidence}`);
                    break;
                }
            }
            if (currMax === undefined) {
                // template was larger than source
                this.logger.debug('No local max found');
            } else {
                // Project the match location/size back into original-image
                // coordinates by undoing the downscale ratio
                const maxRatio = currMax.ratio as number;
                const startX = currMax.loc.x * (1 / maxRatio);
                const startY = currMax.loc.y * (1 / maxRatio);
                const endWidth = tW * (1 / maxRatio);
                const endHeight = tH * (1 / maxRatio);
                matchRec = new this.cv.Rect(startX, startY, endWidth, endHeight);
                matchedConfidence = currMax.confidence;
            }
        }
        if (currMax !== undefined) {
            return [currMax.confidence >= confidence, {matchRec, matchedConfidence}]
        }
        return [false, {matchRec, matchedConfidence}]
    }
}

View File

@@ -1,5 +1,5 @@
import { ISession } from "connect-typeorm";
import { Column, Entity, Index, PrimaryColumn } from "typeorm";
import { Column, Entity, Index, PrimaryColumn, DeleteDateColumn } from "typeorm";
@Entity()
export class ClientSession implements ISession {
@Index()
@@ -12,6 +12,6 @@ export class ClientSession implements ISession {
@Column("text")
public json = "";
@Column({ name: 'destroyedAt', nullable: true })
@DeleteDateColumn({ name: 'destroyedAt', nullable: true })
destroyedAt?: Date;
}

View File

@@ -107,8 +107,7 @@ var bmvbhash_even = function(data: BlockImageData, bits: number) {
return bits_to_hexhash(result);
};
var bmvbhash = function(data: BlockImageData, bits: number) {
var result = [];
var bmvbhash = function(data: BlockImageData, bits: number, calculateFlipped: boolean = false): string | [string, string] {
var i, j, x, y;
var block_width, block_height;
@@ -198,30 +197,51 @@ var bmvbhash = function(data: BlockImageData, bits: number) {
}
}
for (i = 0; i < bits; i++) {
for (j = 0; j < bits; j++) {
result.push(blocks[i][j]);
const blocksFlipped: number[][] | undefined = calculateFlipped ? [] : undefined;
if(blocksFlipped !== undefined) {
for(const row of blocks) {
const flippedRow = [...row];
flippedRow.reverse();
blocksFlipped.push(flippedRow);
}
}
translate_blocks_to_bits(result, block_width * block_height);
return bits_to_hexhash(result);
if(blocksFlipped !== undefined) {
const result = [];
const resultFlip = [];
for (i = 0; i < bits; i++) {
for (j = 0; j < bits; j++) {
result.push(blocks[i][j]);
resultFlip.push(blocksFlipped[i][j])
}
}
translate_blocks_to_bits(result, block_width * block_height);
translate_blocks_to_bits(resultFlip, block_width * block_height);
return [bits_to_hexhash(result), bits_to_hexhash(resultFlip)];
} else {
const result = [];
for (i = 0; i < bits; i++) {
for (j = 0; j < bits; j++) {
result.push(blocks[i][j]);
}
}
translate_blocks_to_bits(result, block_width * block_height);
return bits_to_hexhash(result);
}
};
var blockhashData = function(imgData: BlockImageData, bits: number, method: number) {
var hash;
var blockhashData = function(imgData: BlockImageData, bits: number, method: number, calculateFlipped: boolean): string | [string, string] {
if (method === 1) {
hash = bmvbhash_even(imgData, bits);
return bmvbhash_even(imgData, bits);
}
else if (method === 2) {
hash = bmvbhash(imgData, bits);
}
else {
throw new Error("Bad hashing method");
return bmvbhash(imgData, bits, calculateFlipped);
}
return hash;
throw new Error("Bad hashing method");
};
export const blockhash = async function(src: Sharp, bits: number, method: number = 2): Promise<string> {
@@ -230,5 +250,14 @@ export const blockhash = async function(src: Sharp, bits: number, method: number
width: info.width,
height: info.height,
data: buff,
}, bits, method);
}, bits, method, false) as string;
};
export const blockhashAndFlipped = async function(src: Sharp, bits: number, method: number = 2): Promise<[string, string]> {
const {data: buff, info} = await src.ensureAlpha().raw().toBuffer({resolveWithObject: true});
return blockhashData({
width: info.width,
height: info.height,
data: buff,
}, bits, method, true) as [string, string];
};

View File

@@ -42,4 +42,4 @@ export const filterCriteriaDefault: FilterCriteriaDefaults = {
export const defaultDataDir = path.resolve(__dirname, '../..');
export const defaultConfigFilenames = ['config.json', 'config.yaml'];
export const VERSION = '0.11.1';
export const VERSION = '0.12.0';

View File

@@ -6,7 +6,6 @@ import Comment from "snoowrap/dist/objects/Comment";
import RedditUser from "snoowrap/dist/objects/RedditUser";
import {DataSource} from "typeorm";
import {JsonOperatorConfigDocument, YamlOperatorConfigDocument} from "./Config/Operator";
import {CommentCheckJson, SubmissionCheckJson} from "../Check";
import {SafeDictionary} from "ts-essentials";
import {RuleResultEntity} from "./Entities/RuleResultEntity";
import {Dayjs} from "dayjs";
@@ -43,8 +42,16 @@ import {
ItemOptions
} from "./Infrastructure/Filters/FilterShapes";
import {LoggingOptions, LogLevel, StrongLoggingOptions} from "./Infrastructure/Logging";
import {DatabaseConfig, DatabaseDriver, DatabaseDriverConfig, DatabaseDriverType} from "./Infrastructure/Database";
import {
DatabaseConfig,
DatabaseDriver,
DatabaseDriverConfig,
DatabaseDriverType
} from "./Infrastructure/Database";
import {ActivityType} from "./Infrastructure/Reddit";
import {InfluxDB, WriteApi} from "@influxdata/influxdb-client";
import {InfluxConfig} from "./Influx/interfaces";
import {InfluxClient} from "./Influx/InfluxClient";
export interface ReferenceSubmission {
@@ -725,19 +732,14 @@ export interface SearchAndReplaceRegExp {
}
export interface NamedGroup {
[name: string]: string
}
export interface GlobalRegExResult {
match: string,
groups: string[],
named: NamedGroup | undefined
[name: string]: any
}
export interface RegExResult {
matched: boolean,
matches: string[],
global: GlobalRegExResult[]
match: string,
groups: string[],
index: number
named: NamedGroup
}
export type StrongCache = {
@@ -1058,6 +1060,16 @@ export interface SubredditOverrides {
* */
retention?: EventRetentionPolicyRange
}
/**
* The relative URL to the ContextMod wiki page EX `https://reddit.com/r/subreddit/wiki/<path>`
*
* This will override the default relative URL as well as any URL set at the bot-level
*
* @default "botconfig/contextbot"
* @examples ["botconfig/contextbot"]
* */
wikiConfig?: string
}
/**
@@ -1119,6 +1131,8 @@ export interface BotInstanceJsonConfig {
retention?: EventRetentionPolicyRange
}
influxConfig?: InfluxConfig
/**
* Settings related to bot behavior for subreddits it is managing
* */
@@ -1374,6 +1388,8 @@ export interface OperatorJsonConfig {
retention?: EventRetentionPolicyRange
}
influxConfig?: InfluxConfig
/**
* Set global snoowrap options as well as default snoowrap config for all bots that don't specify their own
* */
@@ -1473,20 +1489,6 @@ export interface OperatorJsonConfig {
storage?: 'database' | 'cache'
}
/**
* Settings related to oauth flow invites
* */
invites?: {
/**
* Number of seconds an invite should be valid for
*
* If `0` or not specified (default) invites do not expire
*
* @default 0
* @examples [0]
* */
maxAge?: number
}
/**
* The default log level to filter to in the web interface
*
@@ -1542,11 +1544,30 @@ export interface OperatorJsonConfig {
secret?: string,
/**
* A friendly name for this server. This will override `friendly` in `BotConnection` if specified.
*
* If none is set one is randomly generated.
* */
friendly?: string,
}
credentials?: ThirdPartyCredentialsJsonConfig
dev?: {
/**
* Invoke `process.memoryUsage()` on an interval and send metrics to Influx
*
* Only works if Influx config is provided
* */
monitorMemory?: boolean
/**
* Interval, in seconds, to invoke `process.memoryUsage()` at
*
* Defaults to 15 seconds
*
* @default 15
* */
monitorMemoryInterval?: number
};
}
export interface RequiredOperatorRedditCredentials extends RedditCredentials {
@@ -1580,6 +1601,7 @@ export interface BotInstanceConfig extends BotInstanceJsonConfig {
database: DataSource
snoowrap: SnoowrapOptions
databaseStatisticsDefaults: DatabaseStatisticsOperatorConfig
opInflux?: InfluxClient,
subreddits: {
names?: string[],
exclude?: string[],
@@ -1620,6 +1642,7 @@ export interface OperatorConfig extends OperatorJsonConfig {
retention?: EventRetentionPolicyRange
}
database: DataSource
influx?: InfluxClient,
web: {
database: DataSource,
databaseConfig: {
@@ -1634,9 +1657,6 @@ export interface OperatorConfig extends OperatorJsonConfig {
secret?: string,
storage?: 'database' | 'cache'
},
invites: {
maxAge: number
},
logLevel?: LogLevel,
maxLogs: number,
clients: BotConnection[]
@@ -1651,6 +1671,10 @@ export interface OperatorConfig extends OperatorJsonConfig {
databaseStatisticsDefaults: DatabaseStatisticsOperatorConfig
bots: BotInstanceConfig[]
credentials: ThirdPartyCredentialsJsonConfig
dev: {
monitorMemory: boolean
monitorMemoryInterval: number
}
}
export interface OperatorFileConfig {
@@ -1901,8 +1925,6 @@ export interface TextMatchOptions {
caseSensitive?: boolean
}
export type ActivityCheckJson = SubmissionCheckJson | CommentCheckJson;
export interface PostBehaviorOptionConfig {
recordTo?: RecordOutputOption
behavior?: PostBehaviorType

View File

@@ -20,7 +20,9 @@ import {CancelDispatchActionJson} from "../Action/CancelDispatchAction";
import {ContributorActionJson} from "../Action/ContributorAction";
import {SentimentRuleJSONConfig} from "../Rule/SentimentRule";
import {ModNoteActionJson} from "../Action/ModNoteAction";
import {IncludesData} from "./Infrastructure/Includes";
import { SubmissionActionJson } from "../Action/SubmissionAction";
export type RuleObjectJsonTypes = RecentActivityRuleJSONConfig | RepeatActivityJSONConfig | AuthorRuleJSONConfig | AttributionJSONConfig | HistoryJSONConfig | RegexRuleJSONConfig | RepostRuleJSONConfig | SentimentRuleJSONConfig
export type ActionJson = CommentActionJson | FlairActionJson | ReportActionJson | LockActionJson | RemoveActionJson | ApproveActionJson | BanActionJson | UserNoteActionJson | MessageActionJson | UserFlairActionJson | DispatchActionJson | CancelDispatchActionJson | ContributorActionJson | ModNoteActionJson | string;
export type ActionJson = CommentActionJson | SubmissionActionJson | FlairActionJson | ReportActionJson | LockActionJson | RemoveActionJson | ApproveActionJson | BanActionJson | UserNoteActionJson | MessageActionJson | UserFlairActionJson | DispatchActionJson | CancelDispatchActionJson | ContributorActionJson | ModNoteActionJson | string | IncludesData;

View File

@@ -1,24 +1,30 @@
import winston, {Logger} from "winston";
import {
asNamedCriteria,
buildCacheOptionsFromProvider, buildCachePrefix, buildFilter, castToBool,
createAjvFactory, fileOrDirectoryIsWriteable,
asNamedCriteria, asWikiContext,
buildCachePrefix, buildFilter, castToBool,
createAjvFactory, fileOrDirectoryIsWriteable, generateRandomName,
mergeArr, mergeFilters,
normalizeName,
overwriteMerge,
parseBool, randomId,
readConfigFile, removeFromSourceIfKeysExistsInDestination,
parseBool, parseExternalUrl, parseUrlContext, parseWikiContext, randomId,
readConfigFile,
removeUndefinedKeys, resolvePathFromEnvWithRelative
} from "./util";
import {CommentCheck} from "./Check/CommentCheck";
import {SubmissionCheck} from "./Check/SubmissionCheck";
import Ajv, {Schema} from 'ajv';
import * as appSchema from './Schema/App.json';
import * as runSchema from './Schema/Run.json';
import * as checkSchema from './Schema/Check.json';
import * as operatorSchema from './Schema/OperatorConfig.json';
import {JSONConfig} from "./JsonConfig";
//import * as rulesetSchema from './Schema/RuleSet.json';
import {SubredditConfigHydratedData, SubredditConfigData} from "./SubredditConfigData";
import LoggedError from "./Utils/LoggedError";
import {CheckStructuredJson} from "./Check";
import {
ActivityCheckConfigData,
ActivityCheckConfigHydratedData,
CheckConfigHydratedData,
CheckConfigObject
} from "./Check";
import {
DEFAULT_POLLING_INTERVAL,
DEFAULT_POLLING_LIMIT,
@@ -37,15 +43,11 @@ import {
OperatorFileConfig,
PostBehavior
} from "./Common/interfaces";
import {isRuleSetJSON, RuleSetJson, RuleSetObjectJson} from "./Rule/RuleSet";
import {isRuleSetJSON, RuleSetConfigData, RuleSetConfigHydratedData, RuleSetConfigObject} from "./Rule/RuleSet";
import deepEqual from "fast-deep-equal";
import {
ActionJson
} from "./Common/types";
import {isActionJson} from "./Action";
import {getLogger, resolveLogDir} from "./Utils/loggerFactory";
import {getLogger} from "./Utils/loggerFactory";
import {GetEnvVars} from 'env-cmd';
import {operatorConfig} from "./Utils/CommandConfig";
import merge from 'deepmerge';
import * as process from "process";
import {
@@ -59,57 +61,54 @@ import objectHash from "object-hash";
import {
createAppDatabaseConnection,
createDatabaseConfig,
createDatabaseConnection,
createWebDatabaseConnection
} from "./Utils/databaseUtils";
import path from 'path';
import {
JsonOperatorConfigDocument,
OperatorConfigDocumentInterface,
YamlOperatorConfigDocument
} from "./Common/Config/Operator";
import {
ConfigDocumentInterface
} from "./Common/Config/AbstractConfigDocument";
import {Document as YamlDocument} from "yaml";
import {SimpleError} from "./Utils/Errors";
import {CMError, SimpleError} from "./Utils/Errors";
import {ErrorWithCause} from "pony-cause";
import {RunJson, RunStructuredJson} from "./Run";
import {RunConfigHydratedData, RunConfigData, RunConfigObject} from "./Run";
import {AuthorRuleConfig} from "./Rule/AuthorRule";
import {
CacheProvider, ConfigFormat,
CacheProvider, ConfigFormat, ConfigFragmentValidationFunc,
PollOn
} from "./Common/Infrastructure/Atomic";
import {
AuthorOptions,
FilterCriteriaDefaults,
FilterCriteriaDefaultsJson,
FilterOptionsJson,
MaybeAnonymousCriteria,
MaybeAnonymousOrStringCriteria, MinimalOrFullFilter, MinimalOrFullFilterJson, NamedCriteria
} from "./Common/Infrastructure/Filters/FilterShapes";
import {AuthorCriteria, TypedActivityState, TypedActivityStates} from "./Common/Infrastructure/Filters/FilterCriteria";
import {AuthorCriteria, TypedActivityState} from "./Common/Infrastructure/Filters/FilterCriteria";
import {StrongLoggingOptions} from "./Common/Infrastructure/Logging";
import {DatabaseDriver, DatabaseDriverConfig, DatabaseDriverType} from "./Common/Infrastructure/Database";
import {DatabaseDriver, DatabaseDriverType} from "./Common/Infrastructure/Database";
import {parseFromJsonOrYamlToObject} from "./Common/Config/ConfigUtil";
import {RunnableBaseJson, RunnableBaseOptions, StructuredRunnableBase} from "./Common/Infrastructure/Runnable";
import {RunnableBaseJson, StructuredRunnableBase} from "./Common/Infrastructure/Runnable";
import {
RuleJson,
RuleObjectJson,
StructuredRuleObjectJson,
StructuredRuleSetObjectJson
RuleConfigData, RuleConfigHydratedData,
RuleConfigObject,
} from "./Common/Infrastructure/RuleShapes";
import {ActionObjectJson, StructuredActionObjectJson} from "./Common/Infrastructure/ActionShapes";
import {
ActionConfigHydratedData, ActionConfigObject,
} from "./Common/Infrastructure/ActionShapes";
import {SubredditResources} from "./Subreddit/SubredditResources";
import {asIncludesData, IncludesData, IncludesString} from "./Common/Infrastructure/Includes";
import ConfigParseError from "./Utils/ConfigParseError";
import {InfluxClient} from "./Common/Influx/InfluxClient";
import {BotInvite} from "./Common/Entities/BotInvite";
export interface ConfigBuilderOptions {
logger: Logger,
}
export const validateJson = (config: object, schema: Schema, logger: Logger): any => {
export const validateJson = <T>(config: object, schema: Schema, logger: Logger): T => {
const ajv = createAjvFactory(logger);
const valid = ajv.validate(schema, config);
if (valid) {
return config;
return config as unknown as T;
} else {
logger.error('Json config was not valid. Please use schema to check validity.', {leaf: 'Config'});
if (Array.isArray(ajv.errors)) {
@@ -162,30 +161,217 @@ export class ConfigBuilder {
constructor(options: ConfigBuilderOptions) {
this.configLogger = options.logger.child({leaf: 'Config'}, mergeArr);
this.configLogger = options.logger.child({labels: ['Config']}, mergeArr);
this.logger = options.logger;
}
validateJson(config: object): JSONConfig {
const validConfig = validateJson(config, appSchema, this.logger);
return validConfig as JSONConfig;
validateJson(config: object): SubredditConfigData {
return validateJson<SubredditConfigData>(config, appSchema, this.logger);
}
parseToStructured(config: JSONConfig, filterCriteriaDefaultsFromBot?: FilterCriteriaDefaults, postCheckBehaviorDefaultsFromBot: PostBehavior = {}): RunStructuredJson[] {
let namedRules: Map<string, RuleObjectJson> = new Map();
let namedActions: Map<string, ActionObjectJson> = new Map();
const {checks = [], runs = [], filterCriteriaDefaults, postCheckBehaviorDefaults} = config;
async hydrateConfigFragment<T>(val: IncludesData | string | object, resource: SubredditResources, validateFunc?: ConfigFragmentValidationFunc): Promise<T[]> {
let includes: IncludesData | undefined = undefined;
if(typeof val === 'string') {
const strContextResult = parseUrlContext(val);
if(strContextResult !== undefined) {
this.configLogger.debug(`Detected ${asWikiContext(strContextResult) !== undefined ? 'REDDIT WIKI' : 'URL'} type Config Fragment from string: ${val}`);
includes = {
path: val as IncludesString
};
} else {
this.configLogger.debug(`Did not detect Config Fragment as a URL resource: ${val}`);
}
} else if (asIncludesData(val)) {
includes = val;
const strContextResult = parseUrlContext(val.path);
if(strContextResult === undefined) {
throw new ConfigParseError(`Could not detect Config Fragment path as a valid URL Resource. Resource must be prefixed with either 'url:' or 'wiki:' -- ${val.path}`);
}
}
if(includes === undefined) {
if(Array.isArray(val)) {
return val as unknown as T[];
} else {
return [val as unknown as T];
}
}
const resolvedFragment = await resource.getConfigFragment(includes, validateFunc);
if(Array.isArray(resolvedFragment)) {
return resolvedFragment
}
return [resolvedFragment as T];
}
async hydrateConfig(config: SubredditConfigData, resource: SubredditResources): Promise<SubredditConfigHydratedData> {
const {
runs = [],
checks = [],
...restConfig
} = config;
if(checks.length > 0 && runs.length > 0) {
// cannot have both checks and runs at top-level
throw new Error(`Subreddit configuration cannot contain both 'checks' and 'runs' at top-level.`);
}
// TODO consolidate this with parseToStructured
const realRuns = runs;
if(checks.length > 0) {
realRuns.push({name: 'Run1', checks: checks});
}
const hydratedRuns: RunConfigHydratedData[] = [];
let runIndex = 1;
for(const r of realRuns) {
let hydratedRunArr: RunConfigData | RunConfigData[];
try {
hydratedRunArr = await this.hydrateConfigFragment<RunConfigData>(r, resource, <RunConfigData>(data: object, fetched: boolean) => {
if (fetched) {
if (Array.isArray(data)) {
for (const runData of data) {
validateJson<RunConfigData>(runData, runSchema, this.logger);
}
} else {
validateJson<RunConfigData>(data, runSchema, this.logger);
}
return true;
}
return true;
});
} catch (e: any) {
throw new CMError(`Could not fetch or validate Run #${runIndex}`, {cause: e});
}
for(const hydratedRunVal of hydratedRunArr) {
if (typeof hydratedRunVal === 'string') {
throw new ConfigParseError(`Run Config Fragment #${runIndex} was not in a recognized Config Fragment format. Given: ${hydratedRunVal}`);
}
// validate run with unhydrated checks
const preValidatedRun = hydratedRunVal as RunConfigData;
const {checks, ...rest} = preValidatedRun;
const hydratedChecks: CheckConfigHydratedData[] = [];
let checkIndex = 1;
for (const c of preValidatedRun.checks) {
let hydratedCheckDataArr: ActivityCheckConfigHydratedData[];
try {
hydratedCheckDataArr = await this.hydrateConfigFragment<ActivityCheckConfigHydratedData>(c, resource, (data: object, fetched: boolean) => {
if (fetched) {
if (Array.isArray(data)) {
for (const checkObj of data) {
validateJson<ActivityCheckConfigHydratedData>(checkObj, checkSchema, this.logger);
}
} else {
validateJson<ActivityCheckConfigHydratedData>(data, checkSchema, this.logger);
}
return true;
}
return true;
});
} catch (e: any) {
throw new CMError(`Could not fetch or validate Check Config Fragment #${checkIndex} in Run #${runIndex}`, {cause: e});
}
for (const hydratedCheckData of hydratedCheckDataArr) {
if (typeof hydratedCheckData === 'string') {
throw new ConfigParseError(`Check #${checkIndex} in Run #${runIndex} was not in a recognized include format. Given: ${hydratedCheckData}`);
}
const preValidatedCheck = hydratedCheckData as ActivityCheckConfigHydratedData;
const {rules, actions, ...rest} = preValidatedCheck;
const hydratedCheckConfigData: CheckConfigHydratedData = rest;
if (rules !== undefined) {
const hydratedRulesOrSets: (RuleSetConfigHydratedData | RuleConfigHydratedData)[] = [];
let ruleIndex = 1;
for (const r of rules) {
let hydratedRuleOrSetArr: (RuleConfigHydratedData | RuleSetConfigHydratedData)[];
try {
hydratedRuleOrSetArr = await this.hydrateConfigFragment<(RuleSetConfigHydratedData | RuleConfigHydratedData)>(r, resource);
} catch (e: any) {
throw new CMError(`Rule Config Fragment #${ruleIndex} in Check #${checkIndex} could not be fetched`, {cause: e});
}
for (const hydratedRuleOrSet of hydratedRuleOrSetArr) {
if (typeof hydratedRuleOrSet === 'string') {
hydratedRulesOrSets.push(hydratedRuleOrSet);
} else if (isRuleSetJSON(hydratedRuleOrSet)) {
const hydratedRulesetRules: RuleConfigHydratedData[] = [];
for (const rsr of hydratedRuleOrSet.rules) {
const hydratedRuleSetRuleArr = await this.hydrateConfigFragment<RuleConfigHydratedData>(rsr, resource);
for(const rsrData of hydratedRuleSetRuleArr) {
// either a string or rule data at this point
// we will validate the whole check again so this rule will be validated eventually
hydratedRulesetRules.push(rsrData)
}
}
hydratedRuleOrSet.rules = hydratedRulesetRules;
hydratedRulesOrSets.push(hydratedRuleOrSet);
} else {
hydratedRulesOrSets.push(hydratedRuleOrSet);
}
ruleIndex++;
}
}
hydratedCheckConfigData.rules = hydratedRulesOrSets;
}
if (actions !== undefined) {
const hydratedActions: ActionConfigHydratedData[] = [];
let actionIndex = 1;
for (const a of actions) {
let hydratedActionArr: ActionConfigHydratedData[];
try {
hydratedActionArr = await this.hydrateConfigFragment<ActionConfigHydratedData>(a, resource);
} catch (e: any) {
throw new CMError(`Action Config Fragment #${actionIndex} in Check #${checkIndex} could not be fetched`, {cause: e});
}
for (const hydratedAction of hydratedActionArr) {
hydratedActions.push(hydratedAction);
actionIndex++;
}
}
hydratedCheckConfigData.actions = hydratedActions;
}
hydratedChecks.push(hydratedCheckConfigData);
checkIndex++;
}
}
const hydratedRun: RunConfigHydratedData = {...rest, checks: hydratedChecks};
hydratedRuns.push(hydratedRun);
runIndex++;
}
}
const hydratedConfig: SubredditConfigHydratedData = {...restConfig, runs: hydratedRuns};
const validatedHydratedConfig = validateJson<SubredditConfigHydratedData>(hydratedConfig, appSchema, this.logger);
return validatedHydratedConfig;
}
async parseToStructured(config: SubredditConfigData, resource: SubredditResources, filterCriteriaDefaultsFromBot?: FilterCriteriaDefaults, postCheckBehaviorDefaultsFromBot: PostBehavior = {}): Promise<RunConfigObject[]> {
let namedRules: Map<string, RuleConfigObject> = new Map();
let namedActions: Map<string, ActionConfigObject> = new Map();
const {filterCriteriaDefaults, postCheckBehaviorDefaults} = config;
const hydratedConfig = await this.hydrateConfig(config, resource);
const {runs: realRuns = []} = hydratedConfig;
for(const r of realRuns) {
for (const c of r.checks) {
const {rules = [], actions = []} = c;
@@ -194,11 +380,11 @@ export class ConfigBuilder {
}
}
const [namedAuthorFilters, namedItemFilters] = extractNamedFilters({...config, runs: realRuns});
const [namedAuthorFilters, namedItemFilters] = extractNamedFilters({...hydratedConfig, runs: realRuns});
const configFilterDefaults = filterCriteriaDefaults === undefined ? undefined : buildDefaultFilterCriteriaFromJson(filterCriteriaDefaults, namedAuthorFilters, namedItemFilters);
const structuredRuns: RunStructuredJson[] = [];
const structuredRuns: RunConfigObject[] = [];
const namedFilters = insertNameFilters(namedAuthorFilters, namedItemFilters);
@@ -210,7 +396,7 @@ export class ConfigBuilder {
const configFilterDefaultsFromRun = filterCriteriaDefaultsFromRun === undefined ? undefined : buildDefaultFilterCriteriaFromJson(filterCriteriaDefaultsFromRun, namedAuthorFilters, namedItemFilters);
const structuredChecks: CheckStructuredJson[] = [];
const structuredChecks: CheckConfigObject[] = [];
for (const c of r.checks) {
const {rules = [], actions = [], authorIs = {}, itemIs = []} = c;
const strongRules = insertNamedRules(rules, namedRules, namedAuthorFilters, namedItemFilters);
@@ -227,7 +413,7 @@ export class ConfigBuilder {
rules: strongRules,
actions: strongActions,
...postCheckBehaviors
} as CheckStructuredJson;
} as CheckConfigObject;
structuredChecks.push(strongCheck);
}
structuredRuns.push({
@@ -283,19 +469,19 @@ export const buildDefaultFilterCriteriaFromJson = (val: FilterCriteriaDefaultsJs
return def;
}
export const extractNamedRules = (rules: Array<RuleSetJson | RuleJson>, namedRules: Map<string, RuleObjectJson> = new Map()): Map<string, RuleObjectJson> => {
export const extractNamedRules = (rules: Array<RuleSetConfigData | RuleConfigData>, namedRules: Map<string, RuleConfigObject> = new Map()): Map<string, RuleConfigObject> => {
//const namedRules = new Map();
for (const r of rules) {
let rulesToAdd: RuleObjectJson[] = [];
let rulesToAdd: RuleConfigObject[] = [];
if ((typeof r === 'object')) {
if ((r as RuleObjectJson).kind !== undefined) {
if ((r as RuleConfigObject).kind !== undefined) {
// itsa rule
const rule = r as RuleObjectJson;
const rule = r as RuleConfigObject;
if (rule.name !== undefined) {
rulesToAdd.push(rule);
}
} else {
const ruleSet = r as RuleSetJson;
const ruleSet = r as RuleSetConfigData;
const nestedNamed = extractNamedRules(ruleSet.rules);
rulesToAdd = [...nestedNamed.values()];
}
@@ -306,7 +492,7 @@ export const extractNamedRules = (rules: Array<RuleSetJson | RuleJson>, namedRul
const ruleNoName = {...rest};
if (namedRules.has(normalName)) {
const {name: nn, ...ruleRest} = namedRules.get(normalName) as RuleObjectJson;
const {name: nn, ...ruleRest} = namedRules.get(normalName) as RuleConfigObject;
if (!deepEqual(ruleRest, ruleNoName)) {
throw new Error(`Rule names must be unique (case-insensitive). Conflicting name: ${name}`);
}
@@ -352,7 +538,7 @@ const parseFilterJson = <T>(addToFilter: FilterJsonFuncArg<T>) => (val: MinimalO
}
}
export const extractNamedFilters = (config: JSONConfig, namedAuthorFilters: Map<string, NamedCriteria<AuthorCriteria>> = new Map(), namedItemFilters: Map<string, NamedCriteria<TypedActivityState>> = new Map()): [Map<string, NamedCriteria<AuthorCriteria>>, Map<string, NamedCriteria<TypedActivityState>>] => {
export const extractNamedFilters = (config: SubredditConfigHydratedData, namedAuthorFilters: Map<string, NamedCriteria<AuthorCriteria>> = new Map(), namedItemFilters: Map<string, NamedCriteria<TypedActivityState>> = new Map()): [Map<string, NamedCriteria<AuthorCriteria>>, Map<string, NamedCriteria<TypedActivityState>>] => {
const addToAuthors = addToNamedFilter(namedAuthorFilters, 'authorIs');
const addToItems = addToNamedFilter(namedItemFilters, 'itemIs');
@@ -407,7 +593,7 @@ export const extractNamedFilters = (config: JSONConfig, namedAuthorFilters: Map<
parseAuthorIs(filterCriteriaDefaults?.authorIs);
parseItemIs(filterCriteriaDefaults?.itemIs);
for (const r of runs as RunJson[]) {
for (const r of runs) {
const {
filterCriteriaDefaults: filterCriteriaDefaultsFromRun
@@ -508,13 +694,13 @@ export const insertNameFilters = (namedAuthorFilters: Map<string, NamedCriteria<
return runnableOpts;
}
export const insertNamedRules = (rules: Array<RuleSetJson | RuleJson>, namedRules: Map<string, RuleObjectJson> = new Map(), namedAuthorFilters: Map<string, NamedCriteria<AuthorCriteria>> = new Map(), namedItemFilters: Map<string, NamedCriteria<TypedActivityState>> = new Map()): (StructuredRuleSetObjectJson | StructuredRuleObjectJson)[] => {
export const insertNamedRules = (rules: Array<RuleSetConfigHydratedData | RuleConfigHydratedData>, namedRules: Map<string, RuleConfigObject> = new Map(), namedAuthorFilters: Map<string, NamedCriteria<AuthorCriteria>> = new Map(), namedItemFilters: Map<string, NamedCriteria<TypedActivityState>> = new Map()): (RuleSetConfigObject | RuleConfigObject)[] => {
const namedFilters = insertNameFilters(namedAuthorFilters, namedItemFilters);
const strongRules: (StructuredRuleSetObjectJson | StructuredRuleObjectJson)[] = [];
const strongRules: (RuleSetConfigObject | RuleConfigObject)[] = [];
for (const r of rules) {
let rule: StructuredRuleObjectJson | undefined;
let rule: RuleConfigObject | undefined;
if (typeof r === 'string') {
const foundRule = namedRules.get(r.toLowerCase());
if (foundRule === undefined) {
@@ -523,20 +709,20 @@ export const insertNamedRules = (rules: Array<RuleSetJson | RuleJson>, namedRule
rule = {
...foundRule,
...namedFilters(foundRule)
} as StructuredRuleObjectJson
} as RuleConfigObject
//strongRules.push(foundRule);
} else if (isRuleSetJSON(r)) {
const {rules: sr, ...rest} = r;
const setRules = insertNamedRules(sr, namedRules, namedAuthorFilters, namedItemFilters);
const strongSet = {rules: setRules, ...rest} as StructuredRuleSetObjectJson;
const strongSet = {rules: setRules, ...rest} as RuleSetConfigObject;
strongRules.push(strongSet);
} else {
rule = {...r, ...namedFilters(r)} as StructuredRuleObjectJson;
rule = {...r, ...namedFilters(r)} as RuleConfigObject;
}
if(rule !== undefined) {
if(rule.kind === 'author') {
const authorRuleConfig = rule as (StructuredRuleObjectJson & AuthorRuleConfig);
const authorRuleConfig = rule as (RuleConfigObject & AuthorRuleConfig);
const filters = namedFilters({authorIs: {include: authorRuleConfig.include, exclude: authorRuleConfig.exclude}});
const builtFilter = buildFilter(filters.authorIs as MinimalOrFullFilter<AuthorCriteria>);
rule = {
@@ -546,14 +732,14 @@ export const insertNamedRules = (rules: Array<RuleSetJson | RuleJson>, namedRule
exclude: builtFilter.exclude
}
}
strongRules.push(rule as StructuredRuleObjectJson);
strongRules.push(rule as RuleConfigObject);
}
}
return strongRules;
}
export const extractNamedActions = (actions: Array<ActionJson>, namedActions: Map<string, ActionObjectJson> = new Map()): Map<string, ActionObjectJson> => {
export const extractNamedActions = (actions: Array<ActionConfigHydratedData>, namedActions: Map<string, ActionConfigObject> = new Map()): Map<string, ActionConfigObject> => {
for (const a of actions) {
if (!(typeof a === 'string')) {
if (isActionJson(a) && a.name !== undefined) {
@@ -562,7 +748,7 @@ export const extractNamedActions = (actions: Array<ActionJson>, namedActions: Ma
const actionNoName = {...rest};
if (namedActions.has(normalName)) {
// @ts-ignore
const {name: nn, ...aRest} = namedActions.get(normalName) as ActionObjectJson;
const {name: nn, ...aRest} = namedActions.get(normalName) as ActionConfigObject;
if (!deepEqual(aRest, actionNoName)) {
throw new Error(`Actions names must be unique (case-insensitive). Conflicting name: ${a.name}`);
}
@@ -575,20 +761,20 @@ export const extractNamedActions = (actions: Array<ActionJson>, namedActions: Ma
return namedActions;
}
export const insertNamedActions = (actions: Array<ActionJson>, namedActions: Map<string, ActionObjectJson> = new Map(), namedAuthorFilters: Map<string, NamedCriteria<AuthorCriteria>> = new Map(), namedItemFilters: Map<string, NamedCriteria<TypedActivityState>> = new Map()): Array<StructuredActionObjectJson> => {
export const insertNamedActions = (actions: Array<ActionConfigHydratedData>, namedActions: Map<string, ActionConfigObject> = new Map(), namedAuthorFilters: Map<string, NamedCriteria<AuthorCriteria>> = new Map(), namedItemFilters: Map<string, NamedCriteria<TypedActivityState>> = new Map()): Array<ActionConfigObject> => {
const namedFilters = insertNameFilters(namedAuthorFilters, namedItemFilters);
const strongActions: Array<StructuredActionObjectJson> = [];
const strongActions: Array<ActionConfigObject> = [];
for (const a of actions) {
if (typeof a === 'string') {
const foundAction = namedActions.get(a.toLowerCase());
if (foundAction === undefined) {
throw new Error(`No named Action with the name ${a} was found`);
}
strongActions.push({...foundAction, ...namedFilters(foundAction)} as StructuredActionObjectJson);
strongActions.push({...foundAction, ...namedFilters(foundAction)} as ActionConfigObject);
} else {
strongActions.push({...a, ...namedFilters(a)} as StructuredActionObjectJson);
strongActions.push({...a, ...namedFilters(a)} as ActionConfigObject);
}
}
@@ -1024,6 +1210,7 @@ export const buildOperatorConfigWithDefaults = async (data: OperatorJsonConfig):
migrations = {},
retention,
} = {},
influxConfig,
web: {
port = 8085,
maxLogs = 200,
@@ -1038,9 +1225,6 @@ export const buildOperatorConfigWithDefaults = async (data: OperatorJsonConfig):
maxAge: sessionMaxAge = 86400,
storage: sessionStorage = undefined,
} = {},
invites: {
maxAge: inviteMaxAge = 0,
} = {},
clients,
credentials: webCredentials,
operators,
@@ -1053,6 +1237,10 @@ export const buildOperatorConfigWithDefaults = async (data: OperatorJsonConfig):
} = {},
credentials = {},
bots = [],
dev: {
monitorMemory = false,
monitorMemoryInterval = 15
} = {},
} = data;
let cache: StrongCache;
@@ -1141,6 +1329,37 @@ export const buildOperatorConfigWithDefaults = async (data: OperatorJsonConfig):
}
const webDbConfig = createDatabaseConfig(realdbConnectionWeb);
const appDataSource = await createAppDatabaseConnection(dbConfig, appLogger);
let influx: InfluxClient | undefined = undefined;
if(influxConfig !== undefined) {
const tags = friendly !== undefined ? {server: friendly} : undefined;
influx = new InfluxClient(influxConfig, appLogger, tags);
await influx.isReady();
}
/* let friendlyId: string;
if (friendly === undefined) {
let randFriendly: string = generateRandomName();
// see if we can get invites to check for unique name
// if this is a new instance will not be able to get it but try anyway
try {
const inviteRepo = appDataSource.getRepository(BotInvite);
const exists = async (name: string) => {
const existing = await inviteRepo.findBy({instance: name});
return existing.length > 0;
}
while (await exists(randFriendly)) {
randFriendly = generateRandomName();
}
} catch (e: any) {
// something went wrong, just ignore this
}
friendlyId = randFriendly;
} else {
friendlyId = friendly;
}*/
const config: OperatorConfig = {
mode,
operator: {
@@ -1154,12 +1373,13 @@ export const buildOperatorConfigWithDefaults = async (data: OperatorJsonConfig):
frequency,
minFrequency
},
database: await createAppDatabaseConnection(dbConfig, appLogger),
database: appDataSource,
databaseConfig: {
connection: dbConfig,
migrations,
retention,
},
influx,
userAgent,
web: {
database: await createWebDatabaseConnection(webDbConfig, appLogger),
@@ -1173,9 +1393,6 @@ export const buildOperatorConfigWithDefaults = async (data: OperatorJsonConfig):
},
port,
storage: webStorage,
invites: {
maxAge: inviteMaxAge,
},
session: {
secret: sessionSecretFromConfig,
maxAge: sessionMaxAge,
@@ -1189,10 +1406,14 @@ export const buildOperatorConfigWithDefaults = async (data: OperatorJsonConfig):
api: {
port: apiPort,
secret: apiSecret,
friendly
friendly,
},
bots: [],
credentials,
dev: {
monitorMemory,
monitorMemoryInterval
}
};
config.bots = bots.map(x => buildBotConfig(x, config));
@@ -1214,6 +1435,7 @@ export const buildBotConfig = (data: BotInstanceJsonConfig, opConfig: OperatorCo
databaseConfig: {
retention: retentionFromOp,
} = {},
influx: opInflux
} = opConfig;
const {
name: botName,
@@ -1239,6 +1461,7 @@ export const buildBotConfig = (data: BotInstanceJsonConfig, opConfig: OperatorCo
databaseConfig: {
retention,
} = {},
influxConfig,
flowControlDefaults,
credentials = {},
subreddits: {
@@ -1370,6 +1593,7 @@ export const buildBotConfig = (data: BotInstanceJsonConfig, opConfig: OperatorCo
databaseConfig: {
retention: retention ?? retentionFromOp
},
opInflux,
subreddits: {
names,
exclude,

View File

@@ -1,23 +0,0 @@
import {CheckJson, CommentCheckJson, SubmissionCheckJson} from "./Check";
import {ActivityCheckJson, ManagerOptions} from "./Common/interfaces";
import {RunJson} from "./Run";
export interface JSONConfig extends ManagerOptions {
/**
* A list of all the checks that should be run for a subreddit.
*
* Checks are split into two lists -- submission or comment -- based on kind and run independently.
*
* Checks in each list are run in the order found in the configuration.
*
* When a check "passes", and actions are performed, then all subsequent checks are skipped.
* @minItems 1
* */
checks?: ActivityCheckJson[]
/**
* A list of sets of Checks to run
* @minItems 1
* */
runs?: RunJson[]
}

View File

@@ -42,6 +42,7 @@ import {
} from "../Common/Infrastructure/Filters/FilterCriteria";
import {ActivityWindow, ActivityWindowConfig} from "../Common/Infrastructure/ActivityWindow";
import {comparisonTextOp, parseGenericValueOrPercentComparison} from "../Common/Infrastructure/Comparisons";
import {ImageHashCacheData} from "../Common/Infrastructure/Atomic";
const parseLink = parseUsableLinkIdentifier();
@@ -195,21 +196,21 @@ export class RecentActivityRule extends Rule {
let filteredActivity: (Submission|Comment)[] = [];
let analysisTimes: number[] = [];
let referenceImage: ImageData | undefined;
let refHash: Required<ImageHashCacheData> | undefined;
if (this.imageDetection.enable) {
try {
referenceImage = ImageData.fromSubmission(item);
referenceImage.setPreferredResolutionByWidth(800);
if(this.imageDetection.hash.enable) {
let refHash: string | undefined;
if(this.imageDetection.hash.ttl !== undefined) {
refHash = await this.resources.getImageHash(referenceImage);
if(refHash === undefined) {
refHash = await referenceImage.hash(this.imageDetection.hash.bits);
await this.resources.setImageHash(referenceImage, refHash, this.imageDetection.hash.ttl);
} else if(refHash.length !== bitsToHexLength(this.imageDetection.hash.bits)) {
await this.resources.setImageHash(referenceImage, this.imageDetection.hash.ttl);
} else if(refHash.original.length !== bitsToHexLength(this.imageDetection.hash.bits)) {
this.logger.warn('Reference image hash length did not correspond to bits specified in config. Recomputing...');
refHash = await referenceImage.hash(this.imageDetection.hash.bits);
await this.resources.setImageHash(referenceImage, refHash, this.imageDetection.hash.ttl);
await referenceImage.hash(this.imageDetection.hash.bits);
await this.resources.setImageHash(referenceImage, this.imageDetection.hash.ttl);
}
} else {
refHash = await referenceImage.hash(this.imageDetection.hash.bits);
@@ -244,29 +245,38 @@ export class RecentActivityRule extends Rule {
}
// only do image detection if regular URL comparison and other conditions fail first
// to reduce CPU/bandwidth usage
if (referenceImage !== undefined) {
if (referenceImage !== undefined && refHash !== undefined) {
try {
let imgData = ImageData.fromSubmission(x);
imgData.setPreferredResolutionByWidth(800);
if(this.imageDetection.hash.enable) {
let compareHash: string | undefined;
let compareHash: Required<ImageHashCacheData> | undefined;
if(this.imageDetection.hash.ttl !== undefined) {
compareHash = await this.resources.getImageHash(imgData);
}
if(compareHash === undefined)
if(compareHash === undefined || compareHash.original.length !== refHash.original.length)
{
if(compareHash !== undefined) {
this.logger.debug(`Hash lengths were not the same! Will need to recompute compare hash to match reference.\n\nReference: ${referenceImage.basePath} has is ${refHash.original.length} char long | Comparing: ${imgData.basePath} has is ${compareHash} ${compareHash.original.length} long`);
}
compareHash = await imgData.hash(this.imageDetection.hash.bits);
if(this.imageDetection.hash.ttl !== undefined) {
await this.resources.setImageHash(imgData, compareHash, this.imageDetection.hash.ttl);
await this.resources.setImageHash(imgData, this.imageDetection.hash.ttl);
}
}
const refHash = await referenceImage.hash(this.imageDetection.hash.bits);
if(refHash.length !== compareHash.length) {
this.logger.debug(`Hash lengths were not the same! Will need to recompute compare hash to match reference.\n\nReference: ${referenceImage.baseUrl} has is ${refHash.length} char long | Comparing: ${imgData.baseUrl} has is ${compareHash} ${compareHash.length} long`);
compareHash = await imgData.hash(this.imageDetection.hash.bits)
let diff: number;
const odistance = leven(refHash.original, compareHash.original);
diff = (odistance/refHash.original.length)*100;
// compare flipped hash if it exists
// if it has less difference than normal comparison then the image is probably flipped (or so different it doesn't matter)
if(compareHash.flipped !== undefined) {
const fdistance = leven(refHash.original, compareHash.flipped);
const fdiff = (fdistance/refHash.original.length)*100;
if(fdiff < diff) {
diff = fdiff;
}
}
const distance = leven(refHash, compareHash);
const diff = (distance/refHash.length)*100;
// return image if hard is defined and diff is less

View File

@@ -380,8 +380,10 @@ export class RegexRule extends Rule {
for (const c of contents) {
const results = parseRegex(reg, c);
if (results.matched) {
m = m.concat(results.matches);
if(results !== undefined) {
for(const r of results) {
m.push(r.match);
}
}
}
return m;

View File

@@ -1,6 +1,6 @@
import {RecentActivityRule, RecentActivityRuleJSONConfig} from "./RecentActivityRule";
import RepeatActivityRule, {RepeatActivityJSONConfig} from "./RepeatActivityRule";
import {Rule, RuleJSONConfig, StructuredRuleJson} from "./index";
import {Rule} from "./index";
import AuthorRule, {AuthorRuleJSONConfig} from "./AuthorRule";
import {AttributionJSONConfig, AttributionRule} from "./AttributionRule";
import {Logger} from "winston";
@@ -11,9 +11,10 @@ import Snoowrap from "snoowrap";
import {RepostRule, RepostRuleJSONConfig} from "./RepostRule";
import {StructuredFilter} from "../Common/Infrastructure/Filters/FilterShapes";
import {SentimentRule, SentimentRuleJSONConfig} from "./SentimentRule";
import {StructuredRuleConfigObject} from "../Common/Infrastructure/RuleShapes";
export function ruleFactory
(config: StructuredRuleJson, logger: Logger, subredditName: string, resources: SubredditResources, client: Snoowrap): Rule {
(config: StructuredRuleConfigObject, logger: Logger, subredditName: string, resources: SubredditResources, client: Snoowrap): Rule {
let cfg;
switch (config.kind) {
case 'recentActivity':

View File

@@ -1,17 +1,20 @@
import {IRule, Triggerable, Rule, RuleJSONConfig, StructuredRuleJson} from "./index";
import {IRule, Rule} from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {ruleFactory} from "./RuleFactory";
import {createAjvFactory, mergeArr} from "../util";
import {Logger} from "winston";
import {JoinCondition, RuleResult, RuleSetResult} from "../Common/interfaces";
import {JoinCondition, RuleSetResult} from "../Common/interfaces";
import * as RuleSchema from '../Schema/Rule.json';
import Ajv from 'ajv';
import {SubredditResources} from "../Subreddit/SubredditResources";
import {runCheckOptions} from "../Subreddit/Manager";
import {RuleResultEntity} from "../Common/Entities/RuleResultEntity";
import {RuleSetResultEntity} from "../Common/Entities/RuleSetResultEntity";
import {JoinOperands} from "../Common/Infrastructure/Atomic";
import {RuleJson, RuleObjectJson, StructuredRuleObjectJson} from "../Common/Infrastructure/RuleShapes";
import {
RuleConfigData,
RuleConfigHydratedData,
RuleConfigObject,
StructuredRuleConfigObject
} from "../Common/Infrastructure/RuleShapes";
export class RuleSet implements IRuleSet {
rules: Rule[] = [];
@@ -99,7 +102,7 @@ export interface IRuleSet extends JoinCondition {
}
export interface RuleSetOptions extends IRuleSet {
rules: Array<StructuredRuleObjectJson>,
rules: Array<StructuredRuleConfigObject>,
logger: Logger
subredditName: string
resources: SubredditResources
@@ -109,20 +112,24 @@ export interface RuleSetOptions extends IRuleSet {
/**
* A RuleSet is a "nested" set of `Rule` objects that can be used to create more complex AND/OR behavior. Think of the outcome of a `RuleSet` as the result of all of its run `Rule` objects (based on `condition`)
* */
export interface RuleSetJson extends JoinCondition {
export interface RuleSetConfigData extends JoinCondition {
/**
* Can be `Rule` or the `name` of any **named** `Rule` in your subreddit's configuration
* @minItems 1
* */
rules: Array<RuleJson>
rules: RuleConfigData[]
}
export interface RuleSetObjectJson extends RuleSetJson {
rules: Array<RuleObjectJson>
export interface RuleSetConfigHydratedData extends RuleSetConfigData {
rules: RuleConfigHydratedData[]
}
export const isRuleSetJSON = (obj: object): obj is RuleSetJson => {
return (obj as RuleSetJson).rules !== undefined;
export interface RuleSetConfigObject extends RuleSetConfigHydratedData {
rules: RuleConfigObject[]
}
export const isRuleSetJSON = (obj: object): obj is RuleSetConfigData => {
return (obj as RuleSetConfigData).rules !== undefined;
}
export const isRuleSet = (obj: object): obj is RuleSet => {

View File

@@ -187,8 +187,3 @@ export interface RuleJSONConfig extends IRule {
*/
kind: 'recentActivity' | 'repeatActivity' | 'author' | 'attribution' | 'history' | 'regex' | 'repost' | 'sentiment'
}
export interface StructuredRuleJson extends Omit<RuleJSONConfig, 'authorIs' | 'itemIs'>, StructuredRunnableBase {
}

View File

@@ -1,6 +1,11 @@
import {asStructuredCommentCheckJson, asStructuredSubmissionCheckJson, Check, CheckStructuredJson} from "../Check";
import {
ActivityCheckJson,
ActivityCheckConfigHydratedData,
ActivityCheckConfigValue, ActivityCheckObject,
asStructuredCommentCheckJson,
asStructuredSubmissionCheckJson,
Check,
} from "../Check";
import {
PostBehavior, PostBehaviorOption,
RunResult
} from "../Common/interfaces";
@@ -20,6 +25,7 @@ import {RuleResultEntity} from "../Common/Entities/RuleResultEntity";
import {RunnableBase} from "../Common/RunnableBase";
import {RunnableBaseJson, RunnableBaseOptions, StructuredRunnableBase} from "../Common/Infrastructure/Runnable";
import {FilterCriteriaDefaults} from "../Common/Infrastructure/Filters/FilterShapes";
import {IncludesData} from "../Common/Infrastructure/Includes";
export class Run extends RunnableBase {
name: string;
@@ -296,7 +302,7 @@ export interface IRun extends PostBehavior, RunnableBaseJson {
enable?: boolean,
}
export interface RunOptions extends RunStructuredJson, RunnableBaseOptions {
export interface RunOptions extends RunConfigObject, RunnableBaseOptions {
// submissionChecks?: SubmissionCheck[]
// commentChecks?: CommentCheck[]
//checks: CheckStructuredJson[]
@@ -308,10 +314,16 @@ export interface RunOptions extends RunStructuredJson, RunnableBaseOptions {
emitter: EventEmitter;
}
export interface RunJson extends IRun {
checks: ActivityCheckJson[]
export interface RunConfigData extends IRun {
checks: ActivityCheckConfigValue[]
}
export interface RunStructuredJson extends Omit<RunJson, 'authorIs' | 'itemIs' | 'checks'>, StructuredRunnableBase {
checks: CheckStructuredJson[]
export type RunConfigValue = string | IncludesData | RunConfigData;
export interface RunConfigHydratedData extends IRun {
checks: ActivityCheckConfigHydratedData[]
}
export interface RunConfigObject extends Omit<RunConfigHydratedData, 'authorIs' | 'itemIs'>, StructuredRunnableBase {
checks: ActivityCheckObject[]
}

File diff suppressed because it is too large Load Diff

View File

@@ -78,8 +78,8 @@
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
"type": "string"
},
"score": {
@@ -919,19 +919,17 @@
"type": "string"
},
"note": {
"description": "A mod note for this ban",
"description": "A mod note for this ban. Can use Templating.\n\nIf the length expands to more than 100 characters it will truncated with \"...\"",
"examples": [
"Sock puppet for u/AnotherUser"
],
"maxLength": 100,
"type": "string"
},
"reason": {
"description": "Reason for ban.",
"description": "Reason for ban. Can use Templating.\n\nIf the length expands to more than 100 characters it will truncated with \"...\"",
"examples": [
"repeat spam"
],
"maxLength": 100,
"type": "string"
}
},
@@ -1378,6 +1376,20 @@
"sticky": {
"description": "Stick the comment after creation?",
"type": "boolean"
},
"targets": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "Specify where this comment should be made\n\nValid values: 'self' | 'parent' | [reddit permalink]\n\n'self' and 'parent' are special targets that are relative to the Activity being processed:\n* When Activity is Submission => 'parent' does nothing\n* When Activity is Comment\n * 'self' => reply to Activity\n * 'parent' => make a top-level comment in the Submission the Comment is in\n\nIf target is not self/parent then CM assumes the value is a reddit permalink and will attempt to make a comment to that Activity"
}
},
"required": [
@@ -1386,7 +1398,7 @@
],
"type": "object"
},
"CommentCheckJson": {
"CommentCheckConfigData": {
"properties": {
"actions": {
"description": "The `Actions` to run after the check is successfully triggered. ALL `Actions` will run in the order they are listed\n\n Can be `Action` or the `name` of any **named** `Action` in your subreddit's configuration",
@@ -1404,6 +1416,9 @@
],
"items": {
"anyOf": [
{
"$ref": "#/definitions/IncludesData"
},
{
"$ref": "#/definitions/FlairActionJson"
},
@@ -1446,6 +1461,9 @@
{
"$ref": "#/definitions/ModNoteActionJson"
},
{
"$ref": "#/definitions/SubmissionActionJson"
},
{
"type": "string"
}
@@ -1517,17 +1535,17 @@
},
"itemIs": {
"anyOf": [
{
"$ref": "#/definitions/FilterOptionsJson<CommentState>"
},
{
"items": {
"anyOf": [
{
"$ref": "#/definitions/SubmissionState"
},
{
"$ref": "#/definitions/CommentState"
},
{
"$ref": "#/definitions/NamedCriteria<CommentState>"
"$ref": "#/definitions/NamedCriteria<TypedActivityState>"
},
{
"type": "string"
@@ -1535,6 +1553,9 @@
]
},
"type": "array"
},
{
"$ref": "#/definitions/FilterOptionsJson<TypedActivityState>"
}
],
"description": "A list of criteria to test the state of the `Activity` against before running the check.\n\nIf any set of criteria passes the Check will be run. If the criteria fails then the Check will fail.\n\n* @examples [[{\"over_18\": true, \"removed': false}]]"
@@ -1587,6 +1608,9 @@
"description": "A list of Rules to run.\n\nIf `Rule` objects are triggered based on `condition` then `actions` will be performed.\n\nCan be `Rule`, `RuleSet`, or the `name` of any **named** `Rule` in your subreddit's configuration.\n\n**If `rules` is an empty array or not present then `actions` are performed immediately.**",
"items": {
"anyOf": [
{
"$ref": "#/definitions/IncludesData"
},
{
"$ref": "#/definitions/RecentActivityRuleJSONConfig"
},
@@ -1612,7 +1636,7 @@
"$ref": "#/definitions/SentimentRuleJSONConfig"
},
{
"$ref": "#/definitions/RuleSetJson"
"$ref": "#/definitions/RuleSetConfigData"
},
{
"type": "string"
@@ -1721,8 +1745,8 @@
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
"type": "string"
},
"score": {
@@ -2519,102 +2543,6 @@
},
"type": "object"
},
"FilterOptionsJson<CommentState>": {
"properties": {
"exclude": {
"description": "Only runs if `include` is not present. Each Criteria is comprised of conditions that the filter (Author/Item) being checked must \"not\" pass. See excludeCondition for set behavior\n\nEX: `isMod: true, name: Automoderator` => Will pass if the Author IS NOT a mod and IS NOT named Automoderator",
"items": {
"anyOf": [
{
"$ref": "#/definitions/CommentState"
},
{
"$ref": "#/definitions/NamedCriteria<CommentState>"
},
{
"type": "string"
}
]
},
"type": "array"
},
"excludeCondition": {
"default": "OR",
"description": "* OR => if ANY exclude condition \"does not\" pass then the exclude test passes\n* AND => if ALL exclude conditions \"do not\" pass then the exclude test passes\n\nDefaults to OR",
"enum": [
"AND",
"OR"
],
"type": "string"
},
"include": {
"description": "Will \"pass\" if any set of Criteria passes",
"items": {
"anyOf": [
{
"$ref": "#/definitions/CommentState"
},
{
"$ref": "#/definitions/NamedCriteria<CommentState>"
},
{
"type": "string"
}
]
},
"type": "array"
}
},
"type": "object"
},
"FilterOptionsJson<SubmissionState>": {
"properties": {
"exclude": {
"description": "Only runs if `include` is not present. Each Criteria is comprised of conditions that the filter (Author/Item) being checked must \"not\" pass. See excludeCondition for set behavior\n\nEX: `isMod: true, name: Automoderator` => Will pass if the Author IS NOT a mod and IS NOT named Automoderator",
"items": {
"anyOf": [
{
"$ref": "#/definitions/SubmissionState"
},
{
"$ref": "#/definitions/NamedCriteria<SubmissionState>"
},
{
"type": "string"
}
]
},
"type": "array"
},
"excludeCondition": {
"default": "OR",
"description": "* OR => if ANY exclude condition \"does not\" pass then the exclude test passes\n* AND => if ALL exclude conditions \"do not\" pass then the exclude test passes\n\nDefaults to OR",
"enum": [
"AND",
"OR"
],
"type": "string"
},
"include": {
"description": "Will \"pass\" if any set of Criteria passes",
"items": {
"anyOf": [
{
"$ref": "#/definitions/SubmissionState"
},
{
"$ref": "#/definitions/NamedCriteria<SubmissionState>"
},
{
"type": "string"
}
]
},
"type": "array"
}
},
"type": "object"
},
"FilterOptionsJson<SubredditCriteria>": {
"properties": {
"exclude": {
@@ -3286,6 +3214,41 @@
},
"type": "object"
},
"IncludesData": {
"properties": {
"path": {
"description": "The special-form path to the config fragment to retrieve.\n\nIf value starts with `wiki:` then the following value will be used to get a wiki page from the current subreddit\n\n * EX `wiki:botconfig/mybot` tries to get `https://reddit.com/r/currentSubreddit/wiki/botconfig/mybot`\n\nIf the value starts with `wiki:` and ends with `|someValue` then `someValue` will be used as the base subreddit for the wiki page\n\n* EX `wiki:replytemplates/test|ContextModBot` tries to get `https://reddit.com/r/ContextModBot/wiki/replytemplates/test`\n\nIf the value starts with `url:` then the value is fetched as an external url and expects raw text returned\n\n* EX `url:https://pastebin.com/raw/38qfL7mL` tries to get the text response of `https://pastebin.com/raw/38qfL7mL`",
"type": "string"
},
"ttl": {
"anyOf": [
{
"enum": [
false,
"response",
true
]
},
{
"type": "number"
}
],
"description": "Control caching for the config fragment.\n\nIf not specified the value for `wikiTTL` will be used\n\n* If true then value is cached forever\n* If false then value is never cached\n* If a number then the number of seconds to cache value\n* If 'response' then CM will attempt to use Cache-Control or Expires headers from the response to determine how long to cache the value"
},
"type": {
"description": "An unused hint about the content type. Not implemented yet",
"enum": [
"json",
"yaml"
],
"type": "string"
}
},
"required": [
"path"
],
"type": "object"
},
"LockActionJson": {
"description": "Lock the Activity",
"properties": {
@@ -4732,7 +4695,16 @@
"pattern": "^[a-zA-Z]([\\w -]*[\\w])?$",
"type": "string"
},
"note": {
"description": "(Optional) A mod-readable note added to the removal reason for this Activity. Can use Templating.\n\nThis note (and removal reasons) are only visible on New Reddit",
"type": "string"
},
"reasonId": {
"description": "(Optional) The ID of the Removal Reason to use\n\nRemoval reasons are only visible on New Reddit\n\nTo find IDs for removal reasons check the \"Removal Reasons\" popup located in the CM dashboard config editor for your subreddit\n\nMore info on Removal Reasons: https://mods.reddithelp.com/hc/en-us/articles/360010094892-Removal-Reasons",
"type": "string"
},
"spam": {
"description": "(Optional) Mark Activity as spam",
"type": "boolean"
}
},
@@ -5268,7 +5240,7 @@
],
"type": "object"
},
"RuleSetJson": {
"RuleSetConfigData": {
"description": "A RuleSet is a \"nested\" set of `Rule` objects that can be used to create more complex AND/OR behavior. Think of the outcome of a `RuleSet` as the result of all of its run `Rule` objects (based on `condition`)",
"properties": {
"condition": {
@@ -5287,6 +5259,9 @@
"description": "Can be `Rule` or the `name` of any **named** `Rule` in your subreddit's configuration",
"items": {
"anyOf": [
{
"$ref": "#/definitions/IncludesData"
},
{
"$ref": "#/definitions/RecentActivityRuleJSONConfig"
},
@@ -5325,7 +5300,7 @@
],
"type": "object"
},
"RunJson": {
"RunConfigData": {
"properties": {
"authorIs": {
"anyOf": [
@@ -5355,10 +5330,16 @@
"items": {
"anyOf": [
{
"$ref": "#/definitions/SubmissionCheckJson"
"$ref": "#/definitions/IncludesData"
},
{
"$ref": "#/definitions/CommentCheckJson"
"$ref": "#/definitions/SubmissionCheckConfigData"
},
{
"$ref": "#/definitions/CommentCheckConfigData"
},
{
"type": "string"
}
]
},
@@ -5697,7 +5678,161 @@
],
"type": "object"
},
"SubmissionCheckJson": {
"SubmissionActionJson": {
"description": "Create a new Submission. By default the Submission is made in the same subreddit as the Activity being processed; use `targets` to post to other subreddits.",
"properties": {
"authorIs": {
"anyOf": [
{
"items": {
"anyOf": [
{
"$ref": "#/definitions/AuthorCriteria"
},
{
"$ref": "#/definitions/NamedCriteria<AuthorCriteria>"
},
{
"type": "string"
}
]
},
"type": "array"
},
{
"$ref": "#/definitions/FilterOptionsJson<AuthorCriteria>"
}
],
"description": "If present then these Author criteria are checked before running the Check. If criteria fails then the Check will fail."
},
"content": {
"description": "The Content to submit for this Action. Content is interpreted as reddit-flavored Markdown.\n\nIf value starts with `wiki:` then the following value will be used to get a wiki page from the current subreddit\n\n * EX `wiki:botconfig/mybot` tries to get `https://reddit.com/r/currentSubreddit/wiki/botconfig/mybot`\n\nIf the value starts with `wiki:` and ends with `|someValue` then `someValue` will be used as the base subreddit for the wiki page\n\n* EX `wiki:replytemplates/test|ContextModBot` tries to get `https://reddit.com/r/ContextModBot/wiki/replytemplates/test`\n\nIf the value starts with `url:` then the value is fetched as an external url and expects raw text returned\n\n* EX `url:https://pastebin.com/raw/38qfL7mL` tries to get the text response of `https://pastebin.com/raw/38qfL7mL`\n\nIf none of the above is used the value is treated as the raw content\n\n * EX `this is **bold** markdown text` => \"this is **bold** markdown text\"\n\nAll Content is rendered using [mustache](https://github.com/janl/mustache.js/#templates) to enable [Action Templating](https://github.com/FoxxMD/context-mod#action-templating).\n\nThe following properties are always available in the template (view individual Rules to see rule-specific template data):\n```\nitem.kind => The type of Activity that was checked (comment/submission)\nitem.author => The name of the Author of the Activity EX FoxxMD\nitem.permalink => A permalink URL to the Activity EX https://reddit.com/r/yourSub/comments/o1h0i0/title_name/1v3b7x\nitem.url => If the Activity is a Link Submission then the external URL\nitem.title => If the Activity is a Submission then the title of that Submission\nrules => An object containing RuleResults of all the rules run for this check. See Action Templating for more details on naming\n```",
"examples": [
"This is the content of a comment/report/usernote",
"this is **bold** markdown text",
"wiki:botconfig/acomment"
],
"type": "string"
},
"distinguish": {
"description": "Distinguish as Mod after creation?",
"type": "boolean"
},
"dryRun": {
"default": false,
"description": "If `true` the Action will not make the API request to Reddit to perform its action.",
"examples": [
false,
true
],
"type": "boolean"
},
"enable": {
"default": true,
"description": "If set to `false` the Action will not be run",
"examples": [
true
],
"type": "boolean"
},
"flairId": {
"description": "Flair template to apply to this Submission",
"type": "string"
},
"flairText": {
"description": "Flair text to apply to this Submission",
"type": "string"
},
"footer": {
"anyOf": [
{
"enum": [
false
],
"type": "boolean"
},
{
"type": "string"
}
],
"description": "Customize the footer for Actions that send replies (Comment/Ban)\n\nIf `false` no footer is appended\n\nIf `string` the value is rendered as markdown or will use `wiki:` parser the same way `content` properties on Actions are rendered with [templating](https://github.com/FoxxMD/context-mod#action-templating).\n\nIf footer is `undefined` (not set) the default footer will be used:\n\n> *****\n> This action was performed by [a bot.] Mention a moderator or [send a modmail] if you have any ideas, questions, or concerns about this action.\n\n*****\n\nThe following properties are available for [templating](https://github.com/FoxxMD/context-mod#action-templating):\n```\nsubName => name of subreddit Action was performed in (EX 'mealtimevideos')\npermaLink => The permalink for the Activity the Action was performed on EX https://reddit.com/r/yourSub/comments/o1h0i0/title_name/1v3b7x\nmodmaiLink => An encoded URL that will open a new message to your subreddit with the Action permalink appended to the body\nbotLink => A permalink to the FAQ for this bot.\n```\nIf you use your own footer or no footer **please link back to the bot FAQ** using the `{{botLink}}` property in your content :)"
},
"itemIs": {
"anyOf": [
{
"items": {
"anyOf": [
{
"$ref": "#/definitions/SubmissionState"
},
{
"$ref": "#/definitions/CommentState"
},
{
"$ref": "#/definitions/NamedCriteria<TypedActivityState>"
},
{
"type": "string"
}
]
},
"type": "array"
},
{
"$ref": "#/definitions/FilterOptionsJson<TypedActivityState>"
}
],
"description": "A list of criteria to test the state of the `Activity` against before running the check.\n\nIf any set of criteria passes the Check will be run. If the criteria fails then the Check will fail.\n\n* @examples [[{\"over_18\": true, \"removed\": false}]]"
},
"kind": {
"description": "The type of action that will be performed",
"enum": [
"submission"
],
"type": "string"
},
"lock": {
"description": "Lock the Submission after creation?",
"type": "boolean"
},
"name": {
"description": "An optional, but highly recommended, friendly name for this Action. If not present will default to `kind`.\n\nCan only contain letters, numbers, underscore, spaces, and dashes",
"examples": [
"myDescriptiveAction"
],
"pattern": "^[a-zA-Z]([\\w -]*[\\w])?$",
"type": "string"
},
"nsfw": {
"type": "boolean"
},
"spoiler": {
"type": "boolean"
},
"sticky": {
"description": "Sticky the Submission after creation?",
"type": "boolean"
},
"targets": {
"description": "Specify where this Submission should be made\n\nValid values: 'self' | [subreddit]\n\n* 'self' -- DEFAULT. Post Submission to same subreddit of Activity being processed\n* [subreddit] -- The name of a subreddit to post Submission to. EX mealtimevideos",
"type": "string"
},
"title": {
"description": "The title of this Submission.\n\nTemplated the same as **content**",
"type": "string"
},
"url": {
"description": "If Submission should be a Link, the URL to use\n\nTemplated the same as **content**\n\nPROTIP: To make a Link Submission pointing to the Activity being processed use `{{item.permalink}}` as the URL value",
"type": "string"
}
},
"required": [
"kind",
"title"
],
"type": "object"
},
"SubmissionCheckConfigData": {
"properties": {
"actions": {
"description": "The `Actions` to run after the check is successfully triggered. ALL `Actions` will run in the order they are listed\n\n Can be `Action` or the `name` of any **named** `Action` in your subreddit's configuration",
@@ -5715,6 +5850,9 @@
],
"items": {
"anyOf": [
{
"$ref": "#/definitions/IncludesData"
},
{
"$ref": "#/definitions/FlairActionJson"
},
@@ -5757,6 +5895,9 @@
{
"$ref": "#/definitions/ModNoteActionJson"
},
{
"$ref": "#/definitions/SubmissionActionJson"
},
{
"type": "string"
}
@@ -5835,7 +5976,10 @@
"$ref": "#/definitions/SubmissionState"
},
{
"$ref": "#/definitions/NamedCriteria<SubmissionState>"
"$ref": "#/definitions/CommentState"
},
{
"$ref": "#/definitions/NamedCriteria<TypedActivityState>"
},
{
"type": "string"
@@ -5845,7 +5989,7 @@
"type": "array"
},
{
"$ref": "#/definitions/FilterOptionsJson<SubmissionState>"
"$ref": "#/definitions/FilterOptionsJson<TypedActivityState>"
}
],
"description": "A list of criteria to test the state of the `Activity` against before running the check.\n\nIf any set of criteria passes the Check will be run. If the criteria fails then the Check will fail.\n\n* @examples [[{\"over_18\": true, \"removed\": false}]]"
@@ -5898,6 +6042,9 @@
"description": "A list of Rules to run.\n\nIf `Rule` objects are triggered based on `condition` then `actions` will be performed.\n\nCan be `Rule`, `RuleSet`, or the `name` of any **named** `Rule` in your subreddit's configuration.\n\n**If `rules` is an empty array or not present then `actions` are performed immediately.**",
"items": {
"anyOf": [
{
"$ref": "#/definitions/IncludesData"
},
{
"$ref": "#/definitions/RecentActivityRuleJSONConfig"
},
@@ -5923,7 +6070,7 @@
"$ref": "#/definitions/SentimentRuleJSONConfig"
},
{
"$ref": "#/definitions/RuleSetJson"
"$ref": "#/definitions/RuleSetConfigData"
},
{
"type": "string"
@@ -6088,8 +6235,8 @@
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
"type": "string"
},
"score": {
@@ -6123,6 +6270,13 @@
"title": {
"description": "A valid regular expression to match against the title of the submission",
"type": "string"
},
"upvoteRatio": {
"description": "Compare the upvote ratio for this Submission, expressed as a whole number\n\nCan be either a comparison string or a number. If a number then CM assumes upvote ratio must be greater than or equal to this.\n\nExample:\n\n* `< 90` => less than 90% upvoted\n* 45 => greater than or equal to 45% upvoted",
"type": [
"string",
"number"
]
}
},
"type": "object"
@@ -6492,10 +6646,16 @@
"items": {
"anyOf": [
{
"$ref": "#/definitions/SubmissionCheckJson"
"$ref": "#/definitions/IncludesData"
},
{
"$ref": "#/definitions/CommentCheckJson"
"$ref": "#/definitions/SubmissionCheckConfigData"
},
{
"$ref": "#/definitions/CommentCheckConfigData"
},
{
"type": "string"
}
]
},
@@ -6596,7 +6756,17 @@
"runs": {
"description": "A list of sets of Checks to run",
"items": {
"$ref": "#/definitions/RunJson"
"anyOf": [
{
"$ref": "#/definitions/IncludesData"
},
{
"$ref": "#/definitions/RunConfigData"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"

5948
src/Schema/Check.json Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -258,6 +258,9 @@
},
"type": "object"
},
"influxConfig": {
"$ref": "#/definitions/InfluxConfig"
},
"name": {
"type": "string"
},
@@ -578,8 +581,8 @@
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
"type": "string"
},
"score": {
@@ -866,6 +869,43 @@
},
"type": "object"
},
"InfluxConfig": {
"properties": {
"credentials": {
"$ref": "#/definitions/InfluxCredentials"
},
"defaultTags": {
"$ref": "#/definitions/Record<string,string>"
}
},
"required": [
"credentials"
],
"type": "object"
},
"InfluxCredentials": {
"properties": {
"bucket": {
"type": "string"
},
"org": {
"type": "string"
},
"token": {
"type": "string"
},
"url": {
"type": "string"
}
},
"required": [
"bucket",
"org",
"token",
"url"
],
"type": "object"
},
"LoggingOptions": {
"properties": {
"console": {
@@ -1669,6 +1709,9 @@
},
"type": "object"
},
"Record<string,string>": {
"type": "object"
},
"RedditCredentials": {
"description": "Credentials required for the bot to interact with Reddit's API\n\nThese credentials will provided to both the API and Web interface unless otherwise specified with the `web.credentials` property\n\nRefer to the [required credentials table](https://github.com/FoxxMD/context-mod/blob/master/docs/operatorConfiguration.md#minimum-required-configuration) to see what is necessary to run the bot.",
"examples": [
@@ -1906,8 +1949,8 @@
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
"type": "string"
},
"score": {
@@ -1941,6 +1984,13 @@
"title": {
"description": "A valid regular expression to match against the title of the submission",
"type": "string"
},
"upvoteRatio": {
"description": "Compare the upvote ratio for this Submission, expressed as a whole number\n\nCan be either a comparison string or a number. If a number then CM assumes upvote ratio must be greater than or equal to this.\n\nExample:\n\n* `< 90` => less than 90% upvoted\n* 45 => greater than or equal to 45% upvoted",
"type": [
"string",
"number"
]
}
},
"type": "object"
@@ -1980,6 +2030,14 @@
},
"name": {
"type": "string"
},
"wikiConfig": {
"default": "botconfig/contextbot",
"description": "The relative URL to the ContextMod wiki page EX `https://reddit.com/r/subreddit/wiki/<path>`\n\nThis will override the default relative URL as well as any URL set at the bot-level",
"examples": [
"botconfig/contextbot"
],
"type": "string"
}
},
"required": [
@@ -2082,7 +2140,7 @@
"description": "Configuration for the **Server** application. See [Architecture Documentation](https://github.com/FoxxMD/context-mod/blob/master/docs/serverClientArchitecture.md) for more info",
"properties": {
"friendly": {
"description": "A friendly name for this server. This will override `friendly` in `BotConnection` if specified.",
"description": "A friendly name for this server. This will override `friendly` in `BotConnection` if specified.\n\nIf none is set one is randomly generated.",
"type": "string"
},
"port": {
@@ -2193,6 +2251,23 @@
"$ref": "#/definitions/DatabaseStatisticsOperatorJsonConfig",
"description": "Set defaults for the frequency time series stats are collected"
},
"dev": {
"properties": {
"monitorMemory": {
"description": "Invoke `process.memoryUsage()` on an interval and send metrics to Influx\n\nOnly works if Influx config is provided",
"type": "boolean"
},
"monitorMemoryInterval": {
"default": 15,
"description": "Interval, in seconds, to invoke `process.memoryUsage()` at\n\nDefaults to 15 seconds",
"type": "number"
}
},
"type": "object"
},
"influxConfig": {
"$ref": "#/definitions/InfluxConfig"
},
"logging": {
"$ref": "#/definitions/LoggingOptions",
"description": "Settings to configure global logging defaults"
@@ -2358,20 +2433,6 @@
},
"type": "object"
},
"invites": {
"description": "Settings related to oauth flow invites",
"properties": {
"maxAge": {
"default": 0,
"description": "Number of seconds an invite should be valid for\n\n If `0` or not specified (default) invites do not expire",
"examples": [
0
],
"type": "number"
}
},
"type": "object"
},
"logLevel": {
"description": "The default log level to filter to in the web interface\n\nIf not specified or `null` will be same as global `logLevel`",
"enum": [

View File

@@ -1,6 +1,9 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"anyOf": [
{
"$ref": "#/definitions/IncludesData"
},
{
"$ref": "#/definitions/RecentActivityRuleJSONConfig"
},
@@ -107,8 +110,8 @@
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
"type": "string"
},
"score": {
@@ -819,8 +822,8 @@
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
"type": "string"
},
"score": {
@@ -1655,6 +1658,41 @@
},
"type": "object"
},
"IncludesData": {
"properties": {
"path": {
"description": "The special-form path to the config fragment to retrieve.\n\nIf value starts with `wiki:` then the following value will be used to get a wiki page from the current subreddit\n\n * EX `wiki:botconfig/mybot` tries to get `https://reddit.com/r/currentSubreddit/wiki/botconfig/mybot`\n\nIf the value starts with `wiki:` and ends with `|someValue` then `someValue` will be used as the base subreddit for the wiki page\n\n* EX `wiki:replytemplates/test|ContextModBot` tries to get `https://reddit.com/r/ContextModBot/wiki/replytemplates/test`\n\nIf the value starts with `url:` then the value is fetched as an external url and expects raw text returned\n\n* EX `url:https://pastebin.com/raw/38qfL7mL` tries to get the text response of `https://pastebin.com/raw/38qfL7mL`",
"type": "string"
},
"ttl": {
"anyOf": [
{
"enum": [
false,
"response",
true
]
},
{
"type": "number"
}
],
"description": "Control caching for the config fragment.\n\nIf not specified the value for `wikiTTL` will be used\n\n* If true then value is cached forever\n* If false then value is never cached\n* If a number then the number of seconds to cache value\n* If 'response' then CM will attempt to use Cache-Control or Expires headers from the response to determine how long to cache the value"
},
"type": {
"description": "An unused hint about the content type. Not implemented yet",
"enum": [
"json",
"yaml"
],
"type": "string"
}
},
"required": [
"path"
],
"type": "object"
},
"ModLogCriteria": {
"properties": {
"action": {
@@ -3318,8 +3356,8 @@
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
"type": "string"
},
"score": {
@@ -3353,6 +3391,13 @@
"title": {
"description": "A valid regular expression to match against the title of the submission",
"type": "string"
},
"upvoteRatio": {
"description": "Compare the upvote ratio for this Submission, expressed as a whole number\n\nCan be either a comparison string or a number. If a number then CM assumes upvote ratio must be greater than or equal to this.\n\nExample:\n\n* `< 90` => less than 90% upvoted\n* 45 => greater than or equal to 45% upvoted",
"type": [
"string",
"number"
]
}
},
"type": "object"

View File

@@ -78,8 +78,8 @@
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
"type": "string"
},
"score": {
@@ -790,8 +790,8 @@
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
"type": "string"
},
"score": {
@@ -1626,6 +1626,41 @@
},
"type": "object"
},
"IncludesData": {
"properties": {
"path": {
"description": "The special-form path to the config fragment to retrieve.\n\nIf value starts with `wiki:` then the proceeding value will be used to get a wiki page from the current subreddit\n\n * EX `wiki:botconfig/mybot` tries to get `https://reddit.com/r/currentSubreddit/wiki/botconfig/mybot`\n\nIf the value starts with `wiki:` and ends with `|someValue` then `someValue` will be used as the base subreddit for the wiki page\n\n* EX `wiki:replytemplates/test|ContextModBot` tries to get `https://reddit.com/r/ContextModBot/wiki/replytemplates/test`\n\nIf the value starts with `url:` then the value is fetched as an external url and expects raw text returned\n\n* EX `url:https://pastebin.com/raw/38qfL7mL` tries to get the text response of `https://pastebin.com/raw/38qfL7mL`",
"type": "string"
},
"ttl": {
"anyOf": [
{
"enum": [
false,
"response",
true
]
},
{
"type": "number"
}
],
"description": "Control caching for the config fragment.\n\nIf not specified the value for `wikiTTL` will be used\n\n* If true then value is cached forever\n* If false then value is never cached\n* If a number then the number of seconds to cache value\n* If 'response' then CM will attempt to use Cache-Control or Expires headers from the response to determine how long to cache the value"
},
"type": {
"description": "An unused hint about the content type. Not implemented yet",
"enum": [
"json",
"yaml"
],
"type": "string"
}
},
"required": [
"path"
],
"type": "object"
},
"ModLogCriteria": {
"properties": {
"action": {
@@ -3289,8 +3324,8 @@
"description": "* true/false => test whether Activity is removed or not\n* string or list of strings => test which moderator removed this Activity"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"description": "A string containing a comparison operator, a value to compare against, an (optional) report type filter, an (optional) qualifier for report reason, and an (optional) time constraint\n\nThe syntax is `(< OR > OR <= OR >=) number[%] [type] [reasonQualifier] [timeValue] [timeUnit]`\n\nIf only comparison and number is given then defaults to TOTAL reports on an Activity.\n\n* EX `> 2` => greater than 2 total reports\n\nType (optional) determines which type of reports to look at:\n\n* `mod` -- mod reports\n * EX `> 3 mod` => greater than 3 mod reports\n* `user` -- user reports\n * EX `> 3 user` => greater than 3 user reports\n\nReport reason qualifiers can be:\n\n* enclosed double or single quotes -- report reason contains\n * EX `> 1 \"misinformation\" => greater than 1 report with reason containing \"misinformation\"\n* enclosed in backslashes -- match regex\n * EX `> 1 \\harassment towards .*\\` => greater than 1 report with reason matching regex \\harassment towards .*\\\n\nType and reason qualifiers can be used together:\n\nEX `> 2 user \"misinformation\" => greater than 2 user reports with reasons containing \"misinformation\"\n\nThe time constraint filter reports created between NOW and [timeConstraint] in the past:\n\n* `> 3 in 30 minutes` => more than 3 reports created between NOW and 30 minutes ago\n* `> 2 user \"misinformation\" in 2 hours` => more than 2 user reports containing \"misinformation\" created between NOW and 2 hours ago",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)(\\s*%)?(\\s+(?:mods?|users?))?(\\s+(?:[\"'].*[\"']|\\/.*\\/))?.*(\\d+)?\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)?\\s*$",
"type": "string"
},
"score": {
@@ -3324,6 +3359,13 @@
"title": {
"description": "A valid regular expression to match against the title of the submission",
"type": "string"
},
"upvoteRatio": {
"description": "Compare the upvote ratio for this Submission, expressed as a whole number\n\nCan be either a comparison string or a number. If a number then CM assumes upvote ratio must be greater than or equal to this.\n\nExample:\n\n* `< 90` => less than 90% upvoted\n* 45 => greater than or equal to 45% upvoted",
"type": [
"string",
"number"
]
}
},
"type": "object"
@@ -3440,6 +3482,9 @@
"description": "Can be `Rule` or the `name` of any **named** `Rule` in your subreddit's configuration",
"items": {
"anyOf": [
{
"$ref": "#/definitions/IncludesData"
},
{
"$ref": "#/definitions/RecentActivityRuleJSONConfig"
},

6194
src/Schema/Run.json Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -57,8 +57,7 @@ import ConfigParseError from "../Utils/ConfigParseError";
import dayjs, {Dayjs as DayjsObj} from "dayjs";
import Action from "../Action";
import {queue, QueueObject} from 'async';
import {JSONConfig} from "../JsonConfig";
import {Check, CheckStructuredJson} from "../Check";
import {SubredditConfigHydratedData, SubredditConfigData} from "../SubredditConfigData";
import NotificationManager from "../Notification/NotificationManager";
import {createHistoricalDisplayDefaults} from "../Common/defaults";
import {ExtendedSnoowrap} from "../Utils/SnoowrapClients";
@@ -98,6 +97,10 @@ import {
} from "../Common/Infrastructure/Atomic";
import {parseFromJsonOrYamlToObject} from "../Common/Config/ConfigUtil";
import {FilterCriteriaDefaults} from "../Common/Infrastructure/Filters/FilterShapes";
import {InfluxClient} from "../Common/Influx/InfluxClient";
import { Point } from "@influxdata/influxdb-client";
import {NormalizedManagerResponse} from "../Web/Common/interfaces";
import {guestEntityToApiGuest} from "../Common/Entities/Guest/GuestEntity";
export interface RunningState {
state: RunState,
@@ -139,6 +142,7 @@ export interface RuntimeManagerOptions extends Omit<ManagerOptions, 'filterCrite
managerEntity: ManagerEntity
filterCriteriaDefaults?: FilterCriteriaDefaults
statDefaults: DatabaseStatisticsOperatorConfig
influxClients: InfluxClient[]
}
interface QueuedIdentifier {
@@ -227,7 +231,8 @@ export class Manager extends EventEmitter implements RunningStates {
rulesUniqueSample: number[] = [];
rulesUniqueSampleInterval: any;
rulesUniqueRollingAvg: number = 0;
actionedEvents: ActionedEvent[] = [];
modqueueInterval: number = 0;
delayedQueueInterval: any;
@@ -237,6 +242,8 @@ export class Manager extends EventEmitter implements RunningStates {
authorRepo!: Repository<AuthorEntity>
eventRepo!: Repository<CMEvent>;
influxClients: InfluxClient[] = [];
getStats = async (): Promise<ManagerStats> => {
const data: any = {
eventsAvg: formatNumber(this.eventsRollingAvg),
@@ -308,7 +315,8 @@ export class Manager extends EventEmitter implements RunningStates {
botEntity,
managerEntity,
statDefaults,
retention
retention,
influxClients,
} = opts || {};
this.displayLabel = opts.nickname || `${sub.display_name_prefixed}`;
const getLabels = this.getCurrentLabels;
@@ -325,7 +333,11 @@ export class Manager extends EventEmitter implements RunningStates {
}, mergeArr);
this.logger.stream().on('log', (log: LogInfo) => {
if(log.subreddit !== undefined && log.subreddit === this.getDisplay()) {
this.logs = [log, ...this.logs].slice(0, 301);
this.logs.unshift(log);
if(this.logs.length > 300) {
// remove all elements starting from the 300th index (301st item)
this.logs.splice(300);
}
}
});
this.globalDryRun = dryRun;
@@ -338,6 +350,9 @@ export class Manager extends EventEmitter implements RunningStates {
this.pollingRetryHandler = createRetryHandler({maxRequestRetry: 3, maxOtherRetry: 2}, this.logger);
this.subreddit = sub;
this.botEntity = botEntity;
for(const client of influxClients) {
this.influxClients.push(client.childClient(this.logger, {manager: this.displayLabel, subreddit: sub.display_name_prefixed}));
}
this.managerEntity = managerEntity;
// always init in stopped state but use last invokee to determine if we should start the manager automatically afterwards
@@ -651,7 +666,7 @@ export class Manager extends EventEmitter implements RunningStates {
this.logger.info('Subreddit-specific options updated');
this.logger.info('Building Runs and Checks...');
const structuredRuns = configBuilder.parseToStructured(validJson, this.filterCriteriaDefaults, this.postCheckBehaviorDefaults);
const structuredRuns = await configBuilder.parseToStructured(validJson, this.resources, this.filterCriteriaDefaults, this.postCheckBehaviorDefaults);
let runs: Run[] = [];
@@ -887,12 +902,67 @@ export class Manager extends EventEmitter implements RunningStates {
force = false,
} = options;
const event = new CMEvent();
if(refresh) {
this.logger.verbose(`Refreshed data`);
// @ts-ignore
item = await activity.refresh();
}
let activityEntity: Activity;
const existingEntity = await this.activityRepo.findOneBy({_id: item.name});
/**
* Report Tracking
*
* Store ids for activities we process. Enables us to be sure of whether modqueue has been monitored since we've last seen the activity
*
* */
let lastKnownStateTimestamp = await this.resources.getActivityLastSeenDate(item.name);
if(lastKnownStateTimestamp !== undefined && lastKnownStateTimestamp.isBefore(this.startedAt)) {
// if we last saw this activity BEFORE we started event polling (modqueue) then it's not useful to us
lastKnownStateTimestamp = undefined;
}
await this.resources.setActivityLastSeenDate(item.name);
// if modqueue is running then we know we are checking for new reports every X seconds
if(options.activitySource.identifier === 'modqueue') {
// if the activity is from modqueue and only has one report then we know that report was just created
if(item.num_reports === 1
// otherwise if it has more than one report AND we have seen it (its only seen if it has already been stored (in below block))
// then we are reasonably sure that any reports created were in the last X seconds
|| (item.num_reports > 1 && lastKnownStateTimestamp !== undefined)) {
lastKnownStateTimestamp = dayjs().subtract(this.modqueueInterval, 'seconds');
}
}
// if activity is not from modqueue then known good timestamps for "time between last known report and now" is dependent on these things:
// 1) (most accurate) lastKnownStateTimestamp -- only available if activity either had 0 reports OR 1+ and existing reports have been stored (see below code)
// 2) last stored report time from Activity
// 3) create date of activity
let shouldPersistReports = false;
if (existingEntity === null) {
activityEntity = Activity.fromSnoowrapActivity(this.managerEntity.subreddit, activity, lastKnownStateTimestamp);
// always persist if activity is not already persisted and any reports exist
if (item.num_reports > 0) {
shouldPersistReports = true;
}
} else {
activityEntity = existingEntity;
// always persist if reports need to be updated
if (activityEntity.syncReports(item, lastKnownStateTimestamp)) {
shouldPersistReports = true;
}
}
if (shouldPersistReports) {
activityEntity = await this.activityRepo.save(activityEntity);
}
const itemId = await item.id;
if(await this.resources.hasRecentSelf(item)) {
@@ -905,15 +975,6 @@ export class Manager extends EventEmitter implements RunningStates {
}
}
let activityEntity: Activity;
const existingEntity = await this.activityRepo.findOneBy({_id: item.name});
if(existingEntity === null) {
activityEntity = Activity.fromSnoowrapActivity(this.managerEntity.subreddit, activity);
} else {
activityEntity = existingEntity;
}
const event = new CMEvent();
event.triggered = false;
event.manager = this.managerEntity;
event.activity = activityEntity;
@@ -928,7 +989,6 @@ export class Manager extends EventEmitter implements RunningStates {
itemIdentifiers.push(`${checkType === 'Submission' ? 'SUB' : 'COM'} ${itemId}`);
this.currentLabels = itemIdentifiers;
let ePeek = '';
let peekParts: ItemContent;
try {
const [peek, { content: peekContent }] = await itemContentPeek(item);
ePeek = peekContent;
@@ -1084,6 +1144,10 @@ export class Manager extends EventEmitter implements RunningStates {
event.runResults = runResults;
//actionedEvent.runResults = runResults;
const checksRun = actionedEvent.runResults.map(x => x.checkResults).flat().length;
let actionsRun = actionedEvent.runResults.map(x => x.checkResults?.map(y => y.actionResults)).flat().length;
let totalRulesRun = actionedEvent.runResults.map(x => x.checkResults?.map(y => y.ruleResults)).flat(5).length;
// determine if event should be recorded
const allOutputs = [...new Set(runResults.map(x => x.checkResults.map(y => y.recordOutputs ?? [])).flat(2).filter(x => recordOutputTypes.includes(x)))];
if(allOutputs.length > 0) {
@@ -1115,11 +1179,108 @@ export class Manager extends EventEmitter implements RunningStates {
}
await this.eventRepo.save(event);
}
}
if (allOutputs.includes('influx') && this.influxClients.length > 0) {
try {
const time = dayjs().valueOf()
const checksRun = actionedEvent.runResults.map(x => x.checkResults).flat().length;
let actionsRun = actionedEvent.runResults.map(x => x.checkResults?.map(y => y.actionResults)).flat().length;
let totalRulesRun = actionedEvent.runResults.map(x => x.checkResults?.map(y => y.ruleResults)).flat().length;
const measurements: Point[] = [];
measurements.push(new Point('event')
.timestamp(time)
.tag('triggered', event.triggered ? '1' : '0')
.tag('activityType', isSubmission(item) ? 'submission' : 'comment')
.tag('sourceIdentifier', event.source.identifier ?? 'unknown')
.tag('sourceType', event.source.type)
.stringField('eventId', event.id)
.stringField('activityId', event.activity.id)
.stringField('author', actionedEvent.activity.author)
.intField('processingTime', time - event.processedAt.valueOf())
.intField('queuedTime', event.processedAt.valueOf() - event.queuedAt.valueOf())
.intField('runsProcessed', actionedEvent.runResults.length)
.intField('runsTriggered', actionedEvent.runResults.filter(x => x.triggered).length)
.intField('checksProcessed', checksRun)
.intField('checksTriggered', actionedEvent.runResults.map(x => x.checkResults).flat().filter(x => x.triggered).length)
.intField('totalRulesProcessed', totalRulesRun)
.intField('rulesTriggered', actionedEvent.runResults.map(x => x.checkResults?.map(y => y.ruleResults)).flat(5).filter((x: RuleResultEntity) => x.triggered === true).length)
.intField('uniqueRulesProcessed', allRuleResults.length)
.intField('cachedRulesProcessed', totalRulesRun - allRuleResults.length)
.intField('actionsProcessed', actionsRun)
.intField('apiUsage', startingApiLimit - this.client.ratelimitRemaining));
const defaultPoint = () => new Point('triggeredEntity')
.timestamp(time)
.tag('activityType', isSubmission(item) ? 'submission' : 'comment')
.tag('sourceIdentifier', event.source.identifier ?? 'unknown')
.tag('sourceType', event.source.type)
.stringField('activityId', event.activity.id)
.stringField('author', actionedEvent.activity.author)
.stringField('eventId', event.id);
for (const r of event.runResults) {
if (r.triggered) {
measurements.push(defaultPoint()
.tag('entityType', 'run')
.tag('name', r.run.name));
for (const c of r.checkResults) {
if (c.triggered) {
measurements.push(defaultPoint()
.tag('entityType', 'check')
.stringField('name', c.check.name)
.tag('fromCache', c.fromCache ? '1' : '0'));
if (c.ruleResults !== undefined) {
for (const ru of c.ruleResults) {
if (ru.result.triggered) {
measurements.push(defaultPoint()
.tag('entityType', 'rule')
.stringField('name', ru.result.premise.name)
.tag('fromCache', ru.result.fromCache ? '1' : '0'))
}
}
}
if (c.ruleSetResults !== undefined) {
for (const rs of c.ruleSetResults) {
if (rs.result.triggered) {
measurements.push(defaultPoint()
.tag('entityType', 'ruleSet'));
for (const ru of rs.result.results) {
if (ru.triggered) {
measurements.push(defaultPoint()
.tag('entityType', 'rule')
.stringField('name', ru.premise.name))
}
}
}
}
}
if (c.actionResults !== undefined) {
for (const a of c.actionResults) {
if (a.run) {
measurements.push(defaultPoint()
.tag('entityType', 'action')
.stringField('name', a.premise.name)
.tag('dryRun', a.dryRun ? '1' : '0')
.tag('succes', a.success ? '1' : '0')
)
}
}
}
}
}
}
}
for (const client of this.influxClients) {
await client.writePoint(measurements);
}
} catch (e: any) {
this.logger.error(new CMError('Error occurred while building or sending Influx data', {
cause: e,
isSerious: false
}));
}
}
}
this.logger.verbose(`Run Stats: Checks ${checksRun} | Rules => Total: ${totalRulesRun} Unique: ${allRuleResults.length} Cached: ${totalRulesRun - allRuleResults.length} Rolling Avg: ~${formatNumber(this.rulesUniqueRollingAvg)}/s | Actions ${actionsRun}`);
this.logger.verbose(`Reddit API Stats: Initial ${startingApiLimit} | Current ${this.client.ratelimitRemaining} | Used ~${startingApiLimit - this.client.ratelimitRemaining} | Events ~${formatNumber(this.eventsRollingAvg)}/s`);
@@ -1334,6 +1495,11 @@ export class Manager extends EventEmitter implements RunningStates {
async startQueue(causedBy: Invokee = 'system', options?: ManagerStateChangeOption) {
if(!this.validConfigLoaded) {
this.logger.warn('Cannot start queue while manager has an invalid configuration');
return;
}
if(this.activityRepo === undefined) {
this.activityRepo = this.resources.database.getRepository(Activity);
}
@@ -1474,6 +1640,11 @@ export class Manager extends EventEmitter implements RunningStates {
s.startInterval();
}
this.startedAt = dayjs();
const modQueuePollOpts = this.pollOptions.find(x => x.pollOn === 'modqueue');
if(modQueuePollOpts !== undefined) {
this.modqueueInterval = modQueuePollOpts.interval;
}
}
this.logger.info('Event polling STARTED');
@@ -1571,6 +1742,14 @@ export class Manager extends EventEmitter implements RunningStates {
await this.syncRunningState('managerState');
}
async destroy(causedBy: Invokee = 'system', options?: ManagerStateChangeOption) {
await this.stop(causedBy, options);
clearInterval(this.eventsSampleInterval);
clearInterval(this.delayedQueueInterval);
clearInterval(this.rulesUniqueSampleInterval)
await this.cacheManager.destroy(this.subreddit.display_name);
}
setInitialRunningState(managerEntity: RunningStateEntities, type: RunningStateTypes): RunningState {
if(managerEntity[type].runType.name === 'stopped' && managerEntity[type].invokee.name === 'user') {
return {state: STOPPED, causedBy: 'user'};
@@ -1591,4 +1770,41 @@ export class Manager extends EventEmitter implements RunningStates {
await this.cacheManager.defaultDatabase.getRepository(ManagerEntity).save(this.managerEntity);
}
async writeHealthMetrics(time?: number) {
if (this.influxClients.length > 0) {
const metric = new Point('managerHealth')
.intField('delayedActivities', this.resources !== undefined ? this.resources.delayedItems.length : 0)
.intField('processing', this.queue.running())
.intField('queued', this.queue.length())
.booleanField('eventsRunning', this.eventsState.state === RUNNING)
.booleanField('queueRunning', this.queueState.state === RUNNING)
.booleanField('running', this.managerState.state === RUNNING)
.intField('uptime', this.startedAt !== undefined ? dayjs().diff(this.startedAt, 'seconds') : 0)
.intField('configAge', this.lastWikiRevision === undefined ? 0 : dayjs().diff(this.lastWikiRevision, 'seconds'));
if (this.resources !== undefined) {
const {req, miss} = this.resources.getCacheTotals();
metric.intField('cacheRequests', req)
.intField('cacheMisses', miss);
}
if (time !== undefined) {
metric.timestamp(time);
}
for (const client of this.influxClients) {
await client.writePoint(metric);
}
}
}
toNormalizedManager(): NormalizedManagerResponse {
return {
name: this.displayLabel,
subreddit: this.subreddit.display_name,
subredditNormal: parseRedditEntity(this.subreddit.display_name).name,
guests: this.managerEntity.getGuests().map(x => guestEntityToApiGuest(x))
}
}
}

View File

@@ -4,12 +4,12 @@ import {
activityIsDeleted, activityIsFiltered,
activityIsRemoved,
AuthorTypedActivitiesOptions, BOT_LINK,
getAuthorHistoryAPIOptions
getAuthorHistoryAPIOptions, renderContent
} from "../Utils/SnoowrapUtils";
import {map as mapAsync} from 'async';
import winston, {Logger} from "winston";
import as from 'async';
import fetch from 'node-fetch';
import fetch, {Response} from 'node-fetch';
import {
asActivity,
asSubmission,
@@ -20,7 +20,7 @@ import {
createCacheManager,
escapeRegex,
FAIL,
fetchExternalUrl,
fetchExternalResult,
filterCriteriaSummary,
formatNumber,
generateItemFilterHelpers,
@@ -58,7 +58,7 @@ import {
filterByTimeRequirement,
asSubreddit,
modActionCriteriaSummary,
parseRedditFullname
parseRedditFullname, asStrongImageHashCache
} from "../util";
import LoggedError from "../Utils/LoggedError";
import {
@@ -96,7 +96,7 @@ import {CMEvent as ActionedEventEntity, CMEvent } from "../Common/Entities/CMEve
import {RuleResultEntity} from "../Common/Entities/RuleResultEntity";
import globrex from 'globrex';
import {runMigrations} from "../Common/Migrations/CacheMigrationUtils";
import {isStatusError, MaybeSeriousErrorWithCause, SimpleError} from "../Utils/Errors";
import {CMError, isStatusError, MaybeSeriousErrorWithCause, SimpleError} from "../Utils/Errors";
import {ErrorWithCause} from "pony-cause";
import {ManagerEntity} from "../Common/Entities/ManagerEntity";
import {Bot} from "../Common/Entities/Bot";
@@ -119,8 +119,8 @@ import {
UserNoteCriteria
} from "../Common/Infrastructure/Filters/FilterCriteria";
import {
ActivitySource, DurationVal,
EventRetentionPolicyRange,
ActivitySource, ConfigFragmentValidationFunc, DurationVal,
EventRetentionPolicyRange, ImageHashCacheData,
JoinOperands,
ModActionType,
ModeratorNameCriteria, ModUserNoteLabel, statFrequencies, StatisticFrequency,
@@ -141,11 +141,12 @@ import {
} from "../Common/Infrastructure/ActivityWindow";
import {Duration} from "dayjs/plugin/duration";
import {
activityReports,
ActivityType,
AuthorHistorySort,
CachedFetchedActivitiesResult, FetchedActivitiesResult,
SnoowrapActivity
SnoowrapActivity, SubredditRemovalReason
} from "../Common/Infrastructure/Reddit";
import {AuthorCritPropHelper} from "../Common/Infrastructure/Filters/AuthorCritPropHelper";
import {NoopLogger} from "../Utils/loggerFactory";
@@ -153,9 +154,13 @@ import {
compareDurationValue, comparisonTextOp,
parseDurationComparison,
parseGenericValueComparison,
parseGenericValueOrPercentComparison
parseGenericValueOrPercentComparison, parseReportComparison
} from "../Common/Infrastructure/Comparisons";
import {asCreateModNoteData, CreateModNoteData, ModNote, ModNoteRaw} from "./ModNotes/ModNote";
import {IncludesData} from "../Common/Infrastructure/Includes";
import {parseFromJsonOrYamlToObject} from "../Common/Config/ConfigUtil";
import ConfigParseError from "../Utils/ConfigParseError";
import {ActivityReport} from "../Common/Entities/ActivityReport";
export const DEFAULT_FOOTER = '\r\n*****\r\nThis action was performed by [a bot.]({{botLink}}) Mention a moderator or [send a modmail]({{modmailLink}}) if you any ideas, questions, or concerns about this action.';
@@ -345,7 +350,7 @@ export class SubredditResources {
// set default prune interval
this.pruneInterval = setInterval(() => {
// @ts-ignore
this.defaultCache?.store.prune();
this.cache?.store.prune();
this.logger.debug('Pruned cache');
// prune interval should be twice the smallest TTL
},min * 1000 * 2)
@@ -367,6 +372,16 @@ export class SubredditResources {
}
}
async destroy() {
if(this.historicalSaveInterval !== undefined) {
clearInterval(this.historicalSaveInterval);
}
if(this.pruneInterval !== undefined && this.cacheType === 'memory' && this.cacheSettingsHash !== 'default') {
clearInterval(this.pruneInterval);
this.cache?.reset();
}
}
async retentionCleanup() {
const logger = this.logger.child({labels: ['Event Retention'], mergeArr});
logger.debug('Starting cleanup');
@@ -495,9 +510,15 @@ export class SubredditResources {
this.delayedItems.push(data);
}
async removeDelayedActivity(id: string) {
await this.dispatchedActivityRepo.delete(id);
this.delayedItems = this.delayedItems.filter(x => x.id !== id);
async removeDelayedActivity(val?: string | string[]) {
if(val === undefined) {
await this.dispatchedActivityRepo.delete({manager: {id: this.managerEntity.id}});
this.delayedItems = [];
} else {
const ids = typeof val === 'string' ? [val] : val;
await this.dispatchedActivityRepo.delete(ids);
this.delayedItems = this.delayedItems.filter(x => !ids.includes(x.id));
}
}
async initStats() {
@@ -758,11 +779,15 @@ export class SubredditResources {
}
}
async getStats() {
const totals = Object.values(this.stats.cache).reduce((acc, curr) => ({
// Sum cache miss/request counters across every tracked cache category.
// NOTE: output key is `req` while the per-category counter is named `requests`.
getCacheTotals() {
    let missTotal = 0;
    let reqTotal = 0;
    for (const categoryStats of Object.values(this.stats.cache)) {
        missTotal += categoryStats.miss;
        reqTotal += categoryStats.requests;
    }
    return {miss: missTotal, req: reqTotal};
}
async getStats() {
const totals = this.getCacheTotals();
const cacheKeys = Object.keys(this.stats.cache);
const res = {
cache: {
@@ -901,6 +926,29 @@ export class SubredditResources {
// return events;
// }
// Return the cached "last seen" timestamp for an Activity (by object or fullname),
// or undefined when self-caching is disabled or nothing has been recorded.
async getActivityLastSeenDate(value: SnoowrapActivity | string): Promise<Dayjs | undefined> {
    // self TTL disabled => last-seen tracking is off entirely
    if (this.selfTTL === false) {
        return undefined;
    }
    const fullname = typeof value === 'string' ? value : value.name;
    const cacheKey = `activityLastSeen-${fullname}`;
    const storedUnix = await this.cache.get(cacheKey) as string | undefined | null;
    if (storedUnix === undefined || storedUnix === null) {
        return undefined;
    }
    // stored as an epoch-seconds string (see setActivityLastSeenDate)
    return dayjs.unix(Number.parseInt(storedUnix, 10));
}
/**
 * Record when an Activity (by object or fullname) was last seen, as epoch seconds.
 * No-op when self-caching is disabled. Entries expire after 24 hours.
 * */
async setActivityLastSeenDate(value: SnoowrapActivity | string, timestamp?: number): Promise<void> {
    if(this.selfTTL !== false) {
        const id = typeof(value) === 'string' ? value : value.name;
        const hash = `activityLastSeen-${id}`;
        // FIX: await the cache write (was fire-and-forget) so errors propagate to the caller
        // instead of becoming an unhandled rejection; matches sibling cache.set call sites
        await this.cache.set(hash, timestamp ?? dayjs().unix(), {
            ttl: 86400 // store for 24 hours (seconds)
        });
    }
}
async getActivity(item: Submission | Comment) {
try {
let hash = '';
@@ -946,7 +994,7 @@ export class SubredditResources {
hash = `sub-${item.name}`;
if (tryToFetch && item instanceof Submission) {
// @ts-ignore
const itemToCache = await item.fetch();
const itemToCache = await item.refresh();
await this.cache.set(hash, itemToCache, {ttl: this.submissionTTL});
return itemToCache;
} else {
@@ -958,7 +1006,7 @@ export class SubredditResources {
hash = `comm-${item.name}`;
if (tryToFetch && item instanceof Comment) {
// @ts-ignore
const itemToCache = await item.fetch();
const itemToCache = await item.refresh();
await this.cache.set(hash, itemToCache, {ttl: this.commentTTL});
return itemToCache;
} else {
@@ -968,8 +1016,12 @@ export class SubredditResources {
}
}
return item;
} catch (e) {
throw new ErrorWithCause('Error occurred while trying to add Activity to cache', {cause: e});
} catch (e: any) {
if(e.message !== undefined && e.message.includes('Cannot read properties of undefined (reading \'constructor\')')) {
throw new ErrorWithCause('Error occurred while trying to add Activity to cache (Comment likely does not exist)', {cause: e});
} else {
throw new ErrorWithCause('Error occurred while trying to add Activity to cache', {cause: e});
}
}
}
@@ -1054,8 +1106,9 @@ export class SubredditResources {
return subreddit as Subreddit;
}
} catch (err: any) {
this.logger.error('Error while trying to fetch a cached subreddit', err);
throw err.logged;
const cmError = new CMError('Error while trying to fetch a cached subreddit', {cause: err, logged: true});
this.logger.error(cmError);
throw cmError;
}
}
@@ -1631,13 +1684,12 @@ export class SubredditResources {
return filteredListing;
}
async getContent(val: string, subredditArg?: Subreddit): Promise<string> {
async getExternalResource(val: string, subredditArg?: Subreddit): Promise<{val: string, fromCache: boolean, response?: Response, hash?: string}> {
const subreddit = subredditArg || this.subreddit;
let cacheKey;
const wikiContext = parseWikiContext(val);
if (wikiContext !== undefined) {
cacheKey = `${wikiContext.wiki}${wikiContext.subreddit !== undefined ? `|${wikiContext.subreddit}` : ''}`;
cacheKey = `${subreddit.display_name}-content-${wikiContext.wiki}${wikiContext.subreddit !== undefined ? `|${wikiContext.subreddit}` : ''}`;
}
const extUrl = wikiContext === undefined ? parseExternalUrl(val) : undefined;
if (extUrl !== undefined) {
@@ -1645,25 +1697,25 @@ export class SubredditResources {
}
if (cacheKey === undefined) {
return val;
return {val, fromCache: false, hash: cacheKey};
}
// try to get cached value first
let hash = `${subreddit.display_name}-content-${cacheKey}`;
if (this.wikiTTL !== false) {
await this.stats.cache.content.identifierRequestCount.set(cacheKey, (await this.stats.cache.content.identifierRequestCount.wrap(cacheKey, () => 0) as number) + 1);
this.stats.cache.content.requestTimestamps.push(Date.now());
this.stats.cache.content.requests++;
const cachedContent = await this.cache.get(hash);
const cachedContent = await this.cache.get(cacheKey);
if (cachedContent !== undefined && cachedContent !== null) {
this.logger.debug(`Content Cache Hit: ${cacheKey}`);
return cachedContent as string;
return {val: cachedContent as string, fromCache: true, hash: cacheKey};
} else {
this.stats.cache.content.miss++;
}
}
let wikiContent: string;
let response: Response | undefined;
// no cache hit, get from source
if (wikiContext !== undefined) {
@@ -1691,7 +1743,9 @@ export class SubredditResources {
}
} else {
try {
wikiContent = await fetchExternalUrl(extUrl as string, this.logger);
const [wikiContentVal, responseVal] = await fetchExternalResult(extUrl as string, this.logger);
wikiContent = wikiContentVal;
response = responseVal;
} catch (err: any) {
const msg = `Error occurred while trying to fetch the url ${extUrl}`;
this.logger.error(msg, err);
@@ -1699,13 +1753,118 @@ export class SubredditResources {
}
}
if (this.wikiTTL !== false) {
return {val: wikiContent, fromCache: false, response, hash: cacheKey};
}
/**
 * Resolve a content value to its final string form.
 *
 * `val` may reference a wiki page or an external URL (fetched via getExternalResource);
 * any other string is returned unchanged. Freshly fetched content is cached under the
 * resource hash when a wiki TTL is configured.
 * */
async getContent(val: string, subredditArg?: Subreddit): Promise<string> {
    const {val: wikiContent, fromCache, hash} = await this.getExternalResource(val, subredditArg);
    if (!fromCache && hash !== undefined && this.wikiTTL !== false) {
        // FIX: await the cache write (was fire-and-forget) so failures surface here
        // rather than as an unhandled rejection
        await this.cache.set(hash, wikiContent, {ttl: this.wikiTTL});
    }
    return wikiContent;
}
/**
 * Convenience method combining getContent and SnoowrapUtils@renderContent in one call.
 *
 * Resolves `contentStr` (plain string, wiki reference, or external URL) via getContent,
 * then renders it as a template against the given Activity and rule results.
 * Falls back to this resource's own UserNotes when none are supplied.
 * */
async renderContent(contentStr: string, data: SnoowrapActivity, ruleResults: RuleResultEntity[] = [], usernotes?: UserNotes) {
const content = await this.getContent(contentStr);
return await renderContent(content, data, ruleResults, usernotes ?? this.userNotes);
}
/**
 * Fetch, parse, validate, and cache a config fragment referenced by an `includes` entry.
 *
 * The fragment (wiki page or external URL) is parsed as JSON or YAML. Cached fragments are
 * returned as-is (already validated when first stored). Fresh fragments are validated with
 * `validateFunc` (when given) and then cached according to the include's `ttl` setting:
 *   false      => never cache
 *   number     => cache for that many seconds
 *   true       => cache forever
 *   'response' => derive TTL from the HTTP Cache-Control/Expires headers, falling back to wikiTTL
 *
 * @throws ConfigParseError when the fetched content is neither valid JSON nor YAML
 * */
async getConfigFragment<T>(includesData: IncludesData, validateFunc?: ConfigFragmentValidationFunc): Promise<T> {
    const {
        path,
        ttl = this.wikiTTL,
    } = includesData;

    const {val: configStr, fromCache, hash, response} = await this.getExternalResource(path);

    // `format` from the parse result is unused here and intentionally dropped
    const [, configObj, jsonErr, yamlErr] = parseFromJsonOrYamlToObject(configStr);
    if (configObj === undefined) {
        this.logger.error(yamlErr);
        this.logger.debug(jsonErr);
        throw new ConfigParseError(`Could not parse includes URL of '${configStr}' contents as JSON or YAML.`)
    }

    // if its from cache then we know the data is valid
    if (fromCache) {
        this.logger.verbose(`Got Config Fragment ${path} from cache`);
        return configObj.toJS() as unknown as T;
    }

    const rawData = configObj.toJS();
    // validate fresh data when a validator is provided (validator throws on failure)
    // NOTE: original wrapped this in a try/catch that only rethrew -- removed as it added nothing
    if (validateFunc !== undefined) {
        validateFunc(configObj.toJS(), fromCache);
    }
    const validatedData = rawData as unknown as T;

    let ttlVal: number | false = this.wikiTTL;
    if (ttl === false) {
        // never cache
        return validatedData;
    } else if (typeof ttl === 'number') {
        ttlVal = ttl;
    } else if (ttl === true) {
        // cache forever
        ttlVal = 0;
    } else if (ttl === 'response') {
        // try to get cache time from response headers, if they exist
        // otherwise fallback to wiki ttl
        if (response === undefined) {
            ttlVal = this.wikiTTL;
        } else {
            const cc = response.headers.get('cache-control');
            const ex = response.headers.get('expires');
            if (cc !== null) {
                // FIX: original passed the pattern as a STRING ('/no-(cache|store)/i') which is
                // compiled to a regex matching literal slashes and a literal 'i' -- it could never
                // match a real Cache-Control header. Use an actual case-insensitive RegExp literal.
                if (null !== cc.match(/no-(cache|store)/i)) {
                    // response doesn't want to be stored :(
                    return validatedData;
                }
                const matches = cc.match(/max-age=(\d+)/);
                if (null === matches) {
                    // no max-age directive present, fallback to wiki ttl
                    ttlVal = this.wikiTTL;
                } else {
                    ttlVal = parseInt(matches[1], 10);
                }
            } else if (ex !== null) {
                const expDate = dayjs(ex);
                if (dayjs.isDayjs(expDate) && expDate.isValid()) {
                    const seconds = expDate.diff(dayjs(), 'second');
                    if (seconds < 0) {
                        // expiration is older than now?? don't cache
                        return validatedData;
                    }
                    ttlVal = seconds;
                }
            } else {
                // couldn't get a cache header, fallback
                ttlVal = this.wikiTTL;
            }
        }
    }

    if (ttlVal !== false) {
        // FIX: await the cache write so errors propagate instead of being silently dropped
        await this.cache.set(hash as string, configStr, {ttl: ttlVal});
    }
    return validatedData;
}
async cacheSubreddits(subs: (Subreddit | string)[]) {
const allSubs = subs.map(x => typeof x !== 'string' ? x.display_name : x);
const subNames = [...new Set(allSubs)];
@@ -2132,7 +2291,7 @@ export class SubredditResources {
const requestedSourcesVal: string[] = !Array.isArray(itemOptVal) ? [itemOptVal] as string[] : itemOptVal as string[];
const requestedSources = requestedSourcesVal.map(x => strToActivitySource(x).toLowerCase());
propResultsMap.source!.passed = criteriaPassWithIncludeBehavior(requestedSources.some(x => source.toLowerCase().includes(x)), include);
propResultsMap.source!.passed = criteriaPassWithIncludeBehavior(requestedSources.some(x => source.toLowerCase().trim() === x.toLowerCase().trim()), include);
break;
}
case 'score':
@@ -2148,27 +2307,45 @@ export class SubredditResources {
propResultsMap.reports!.reason = reportsMsg;
break;
}
const reportCompare = parseGenericValueComparison(itemOptVal as string);
let reportType = 'total';
if(reportCompare.extra !== undefined && reportCompare.extra.trim() !== '') {
const requestedType = reportCompare.extra.toLocaleLowerCase().trim();
if(requestedType.includes('mod')) {
reportType = 'mod';
} else if(requestedType.includes('user')) {
reportType = 'user';
} else {
const reportTypeWarn = `Did not recognize the report type "${requestedType}" -- can only use "mod" or "user". Will default to TOTAL reports`;
log.debug(reportTypeWarn);
propResultsMap.reports!.reason = reportTypeWarn;
}
const reportSummaryParts: string[] = [];
let reports: ActivityReport[] = [];
if(item.num_reports > 0) {
reports = await this.database.getRepository(ActivityReport).createQueryBuilder('report')
.select('report')
.where({activityId: item.name})
.getMany();
}
let reportNum = item.num_reports;
if(reportType === 'user') {
reportNum = item.user_reports.length;
} else {
reportNum = item.mod_reports.length;
const reportCompare = parseReportComparison(itemOptVal as string);
let reportType = reportCompare.reportType ?? 'total';
let validReports = reports;
if(reportCompare.reportType === 'user') {
validReports = validReports.filter(x => x.type === 'user');
} else if(reportCompare.reportType === 'mod') {
validReports = validReports.filter(x => x.type === 'mod');
}
propResultsMap.reports!.found = `${reportNum} ${reportType}`;
if(reportCompare.reasonRegex !== undefined) {
reportSummaryParts.push(`containing reason matching ${reportCompare.reasonMatch}`);
validReports = validReports.filter(x => reportCompare.reasonRegex?.test(x.reason));
}
if(reportCompare.durationText !== undefined) {
reportSummaryParts.push(`within ${reportCompare.durationText}`);
const earliestDate = dayjs().subtract(reportCompare.duration as Duration);
validReports = validReports.filter(x => x.createdAt.isSameOrAfter(earliestDate));
}
let reportNum = validReports.length;
reportSummaryParts.unshift(`${reportNum} ${reportType} reports`);
propResultsMap.reports!.found = reportSummaryParts.join(' ');
propResultsMap.reports!.passed = criteriaPassWithIncludeBehavior(comparisonTextOp(reportNum, reportCompare.operator, reportCompare.value), include);
break;
case 'removed':
@@ -2381,6 +2558,23 @@ export class SubredditResources {
propResultsMap.depth!.found = depth;
propResultsMap.depth!.passed = criteriaPassWithIncludeBehavior(comparisonTextOp(depth, depthCompare.operator, depthCompare.value), include);
break;
case 'upvoteRatio':
    if (asSubmission(item)) {
        // a bare number in config is treated as ">= N"
        const compareStr = typeof itemOptVal === 'number' ? `>= ${itemOptVal}` : itemOptVal as string;
        const ratioCompare = parseGenericValueComparison(compareStr);
        // reddit reports the ratio as 0..1; compare as a percentage
        const ratio = item.upvote_ratio * 100;
        propResultsMap.upvoteRatio!.found = ratio;
        propResultsMap.upvoteRatio!.passed = criteriaPassWithIncludeBehavior(comparisonTextOp(ratio, ratioCompare.operator, ratioCompare.value), include);
        break;
    } else {
        const ratioCommWarn = `Cannot test for 'upvoteRatio' on a Comment`;
        log.debug(ratioCommWarn);
        // FIX: original wrote to propResultsMap.depth! here (copy-paste error from the 'depth'
        // case) -- the criteria being processed is upvoteRatio, so record the result there
        propResultsMap.upvoteRatio!.passed = true;
        propResultsMap.upvoteRatio!.reason = ratioCommWarn;
        break;
    }
case 'flairTemplate':
case 'link_flair_text':
case 'link_flair_css_class':
@@ -3164,14 +3358,26 @@ export class SubredditResources {
return he.decode(Mustache.render(footerRawContent, {subName, permaLink, modmailLink, botLink: BOT_LINK}));
}
async getImageHash(img: ImageData): Promise<string|undefined> {
const hash = `imgHash-${img.baseUrl}`;
async getImageHash(img: ImageData): Promise<Required<ImageHashCacheData>|undefined> {
if(img.hashResult !== undefined && img.hashResultFlipped !== undefined) {
return img.toHashCache() as Required<ImageHashCacheData>;
}
const hash = `imgHash-${img.basePath}`;
const result = await this.cache.get(hash) as string | undefined | null;
this.stats.cache.imageHash.requests++
this.stats.cache.imageHash.requestTimestamps.push(Date.now());
await this.stats.cache.imageHash.identifierRequestCount.set(hash, (await this.stats.cache.imageHash.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
if(result !== undefined && result !== null) {
return result;
try {
const data = JSON.parse(result);
if(asStrongImageHashCache(data)) {
return data;
}
} catch (e) {
// had old values, just drop it
}
}
this.stats.cache.commentCheck.miss++;
return undefined;
@@ -3182,8 +3388,8 @@ export class SubredditResources {
// return hash;
}
async setImageHash(img: ImageData, hash: string, ttl: number): Promise<void> {
await this.cache.set(`imgHash-${img.baseUrl}`, hash, {ttl});
async setImageHash(img: ImageData, ttl: number): Promise<void> {
await this.cache.set(`imgHash-${img.basePath}`, img.toHashCache() as Required<ImageHashCacheData>, {ttl});
// const hash = await this.cache.wrap(img.baseUrl, async () => await img.hash(true), { ttl }) as string;
// if(img.hashResult === undefined) {
// img.hashResult = hash;
@@ -3197,6 +3403,21 @@ export class SubredditResources {
}
return undefined;
}
async getSubredditRemovalReasons(): Promise<SubredditRemovalReason[]> {
if(this.wikiTTL !== false) {
return await this.cache.wrap(`removalReasons`, async () => {
const res = await this.client.getSubredditRemovalReasons(this.subreddit.display_name);
return Object.values(res.data);
}, { ttl: this.wikiTTL }) as SubredditRemovalReason[];
}
const res = await this.client.getSubredditRemovalReasons(this.subreddit.display_name);
return Object.values(res.data);
}
// Convenience lookup: resolve a single removal reason by id from the (possibly cached) full list
async getSubredditRemovalReasonById(id: string): Promise<SubredditRemovalReason | undefined> {
return (await this.getSubredditRemovalReasons()).find(x => x.id === id);
}
}
export class BotResourcesManager {
@@ -3380,6 +3601,14 @@ export class BotResourcesManager {
return resource;
}
// Destroy the resources instance for the named subreddit (if one exists) and drop it from the
// manager's map so a fresh instance is built on next access
async destroy(subName: string) {
const res = this.get(subName);
if(res !== undefined) {
await res.destroy();
this.resources.delete(subName);
}
}
async getPendingSubredditInvites(): Promise<(string[])> {
const subredditNames = await this.defaultCache.get(`modInvites`);
if (subredditNames !== undefined && subredditNames !== null) {
@@ -3389,11 +3618,19 @@ export class BotResourcesManager {
}
/**
 * Record a pending moderator invite for a subreddit.
 *
 * Names are trimmed before storage and compared case-insensitively for duplicates.
 * (The stripped diff in the source contained both the old unconditional `push(subreddit)` and
 * the new trimmed/deduped push back-to-back, which would have added the name twice; only the
 * newer behavior is kept.)
 *
 * @throws CMError when the name is empty/blank or an invite for it already exists
 * */
async addPendingSubredditInvite(subreddit: string): Promise<void> {
    if (subreddit === null || subreddit === undefined || subreddit.trim() === '') {
        throw new CMError('Subreddit name cannot be empty');
    }
    let subredditNames = await this.defaultCache.get(`modInvites`) as (string[] | undefined | null);
    if (subredditNames === undefined || subredditNames === null) {
        subredditNames = [];
    }
    const cleanName = subreddit.trim();
    if (subredditNames.some(x => x.trim().toLowerCase() === cleanName.toLowerCase())) {
        throw new CMError(`An invite for the Subreddit '${subreddit}' already exists`);
    }
    subredditNames.push(cleanName);
    // ttl 0 => store indefinitely
    await this.defaultCache.set(`modInvites`, subredditNames, {ttl: 0});
    return;
}
@@ -3403,7 +3640,7 @@ export class BotResourcesManager {
if (subredditNames === undefined || subredditNames === null) {
subredditNames = [];
}
subredditNames = subredditNames.filter(x => x !== subreddit);
subredditNames = subredditNames.filter(x => x.toLowerCase() !== subreddit.trim().toLowerCase());
await this.defaultCache.set(`modInvites`, subredditNames, {ttl: 0});
return;
}

View File

@@ -15,6 +15,7 @@ import {RichContent} from "../Common/interfaces";
import {Cache} from 'cache-manager';
import {isScopeError} from "../Utils/Errors";
import {ErrorWithCause} from "pony-cause";
import {UserNoteType} from "../Common/Infrastructure/Atomic";
interface RawUserNotesPayload {
ver: number,
@@ -125,7 +126,7 @@ export class UserNotes {
return this.mod as RedditUser;
}
async addUserNote(item: (Submission|Comment), type: string | number, text: string = '', wikiEditReasonPrefix?: string): Promise<UserNote>
async addUserNote(item: (Submission|Comment), type: UserNoteType | number, text: string = '', wikiEditReasonPrefix?: string): Promise<UserNote>
{
const payload = await this.retrieveData();
const userName = getActivityAuthorName(item.author);
@@ -235,7 +236,7 @@ export interface UserNoteJson extends RichContent {
* User Note type key
* @examples ["spamwarn"]
* */
type: string,
type: UserNoteType,
}
export class UserNote {
@@ -246,7 +247,7 @@ export class UserNote {
// noteType: string | null;
// link: string;
constructor(public time: Dayjs, public text: string, public modIndex: number, public noteType: string | number, public link: (string | null) = null, public moderator?: RedditUser) {
constructor(public time: Dayjs, public text: string, public modIndex: number, public noteType: UserNoteType | number, public link: (string | null) = null, public moderator?: RedditUser) {
}

View File

@@ -0,0 +1,33 @@
import {
ActivityCheckConfigValue,
} from "./Check";
import {ManagerOptions} from "./Common/interfaces";
import {RunConfigHydratedData, RunConfigValue, RunConfigObject} from "./Run";
/**
 * Raw, user-authored configuration for a single subreddit, extending manager-level options.
 * Either `checks` or `runs` (or both) may be provided.
 * */
export interface SubredditConfigData extends ManagerOptions {
/**
 * A list of all the checks that should be run for a subreddit.
 *
 * Checks are split into two lists -- submission or comment -- based on kind and run independently.
 *
 * Checks in each list are run in the order found in the configuration.
 *
 * When a check "passes", and actions are performed, then all subsequent checks are skipped.
 * @minItems 1
 * */
checks?: ActivityCheckConfigValue[]
/**
 * A list of sets of Checks to run
 * @minItems 1
 * */
runs?: RunConfigValue[]
}
/**
 * SubredditConfigData with top-level `checks` removed -- presumably folded into `runs`
 * during hydration (note the RunConfigHydratedData element type). NOTE(review): confirm
 * the hydration step against the config-building code.
 * */
export interface SubredditConfigHydratedData extends Omit<SubredditConfigData, 'checks'> {
runs?: RunConfigHydratedData[]
}
/**
 * Fully-built form of the hydrated config where each run is a concrete RunConfigObject.
 * */
export interface SubredditConfigObject extends SubredditConfigHydratedData {
runs?: RunConfigObject[]
}

View File

@@ -1,9 +1,9 @@
import ExtendableError from "es6-error";
class InvalidRegexError extends ExtendableError {
constructor(regex: RegExp | RegExp[], val?: string, url?: string) {
constructor(regex: RegExp | RegExp[], val?: string, url?: string, message?: string) {
const msgParts = [
'Regex(es) did not match the value given.',
message ?? 'Regex(es) did not match the value given.',
];
let regArr = Array.isArray(regex) ? regex : [regex];
for(const r of regArr) {

View File

@@ -3,7 +3,8 @@ import {Submission, Subreddit, Comment} from "snoowrap/dist/objects";
import {parseSubredditName} from "../util";
import {ModUserNoteLabel} from "../Common/Infrastructure/Atomic";
import {CreateModNoteData, ModNote, ModNoteRaw, ModNoteSnoowrapPopulated} from "../Subreddit/ModNotes/ModNote";
import {SimpleError} from "./Errors";
import {CMError, SimpleError} from "./Errors";
import {RawSubredditRemovalReasonData, SnoowrapActivity} from "../Common/Infrastructure/Reddit";
// const proxyFactory = (endpoint: string) => {
// return class ProxiedSnoowrap extends Snoowrap {
@@ -130,7 +131,7 @@ export class ExtendedSnoowrap extends Snoowrap {
user: data.user.name,
}
if(data.activity !== undefined) {
requestData.reddit_id = data.activity.id;
requestData.reddit_id = data.activity.name;
}
const response =await this.oauthRequest({
@@ -140,6 +141,47 @@ export class ExtendedSnoowrap extends Snoowrap {
}) as { created: ModNoteRaw };
return new ModNote(response.created, this);
}
/**
 * Add a removal reason and/or mod note to a REMOVED Activity
 *
 * The activity must already be removed for this call to succeed. This is an UNDOCUMENTED endpoint.
 *
 * @see https://github.com/praw-dev/praw/blob/b22e1f514d68d36545daf62e8a8d6c6c8caf782b/praw/endpoints.py#L149 for endpoint
 * @see https://github.com/praw-dev/praw/blob/b22e1f514d68d36545daf62e8a8d6c6c8caf782b/praw/models/reddit/mixins/__init__.py#L28 for usage
 *
 * @throws CMError when neither a note nor a reason id is provided
 * */
async addRemovalReason(item: SnoowrapActivity, note?: string, reason?: string) {
    // the endpoint requires at least one of note/reason
    if (note === undefined && reason === undefined) {
        throw new CMError(`Must provide either a note or reason in order to add removal reason on Activity ${item.name}`, {isSerious: false});
    }
    // FIX: removed a try/catch wrapper that only rethrew the caught error -- it added nothing
    await this.oauthRequest({
        uri: 'api/v1/modactions/removal_reasons',
        method: 'post',
        body: {
            item_ids: [item.name],
            mod_note: note ?? null,
            reason_id: reason ?? null,
        },
    });
}
/**
 * Get a list of New Reddit removal reasons for a Subreddit
 *
 * This is an UNDOCUMENTED endpoint.
 *
 * @see https://github.com/praw-dev/praw/blob/b22e1f514d68d36545daf62e8a8d6c6c8caf782b/praw/endpoints.py#L151 for endpoint
 * */
async getSubredditRemovalReasons(sub: Subreddit | string): Promise<RawSubredditRemovalReasonData> {
    const subName = typeof sub === 'string' ? sub : sub.display_name;
    const response = await this.oauthRequest({
        uri: `api/v1/${subName}/removal_reasons`,
        method: 'get'
    });
    return response as RawSubredditRemovalReasonData;
}
}
export class RequestTrackingSnoowrap extends ExtendedSnoowrap {

View File

@@ -116,47 +116,34 @@ export const isSubreddit = async (subreddit: Subreddit, stateCriteria: Subreddit
})() as boolean;
}
const renderContentCommentTruncate = truncateStringToLength(50);
const shortTitleTruncate = truncateStringToLength(15);
export const renderContent = async (template: string, data: (Submission | Comment), ruleResults: RuleResultEntity[] = [], usernotes: UserNotes) => {
const conditional: any = {};
if(data.can_mod_post) {
conditional.reports = data.num_reports;
conditional.modReports = data.mod_reports.length;
conditional.userReports = data.user_reports.length;
}
if(asSubmission(data)) {
conditional.nsfw = data.over_18;
conditional.spoiler = data.spoiler;
conditional.op = true;
conditional.upvoteRatio = `${data.upvote_ratio * 100}%`;
} else {
conditional.op = data.is_submitter;
}
const templateData: any = {
kind: data instanceof Submission ? 'submission' : 'comment',
author: await data.author.name,
// make this a getter so that if we don't load notes (and api call) if we don't need to
// didn't work either for some reason
// tried to get too fancy :(
// get notes() {
// return usernotes.getUserNotes(data.author).then((notesData) => {
// // return usable notes data with some stats
// const current = notesData.length > 0 ? notesData[notesData.length -1] : undefined;
// // group by type
// const grouped = notesData.reduce((acc: any, x) => {
// const {[x.noteType]: nt = []} = acc;
// return Object.assign(acc, {[x.noteType]: nt.concat(x)});
// }, {});
// return {
// data: notesData,
// current,
// ...grouped,
// };
// });
// },
// when i was trying to use mustache-async (didn't work)
// notes: async () => {
// const notesData = await usernotes.getUserNotes(data.author);
// // return usable notes data with some stats
// const current = notesData.length > 0 ? notesData[notesData.length -1] : undefined;
// // group by type
// const grouped = notesData.reduce((acc: any, x) => {
// const {[x.noteType]: nt = []} = acc;
// return Object.assign(acc, {[x.noteType]: nt.concat(x)});
// }, {});
// return {
// data: notesData,
// current,
// ...grouped,
// };
// },
// @ts-ignore
author: getActivityAuthorName(await data.author),
votes: data.score,
age: dayjs.duration(dayjs().diff(dayjs.unix(data.created))).humanize(),
permalink: `https://reddit.com${data.permalink}`,
botLink: BOT_LINK,
id: data.name,
...conditional
}
if (template.includes('{{item.notes')) {
// we need to get notes
@@ -177,6 +164,10 @@ export const renderContent = async (template: string, data: (Submission | Commen
if (data instanceof Submission) {
templateData.url = data.url;
templateData.title = data.title;
templateData.shortTitle = shortTitleTruncate(data.title);
} else {
templateData.title = renderContentCommentTruncate(data.body);
templateData.shortTitle = shortTitleTruncate(data.body);
}
// normalize rule names and map context data
// NOTE: we are relying on users to use unique names for rules. If they don't only the last rule run of kind X will have its results here

View File

@@ -1,13 +1,18 @@
import {URL} from "url";
import {Logger} from "winston";
import {BotInstance, CMInstanceInterface, CMInstanceInterface as CMInterface} from "../interfaces";
import dayjs from 'dayjs';
import {BotConnection, LogInfo} from "../../Common/interfaces";
import normalizeUrl from "normalize-url";
import {HeartbeatResponse} from "../Common/interfaces";
import {
BotInstance,
CMInstanceInterface as CMInterface,
CMInstanceInterface,
HeartbeatResponse
} from "../Common/interfaces";
import jwt from "jsonwebtoken";
import got from "got";
import {ErrorWithCause} from "pony-cause";
import ClientBotInstance from "./ClientBotInstance";
export class CMInstance implements CMInterface {
friendly?: string;
@@ -24,6 +29,7 @@ export class CMInstance implements CMInterface {
migrationBlocker?: string
host: string;
secret: string;
invites: string[] = [];
logger: Logger;
logs: LogInfo[] = [];
@@ -72,6 +78,7 @@ export class CMInstance implements CMInterface {
secret: this.secret,
ranMigrations: this.ranMigrations,
migrationBlocker: this.migrationBlocker,
invites: this.invites,
}
}
@@ -86,6 +93,16 @@ export class CMInstance implements CMInterface {
return normalizeUrl(val) == this.normalUrl;
}
// Build a short-lived (1 minute) machine-to-machine JWT signed with this instance's shared
// secret; used for server-to-server calls such as the heartbeat check
getToken() {
return jwt.sign({
data: {
machine: true,
},
}, this.secret, {
expiresIn: '1m'
});
}
updateFromHeartbeat = (resp: HeartbeatResponse, otherFriendlies: string[] = []) => {
this.operators = resp.operators ?? [];
this.operatorDisplay = resp.operatorDisplay ?? '';
@@ -101,9 +118,10 @@ export class CMInstance implements CMInterface {
}
}
this.subreddits = resp.subreddits;
//@ts-ignore
this.bots = resp.bots.map(x => ({...x, instance: this}));
this.bots = resp.bots.map(x => new ClientBotInstance(x, this));
this.subreddits = this.bots.map(x => x.getSubreddits()).flat(3);
this.invites = resp.invites;
}
checkHeartbeat = async (force = false, otherFriendlies: string[] = []) => {
@@ -125,13 +143,7 @@ export class CMInstance implements CMInterface {
if (shouldCheck) {
this.logger.debug('Starting Heartbeat check');
this.lastCheck = dayjs().unix();
const machineToken = jwt.sign({
data: {
machine: true,
},
}, this.secret, {
expiresIn: '1m'
});
const machineToken = this.getToken();
try {
const resp = await got.get(`${this.normalUrl}/heartbeat`, {

View File

@@ -0,0 +1,61 @@
import {
BotInstance,
BotInstanceResponse,
CMInstanceInterface,
ManagerResponse,
NormalizedManagerResponse
} from '../Common/interfaces';
import {intersect, parseRedditEntity} from "../../util";
/**
 * Client-side view of a bot running on a CM instance, derived from a BotInstanceResponse.
 * Normalized subreddit names are pre-computed per manager for cheap access checks.
 * */
export class ClientBotInstance implements BotInstance {
    instance: CMInstanceInterface;
    botName: string;
    error?: string | undefined;
    managers: NormalizedManagerResponse[];
    nanny?: string | undefined;
    running: boolean;

    constructor(data: BotInstanceResponse, instance: CMInstanceInterface) {
        const {botName, error, managers, nanny, running} = data;
        this.instance = instance;
        this.botName = botName;
        this.error = error;
        this.nanny = nanny;
        this.running = running;
        // attach the parsed (normalized) subreddit name to each manager once, up front
        this.managers = managers.map(manager => ({
            ...manager,
            subredditNormal: parseRedditEntity(manager.subreddit).name,
        }));
    }

    /** Names of every manager this bot runs */
    getManagerNames(): string[] {
        return this.managers.map(manager => manager.name);
    }

    /** Subreddits this bot manages; normalized (parsed) names by default */
    getSubreddits(normalized = true): string[] {
        if (normalized) {
            return this.managers.map(manager => manager.subredditNormal);
        }
        return this.managers.map(manager => manager.subreddit);
    }

    /** Guest subreddits for the user plus any requested subreddits this bot manages (deduped) */
    getAccessibleSubreddits(user: string, subreddits: string[] = []): string[] {
        const requested = subreddits.map(sub => parseRedditEntity(sub).name);
        const accessible = new Set<string>(this.getGuestSubreddits(user));
        for (const sub of intersect(requested, this.getSubreddits())) {
            accessible.add(sub);
        }
        return [...accessible];
    }

    /** Managers where the user appears in the guest list (case-insensitive) */
    getGuestManagers(user: string): NormalizedManagerResponse[] {
        const userLower = user.toLowerCase();
        return this.managers.filter(manager => manager.guests.some(guest => guest.name.toLowerCase() === userLower));
    }

    /** Normalized subreddit names where the user is a guest */
    getGuestSubreddits(user: string): string[] {
        return this.getGuestManagers(user).map(manager => manager.subredditNormal);
    }

    /** True when the user can access at least one subreddit on this bot */
    canUserAccessBot(user: string, subreddits: string[] = []) {
        return this.getAccessibleSubreddits(user, subreddits).length > 0;
    }

    /** True when the user can access the given subreddit on this bot */
    canUserAccessSubreddit(subreddit: string, user: string, subreddits: string[] = []): boolean {
        const normalName = parseRedditEntity(subreddit).name;
        return this.getAccessibleSubreddits(user, subreddits).includes(normalName);
    }
}

export default ClientBotInstance;

View File

@@ -7,11 +7,8 @@ import {Cache} from "cache-manager";
import CacheManagerStore from 'express-session-cache-manager'
import {CacheOptions} from "../../Common/interfaces";
import {Brackets, DataSource, IsNull, LessThanOrEqual, Repository} from "typeorm";
import {DateUtils} from 'typeorm/util/DateUtils';
import {ClientSession} from "../../Common/WebEntities/ClientSession";
import dayjs from "dayjs";
import {Logger} from "winston";
import {Invite} from "../../Common/WebEntities/Invite";
import {WebSetting} from "../../Common/WebEntities/WebSetting";
import {ErrorWithCause} from "pony-cause";
@@ -29,12 +26,6 @@ export type TypeormStoreOptions = Partial<SessionOptions & {
interface IWebStorageProvider {
createSessionStore(options?: CacheManagerStoreOptions | TypeormStoreOptions): Store
inviteGet(id: string): Promise<InviteData | undefined>
inviteDelete(id: string): Promise<void>
inviteCreate(id: string, data: InviteData): Promise<InviteData>
getSessionSecret(): Promise<string | undefined>
setSessionSecret(secret: string): Promise<void>
@@ -42,43 +33,25 @@ interface IWebStorageProvider {
interface StorageProviderOptions {
logger: Logger
invitesMaxAge?: number
loggerLabels?: string[]
}
abstract class StorageProvider implements IWebStorageProvider {
invitesMaxAge?: number
logger: Logger;
protected constructor(data: StorageProviderOptions) {
const {
logger,
invitesMaxAge,
loggerLabels = [],
} = data;
this.invitesMaxAge = invitesMaxAge;
this.logger = logger.child({labels: ['Web', 'Storage', ...loggerLabels]}, mergeArr);
}
protected abstract getInvite(id: string): Promise<InviteData | undefined | null>;
async inviteGet(id: string) {
const data = await this.getInvite(id);
if (data === undefined || data === null) {
return undefined;
}
return data;
}
abstract createSessionStore(options?: CacheManagerStoreOptions | TypeormStoreOptions): Store;
abstract getSessionSecret(): Promise<string | undefined>;
abstract inviteCreate(id: string, data: InviteData): Promise<InviteData>;
abstract inviteDelete(id: string): Promise<void>;
abstract setSessionSecret(secret: string): Promise<void>;
}
@@ -100,19 +73,6 @@ export class CacheStorageProvider extends StorageProvider {
return new CacheManagerStore(this.cache, {prefix: 'sess:'});
}
protected async getInvite(id: string) {
return await this.cache.get(`invite:${id}`) as InviteData | undefined | null;
}
async inviteCreate(id: string, data: InviteData): Promise<InviteData> {
await this.cache.set(`invite:${id}`, data, {ttl: (this.invitesMaxAge ?? 0) * 1000});
return data;
}
async inviteDelete(id: string): Promise<void> {
return await this.cache.del(`invite:${id}`);
}
async getSessionSecret() {
const val = await this.cache.get(`sessionSecret`);
if (val === null || val === undefined) {
@@ -130,14 +90,12 @@ export class CacheStorageProvider extends StorageProvider {
export class DatabaseStorageProvider extends StorageProvider {
database: DataSource;
inviteRepo: Repository<Invite>;
webSettingRepo: Repository<WebSetting>;
clientSessionRepo: Repository<ClientSession>
constructor(data: { database: DataSource } & StorageProviderOptions) {
super(data);
this.database = data.database;
this.inviteRepo = this.database.getRepository(Invite);
this.webSettingRepo = this.database.getRepository(WebSetting);
this.clientSessionRepo = this.database.getRepository(ClientSession);
this.logger.debug('Using DATABASE');
@@ -147,26 +105,6 @@ export class DatabaseStorageProvider extends StorageProvider {
return new TypeormStore(options).connect(this.clientSessionRepo)
}
protected async getInvite(id: string): Promise<InviteData | undefined | null> {
const qb = this.inviteRepo.createQueryBuilder('invite');
return await qb
.andWhere({id})
.andWhere(new Brackets((qb) => {
qb.where({_expiresAt: LessThanOrEqual(DateUtils.mixedDateToDatetimeString(dayjs().toDate()))})
.orWhere({_expiresAt: IsNull()})
})
).getOne();
}
async inviteCreate(id: string, data: InviteData): Promise<InviteData> {
await this.inviteRepo.save(new Invite({...data, id}));
return data;
}
async inviteDelete(id: string): Promise<void> {
await this.inviteRepo.delete(id);
}
async getSessionSecret(): Promise<string | undefined> {
try {
const dbSessionSecret = await this.webSettingRepo.findOneBy({name: 'sessionSecret'});

View File

@@ -13,7 +13,7 @@ import {
CheckSummary,
RunResult,
ActionedEvent,
ActionResult, RuleResult, EventActivity
ActionResult, RuleResult, EventActivity, OperatorConfigWithFileContext
} from "../../Common/interfaces";
import {
buildCachePrefix,
@@ -38,7 +38,6 @@ import sharedSession from "express-socket.io-session";
import dayjs from "dayjs";
import httpProxy from 'http-proxy';
import {arrayMiddle, booleanMiddle} from "../Common/middleware";
import {BotInstance, CMInstanceInterface} from "../interfaces";
import { URL } from "url";
import {MESSAGE} from "triple-beam";
import Autolinker from "autolinker";
@@ -57,6 +56,8 @@ import {MigrationService} from "../../Common/MigrationService";
import {RuleResultEntity} from "../../Common/Entities/RuleResultEntity";
import {RuleSetResultEntity} from "../../Common/Entities/RuleSetResultEntity";
import { PaginationAwareObject } from "../Common/util";
import {BotInstance, BotStatusResponse, CMInstanceInterface, InviteData} from "../Common/interfaces";
import {open} from "fs/promises";
const emitter = new EventEmitter();
@@ -151,10 +152,10 @@ const availableLevels = ['error', 'warn', 'info', 'verbose', 'debug'];
let webLogs: LogInfo[] = [];
const webClient = async (options: OperatorConfig) => {
const webClient = async (options: OperatorConfigWithFileContext) => {
const {
operator: {
name,
name: operatorName,
display,
},
userAgent: uaFragment,
@@ -169,9 +170,6 @@ const webClient = async (options: OperatorConfig) => {
port,
storage: webStorage = 'database',
caching,
invites: {
maxAge: invitesMaxAge,
},
session: {
secret: sessionSecretFromConfig,
maxAge: sessionMaxAge,
@@ -179,16 +177,14 @@ const webClient = async (options: OperatorConfig) => {
},
maxLogs,
clients,
credentials: {
clientId,
clientSecret,
redirectUri
},
credentials,
operators = [],
},
//database
} = options;
let clientCredentials = credentials;
let sessionSecretSynced = false;
const userAgent = getUserAgent(`web:contextBot:{VERSION}{FRAG}:dashboard`, uaFragment);
@@ -205,7 +201,9 @@ const webClient = async (options: OperatorConfig) => {
logger.stream().on('log', (log: LogInfo) => {
emitter.emit('log', log);
webLogs.unshift(log);
webLogs = webLogs.slice(0, 201);
if(webLogs.length > 200) {
webLogs.splice(200);
}
});
const migrationService = new MigrationService({
@@ -235,7 +233,7 @@ const webClient = async (options: OperatorConfig) => {
}
});
const storage = webStorage === 'database' ? new DatabaseStorageProvider({database, invitesMaxAge, logger}) : new CacheStorageProvider({...caching, invitesMaxAge, logger});
const storage = webStorage === 'database' ? new DatabaseStorageProvider({database, logger}) : new CacheStorageProvider({...caching, logger});
let sessionSecret: string;
if (sessionSecretFromConfig !== undefined) {
@@ -295,9 +293,9 @@ const webClient = async (options: OperatorConfig) => {
}
const client = await ExtendedSnoowrap.fromAuthCode({
userAgent,
clientId,
clientSecret,
redirectUri: redirectUri as string,
clientId: clientCredentials.clientId,
clientSecret: clientCredentials.clientSecret,
redirectUri: clientCredentials.redirectUri as string,
code: code as string,
});
const user = await client.getMe().name as string;
@@ -313,7 +311,7 @@ const webClient = async (options: OperatorConfig) => {
let sessionStoreProvider = storage;
if(sessionStorage !== webStorage) {
sessionStoreProvider = sessionStorage === 'database' ? new DatabaseStorageProvider({database, invitesMaxAge, logger, loggerLabels: ['Session']}) : new CacheStorageProvider({...caching, invitesMaxAge, logger, loggerLabels: ['Session']});
sessionStoreProvider = sessionStorage === 'database' ? new DatabaseStorageProvider({database, logger, loggerLabels: ['Session']}) : new CacheStorageProvider({...caching, logger, loggerLabels: ['Session']});
}
const sessionObj = session({
cookie: {
@@ -347,10 +345,50 @@ const webClient = async (options: OperatorConfig) => {
}
}
app.postAsync('/init', async (req, res, next) => {
if (clientCredentials.clientId === undefined || clientCredentials.clientSecret === undefined) {
const {
redirect = '',
clientId = '',
clientSecret = '',
operator = '',
} = req.body as any;
if (redirect === null || redirect.trim() === '') {
return res.status(400).send('redirect cannot be empty');
}
if (clientId === null || clientId.trim() === '') {
return res.status(400).send('clientId cannot be empty');
}
if (clientSecret === null || clientSecret.trim() === '') {
return res.status(400).send('clientSecret cannot be empty');
}
if(operatorName === undefined) {
return res.status(400).send('operator cannot be empty');
}
options.fileConfig.document.setWebCredentials({redirectUri: redirect.trim(), clientId: clientId.trim(), clientSecret: clientSecret.trim()});
if(operators.length === 0 && operator !== '') {
options.fileConfig.document.setOperator(parseRedditEntity(operator, 'user').name);
}
const handle = await open(options.fileConfig.document.location as string, 'w');
await handle.writeFile(options.fileConfig.document.toString());
await handle.close();
clientCredentials = {
clientId,
clientSecret,
redirectUri: redirect
}
return res.status(200).send();
} else {
return res.status(400).send('Can only do init setup when client credentials do not already exist.');
}
});
const scopeMiddle = arrayMiddle(['scope']);
const successMiddle = booleanMiddle([{name: 'closeOnSuccess', defaultVal: undefined, required: false}]);
app.getAsync('/login', scopeMiddle, successMiddle, async (req, res, next) => {
if (redirectUri === undefined) {
if (clientCredentials.redirectUri === undefined) {
return res.render('error', {error: `No <b>redirectUri</b> was specified through environmental variables or program argument. This must be provided in order to use the web interface.`});
}
const {query: { scope: reqScopes = [], closeOnSuccess } } = req;
@@ -362,10 +400,13 @@ const webClient = async (options: OperatorConfig) => {
// @ts-ignore
req.session.closeOnSuccess = closeOnSuccess;
}
if(clientCredentials.clientId === undefined) {
return res.render('init', { operators: operators.join(',') });
}
const authUrl = Snoowrap.getAuthUrl({
clientId,
clientId: clientCredentials.clientId,
scope: scope,
redirectUri: redirectUri as string,
redirectUri: clientCredentials.redirectUri as string,
permanent: false,
state: req.session.state,
});
@@ -392,10 +433,10 @@ const webClient = async (options: OperatorConfig) => {
return res.render('error', {error: errContent});
}
// @ts-ignore
const invite = await storage.inviteGet(req.session.inviteId);
const invite = req.session.invite as InviteData; //await storage.inviteGet(req.session.inviteId);
if(invite === undefined) {
// @ts-ignore
return res.render('error', {error: `Could not find invite with id ${req.session.inviteId}?? This should happen!`});
return res.render('error', {error: `Could not find invite in session?? This should happen!`});
}
const client = await Snoowrap.fromAuthCode({
userAgent,
@@ -407,40 +448,36 @@ const webClient = async (options: OperatorConfig) => {
// @ts-ignore
const user = await client.getMe();
const userName = `u/${user.name}`;
// @ts-ignore
await storage.inviteDelete(req.session.inviteId);
//await storage.inviteDelete(req.session.inviteId);
let data: any = {
accessToken: client.accessToken,
refreshToken: client.refreshToken,
userName,
};
if(invite.instance !== undefined) {
const bot = cmInstances.find(x => x.getName() === invite.instance);
if(bot !== undefined) {
const botPayload: any = {
overwrite: invite.overwrite === true,
name: userName,
credentials: {
reddit: {
accessToken: client.accessToken,
refreshToken: client.refreshToken,
clientId: invite.clientId,
clientSecret: invite.clientSecret,
}
}
};
if(invite.subreddits !== undefined && invite.subreddits.length > 0) {
botPayload.subreddits = {names: invite.subreddits};
// @ts-ignore
const inviteId = invite.id as string;
// @ts-ignore
const botAddResult: any = await addBot(inviteId, {
invite: inviteId,
credentials: {
reddit: {
accessToken: client.accessToken,
refreshToken: client.refreshToken,
clientId: invite.clientId,
clientSecret: invite.clientSecret,
}
const botAddResult: any = await addBot(bot, {name: invite.creator}, botPayload);
// stored
// success
data = {...data, ...botAddResult};
// @ts-ignore
req.session.destroy();
req.logout();
}
}
},
name: userName,
});
data = {...data, ...botAddResult};
// @ts-ignore
req.session.destroy();
req.logout();
return res.render('callback', data);
} else {
return next();
@@ -501,31 +538,100 @@ const webClient = async (options: OperatorConfig) => {
}
}
app.getAsync('/auth/helper', helperAuthed, (req, res) => {
const createUserToken = async (req: express.Request, res: express.Response, next: Function) => {
req.token = createToken(req.instance as CMInstanceInterface, req.user);
next();
}
const instanceWithPermissions = async (req: express.Request, res: express.Response, next: Function) => {
delete req.session.botId;
delete req.session.authBotId;
const msg = 'Bot does not exist or you do not have permission to access it';
const instance = cmInstances.find(x => x.getName() === req.query.instance);
if (instance === undefined) {
return res.status(404).render('error', {error: msg});
}
if (!req.user?.clientData?.webOperator && !req.user?.canAccessInstance(instance)) {
return res.status(404).render('error', {error: msg});
}
if (req.params.subreddit !== undefined && !req.user?.canAccessSubreddit(instance,req.params.subreddit)) {
return res.status(404).render('error', {error: msg});
}
req.instance = instance;
req.session.botId = instance.getName();
req.session.authBotId = instance.getName();
return next();
}
const instancesViewData = async (req: express.Request, res: express.Response, next: Function) => {
const user = req.user as Express.User;
const instance = req.instance as CMInstance;
const shownInstances = cmInstances.reduce((acc: CMInstance[], curr) => {
const isBotOperator = user?.isInstanceOperator(curr);
if(user?.clientData?.webOperator) {
// @ts-ignore
return acc.concat({...curr.getData(), canAccessLocation: true, isOperator: isBotOperator});
}
if(!isBotOperator && !req.user?.canAccessInstance(curr)) {
return acc;
}
// @ts-ignore
return acc.concat({...curr.getData(), canAccessLocation: isBotOperator, isOperator: isBotOperator, botId: curr.getName()});
},[]);
// @ts-ignore
req.instancesViewData = {
instances: shownInstances,
instanceId: instance.getName()
};
next();
}
app.getAsync('/auth/helper', helperAuthed, instanceWithPermissions, instancesViewData, (req, res) => {
return res.render('helper', {
redirectUri,
clientId,
clientSecret,
redirectUri: clientCredentials.redirectUri,
clientId: clientCredentials.clientId,
clientSecret: clientCredentials.clientSecret,
token: req.isAuthenticated() && req.user?.clientData?.webOperator ? token : undefined,
instances: cmInstances.filter(x => req.user?.isInstanceOperator(x)).map(x => x.getName()),
// @ts-ignore
...req.instancesViewData,
});
});
app.getAsync('/auth/invite', async (req, res) => {
const {invite: inviteId} = req.query;
app.getAsync('/auth/invite/:inviteId', async (req, res) => {
const {inviteId} = req.params;
if(inviteId === undefined) {
if (inviteId === undefined) {
return res.render('error', {error: '`invite` param is missing from URL'});
}
const invite = await storage.inviteGet(inviteId as string);
if(invite === undefined || invite === null) {
const cmInstance = cmInstances.find(x => x.invites.includes(inviteId));
if (cmInstance === undefined) {
return res.render('error', {error: 'Invite with the given id does not exist'});
}
return res.render('invite', {
permissions: JSON.stringify(invite.permissions || []),
invite: inviteId,
});
try {
const invite = await got.get(`${cmInstance.normalUrl}/invites/${inviteId}`, {
headers: {
'Authorization': `Bearer ${cmInstance.getToken()}`,
}
}).json() as InviteData;
return res.render('invite', {
guests: invite.guests !== undefined && invite.guests !== null && invite.guests.length > 0 ? invite.guests.join(',') : '',
permissions: JSON.stringify(invite.permissions || []),
invite: inviteId,
});
} catch (err: any) {
cmInstance.logger.error(new ErrorWithCause(`Retrieving invite failed`, {cause: err}));
return res.render('error', {error: 'An error occurred while validating your invite and has been logged. Let the person who gave you this invite know! Sorry about that.'})
}
});
app.postAsync('/auth/create', helperAuthed, async (req: express.Request, res: express.Response) => {
@@ -536,15 +642,15 @@ const webClient = async (options: OperatorConfig) => {
redirect: redir,
instance,
subreddits,
code,
guests: guestsVal,
} = req.body as any;
const cid = ci || clientId;
const cid = ci || clientCredentials.clientId;
if(cid === undefined || cid.trim() === '') {
return res.status(400).send('clientId is required');
}
const ced = ce || clientSecret;
const ced = ce || clientCredentials.clientSecret;
if(ced === undefined || ced.trim() === '') {
return res.status(400).send('clientSecret is required');
}
@@ -553,32 +659,74 @@ const webClient = async (options: OperatorConfig) => {
return res.status(400).send('redirectUrl is required');
}
const inviteId = code || nanoid(20);
await storage.inviteCreate(inviteId, {
let guestArr = [];
if(typeof guestsVal === 'string') {
guestArr = guestsVal.split(',');
} else if(Array.isArray(guestsVal)) {
guestArr = guestsVal;
}
guestArr = guestArr.filter(x => x.trim() !== '').map(x => parseRedditEntity(x, 'user').name);
const inviteData = {
permissions,
clientId: (ci || clientId).trim(),
clientSecret: (ce || clientSecret).trim(),
clientId: (ci || clientCredentials.clientId).trim(),
clientSecret: (ce || clientCredentials.clientSecret).trim(),
redirectUri: redir.trim(),
instance,
subreddits: subreddits.trim() === '' ? [] : subreddits.split(',').map((x: string) => parseRedditEntity(x).name),
creator: (req.user as Express.User).name,
});
return res.send(inviteId);
guests: guestArr.length > 0 ? guestArr : undefined
};
const cmInstance = cmInstances.find(x => x.friendly === instance);
if(cmInstance === undefined) {
return res.status(400).send(`No instance found with name "${instance}"`);
}
const token = createToken(cmInstance, req.user);
try {
const resp = await got.post(`${cmInstance.normalUrl}/invites`, {
headers: {
'Authorization': `Bearer ${token}`,
},
json: inviteData,
}).json() as any;
cmInstance.invites.push(resp.id);
return res.send(resp.id);
} catch (err: any) {
cmInstance.logger.error(new ErrorWithCause(`Could not create bot invite.`, {cause: err}));
return res.status(400).send(`Error while creating invite: ${err.message}`);
}
});
app.getAsync('/auth/init', async (req: express.Request, res: express.Response) => {
const {invite: inviteId} = req.query;
app.getAsync('/auth/init/:inviteId', async (req: express.Request, res: express.Response) => {
const { inviteId } = req.params;
if(inviteId === undefined) {
return res.render('error', {error: '`invite` param is missing from URL'});
}
const invite = await storage.inviteGet(inviteId as string);
if(invite === undefined || invite === null) {
const cmInstance = cmInstances.find(x => x.invites.includes(inviteId));
if (cmInstance === undefined) {
return res.render('error', {error: 'Invite with the given id does not exist'});
}
let invite: InviteData;
try {
invite = await got.get(`${cmInstance.normalUrl}/invites/${inviteId}`, {
headers: {
'Authorization': `Bearer ${cmInstance.getToken()}`,
}
}).json() as InviteData;
} catch (err: any) {
cmInstance.logger.error(new ErrorWithCause(`Retrieving invite failed`, {cause: err}));
return res.render('error', {error: 'An error occurred while validating your invite and has been logged. Let the person who gave you this invite know! Sorry about that.'})
}
req.session.state = `bot_${randomId()}`;
// @ts-ignore
req.session.inviteId = inviteId;
req.session.invite = invite;
const scope = Object.entries(invite.permissions).reduce((acc: string[], curr) => {
const [k, v] = curr as unknown as [string, boolean];
@@ -618,31 +766,6 @@ const webClient = async (options: OperatorConfig) => {
}
logger.info(`Web UI started: http://localhost:${port}`, {label: ['Web']});
const instanceWithPermissions = async (req: express.Request, res: express.Response, next: Function) => {
delete req.session.botId;
delete req.session.authBotId;
const msg = 'Bot does not exist or you do not have permission to access it';
const instance = cmInstances.find(x => x.getName() === req.query.instance);
if (instance === undefined) {
return res.status(404).render('error', {error: msg});
}
if (!req.user?.clientData?.webOperator && !req.user?.canAccessInstance(instance)) {
return res.status(404).render('error', {error: msg});
}
if (req.params.subreddit !== undefined && !req.user?.isInstanceOperator(instance) && !req.user?.subreddits.includes(req.params.subreddit)) {
return res.status(404).render('error', {error: msg});
}
req.instance = instance;
req.session.botId = instance.getName();
if(req.user?.canAccessInstance(instance)) {
req.session.authBotId = instance.getName();
}
return next();
}
const botWithPermissions = (required: boolean = false, setDefault: boolean = false) => async (req: express.Request, res: express.Response, next: Function) => {
@@ -678,7 +801,7 @@ const webClient = async (options: OperatorConfig) => {
return res.status(404).render('error', {error: msg});
}
if (req.params.subreddit !== undefined && !req.user?.isInstanceOperator(instance) && !req.user?.subreddits.includes(req.params.subreddit)) {
if (req.params.subreddit !== undefined && !req.user?.canAccessSubreddit(instance,req.params.subreddit)) {
return res.status(404).render('error', {error: msg});
}
req.bot = botInstance;
@@ -687,11 +810,6 @@ const webClient = async (options: OperatorConfig) => {
next();
}
const createUserToken = async (req: express.Request, res: express.Response, next: Function) => {
req.token = createToken(req.instance as CMInstanceInterface, req.user);
next();
}
const defaultSession = (req: express.Request, res: express.Response, next: Function) => {
if(req.session.limit === undefined) {
req.session.limit = 200;
@@ -712,7 +830,14 @@ const webClient = async (options: OperatorConfig) => {
// botUserRouter.use([ensureAuthenticated, defaultSession, botWithPermissions, createUserToken]);
// app.use(botUserRouter);
app.useAsync('/api/', [ensureAuthenticated, defaultSession, instanceWithPermissions, botWithPermissions(false), createUserToken], (req: express.Request, res: express.Response) => {
// proxy.on('proxyReq', (req) => {
// logger.debug(`Got proxy request: ${req.path}`);
// });
// proxy.on('proxyRes', (proxyRes, req, res) => {
// logger.debug(`Got proxy response: ${res.statusCode} for ${req.url}`);
// });
app.useAsync('/api/', [ensureAuthenticatedApi, defaultSession, instanceWithPermissions, botWithPermissions(false), createUserToken], (req: express.Request, res: express.Response) => {
req.headers.Authorization = `Bearer ${req.token}`
const instance = req.instance as CMInstanceInterface;
@@ -723,6 +848,10 @@ const webClient = async (options: OperatorConfig) => {
port: instance.url.port,
},
prependPath: false,
proxyTimeout: 11000,
}, (e: any) => {
logger.error(e);
res.status(500).send();
});
});
@@ -737,7 +866,7 @@ const webClient = async (options: OperatorConfig) => {
if(x.operators.includes(user.name)) {
return true;
}
return intersect(user.subreddits, x.subreddits).length > 0;
return x.bots.some(y => y.canUserAccessBot(user.name, user.subreddits));
});
if(accessibleInstance === undefined) {
@@ -752,13 +881,13 @@ const webClient = async (options: OperatorConfig) => {
next();
}
const defaultSubreddit = async (req: express.Request, res: express.Response, next: Function) => {
/* const defaultSubreddit = async (req: express.Request, res: express.Response, next: Function) => {
if(req.bot !== undefined && req.query.subreddit === undefined) {
const firstAccessibleSub = req.bot.subreddits.find(x => req.user?.isInstanceOperator(req.instance) || req.user?.subreddits.includes(x));
const firstAccessibleSub = req.bot.managers.find(x => req.user?.isInstanceOperator(req.instance) || req.user?.subreddits.includes(x));
req.query.subreddit = firstAccessibleSub;
}
next();
}
}*/
const initHeartbeat = async (req: express.Request, res: express.Response, next: Function) => {
if(!init) {
@@ -780,19 +909,41 @@ const webClient = async (options: OperatorConfig) => {
}
const migrationRedirect = async (req: express.Request, res: express.Response, next: Function) => {
const user = req.user as Express.User;
const instance = req.instance as CMInstance;
if(instance.bots.length === 0 && instance?.ranMigrations === false && instance?.migrationBlocker !== undefined) {
if(!user.isInstanceOperator(instance)) {
return res.render('error-authenticated', {
error: `A database migration, which requires manual confirmation by its <strong>Operator</strong>, is required before this CM instance can finish starting up.`,
// @ts-ignore
...req.instancesViewData
})
}
return res.render('migrations', {
type: 'app',
ranMigrations: instance.ranMigrations,
migrationBlocker: instance.migrationBlocker,
instance: instance.friendly
instance: instance.friendly,
// @ts-ignore
...req.instancesViewData
});
}
return next();
};
app.getAsync('/', [initHeartbeat, redirectBotsNotAuthed, ensureAuthenticated, defaultSession, defaultInstance, instanceWithPermissions, migrationRedirect, botWithPermissions(false, true), createUserToken], async (req: express.Request, res: express.Response) => {
const redirectNoBots = async (req: express.Request, res: express.Response, next: Function) => {
const i = req.instance as CMInstance;
if (i.bots.length === 0) {
// assuming user is doing first-time setup and this is the default localhost bot
return res.redirect(`/auth/helper?instance=${i.getName()}`);
}
next();
}
app.getAsync('/', [initHeartbeat, redirectBotsNotAuthed, ensureAuthenticated, defaultSession, defaultInstance, instanceWithPermissions, instancesViewData, migrationRedirect, redirectNoBots, botWithPermissions(false, true), createUserToken], async (req: express.Request, res: express.Response) => {
const user = req.user as Express.User;
const instance = req.instance as CMInstance;
@@ -801,19 +952,6 @@ const webClient = async (options: OperatorConfig) => {
const sort = req.session.sort;
const level = req.session.level;
const shownInstances = cmInstances.reduce((acc: CMInstance[], curr) => {
const isBotOperator = req.user?.isInstanceOperator(curr);
if(user?.clientData?.webOperator) {
// @ts-ignore
return acc.concat({...curr.getData(), canAccessLocation: true, isOperator: isBotOperator});
}
if(!isBotOperator && !req.user?.canAccessInstance(curr)) {
return acc;
}
// @ts-ignore
return acc.concat({...curr.getData(), canAccessLocation: isBotOperator, isOperator: isBotOperator, botId: curr.getName()});
},[]);
let resp;
try {
resp = await got.get(`${instance.normalUrl}/status`, {
@@ -835,8 +973,8 @@ const webClient = async (options: OperatorConfig) => {
refreshClient({host: instance.host, secret: instance.secret});
const isOp = req.user?.isInstanceOperator(instance);
return res.render('offline', {
instances: shownInstances,
instanceId: (req.instance as CMInstance).getName(),
// @ts-ignore
...req.instancesViewData,
isOperator: isOp,
// @ts-ignore
logs: filterLogs((isOp ? instance.logs : instance.logs.filter(x => x.user === undefined || x.user.includes(req.user.name))), {limit, sort, level}),
@@ -867,11 +1005,31 @@ const webClient = async (options: OperatorConfig) => {
const isOp = req.user?.isInstanceOperator(instance);
// const bots = resp.bots.map((x: BotStatusResponse) => {
// return {
// ...x,
// subreddits: x.subreddits.map(y => {
// return {
// ...y,
// guests: y.guests.map(z => {
// const d = z.expiresAt === undefined ? undefined : dayjs(z.expiresAt);
// return {
// ...z,
// relative: d === undefined ? 'Never' : dayjs.duration(d.diff(dayjs())).humanize(),
// date: d === undefined ? 'Never' : d.format('YYYY-MM-DD HH:mm:ssZ')
// }
// })
// }
// })
// }
// });
res.render('status', {
instances: shownInstances,
// @ts-ignore
...req.instancesViewData,
bots: resp.bots,
now: dayjs().add(1, 'minute').format('YYYY-MM-DDTHH:mm'),
botId: (req.instance as CMInstance).getName(),
instanceId: (req.instance as CMInstance).getName(),
isOperator: isOp,
system: isOp ? {
// @ts-ignore
@@ -902,14 +1060,19 @@ const webClient = async (options: OperatorConfig) => {
});
});
app.getAsync('/guest', [ensureAuthenticatedApi, defaultSession, instanceWithPermissions, botWithPermissions(true)], async (req: express.Request, res: express.Response) => {
const {subreddit} = req.query as any;
return res.status(req.user?.isSubredditGuest(req.bot, subreddit) ? 200 : 403).send();
});
app.postAsync('/config', [ensureAuthenticatedApi, defaultSession, instanceWithPermissions, botWithPermissions(true)], async (req: express.Request, res: express.Response) => {
const {subreddit} = req.query as any;
const {location, data, reason = 'Updated through CM Web', create = false} = req.body as any;
const client = new ExtendedSnoowrap({
userAgent,
clientId,
clientSecret,
clientId: clientCredentials.clientId,
clientSecret: clientCredentials.clientSecret,
accessToken: req.user?.clientData?.token
});
@@ -1340,14 +1503,18 @@ const webClient = async (options: OperatorConfig) => {
}
}
const addBot = async (bot: CMInstanceInterface, userPayload: any, botPayload: any) => {
const addBot = async (inviteId: string, botPayload: any) => {
const cmInstance = cmInstances.find(x => x.invites.includes(inviteId));
if(cmInstance === undefined) {
return {success: false, error: 'Could not determine CM instance to add bot to based on invite id (invite id was not found)'};
}
try {
const token = createToken(bot, userPayload);
const resp = await got.post(`${bot.normalUrl}/bot`, {
body: JSON.stringify(botPayload),
const resp = await got.post(`${cmInstance.normalUrl}/bot`, {
json: botPayload,
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${token}`,
'Authorization': `Bearer ${cmInstance.getToken()}`,
}
}).json() as object;
return {success: true, ...resp};

View File

@@ -18,6 +18,9 @@ abstract class CMUser<Instance, Bot, SubredditEntity> implements IUser {
public abstract accessibleBots(bots: Bot[]): Bot[]
public abstract canAccessSubreddit(val: Bot, name: string): boolean;
public abstract accessibleSubreddits(bot: Bot): SubredditEntity[]
public abstract isSubredditGuest(val: Bot, name: string): boolean;
public abstract isSubredditMod(val: Bot, name: string): boolean;
public abstract getModeratedSubreddits(val: Bot): SubredditEntity[]
}
export default CMUser;

View File

@@ -1,6 +1,6 @@
import {BotInstance, CMInstanceInterface} from "../../interfaces";
import CMUser from "./CMUser";
import {intersect, parseRedditEntity} from "../../../util";
import {BotInstance, CMInstanceInterface} from "../interfaces";
class ClientUser extends CMUser<CMInstanceInterface, BotInstance, string> {
@@ -9,15 +9,15 @@ class ClientUser extends CMUser<CMInstanceInterface, BotInstance, string> {
}
canAccessInstance(val: CMInstanceInterface): boolean {
return this.isInstanceOperator(val) || intersect(this.subreddits, val.subreddits.map(x => parseRedditEntity(x).name)).length > 0;
return this.isInstanceOperator(val) || val.bots.filter(x => x.canUserAccessBot(this.name, this.subreddits)).length > 0;
}
canAccessBot(val: BotInstance): boolean {
return this.isInstanceOperator(val.instance) || intersect(this.subreddits, val.subreddits.map(x => parseRedditEntity(x).name)).length > 0;
return this.isInstanceOperator(val.instance) || val.canUserAccessBot(this.name, this.subreddits);
}
canAccessSubreddit(val: BotInstance, name: string): boolean {
return this.isInstanceOperator(val.instance) || this.subreddits.map(x => x.toLowerCase()).includes(parseRedditEntity(name).name.toLowerCase());
return this.isInstanceOperator(val.instance) || val.canUserAccessSubreddit(name, this.name, this.subreddits);
}
accessibleBots(bots: BotInstance[]): BotInstance[] {
@@ -28,12 +28,32 @@ class ClientUser extends CMUser<CMInstanceInterface, BotInstance, string> {
if (this.isInstanceOperator(x.instance)) {
return true;
}
return intersect(this.subreddits, x.subreddits.map(y => parseRedditEntity(y).name)).length > 0
return x.canUserAccessBot(this.name, this.subreddits);
//return intersect(this.subreddits, x.managers.map(y => parseRedditEntity(y).name)).length > 0
});
}
accessibleSubreddits(bot: BotInstance): string[] {
return this.isInstanceOperator(bot.instance) ? bot.subreddits.map(x => parseRedditEntity(x).name) : intersect(this.subreddits, bot.subreddits.map(x => parseRedditEntity(x).name));
return this.isInstanceOperator(bot.instance) ? bot.getSubreddits() : bot.getAccessibleSubreddits(this.name, this.subreddits);
}
isSubredditGuest(val: BotInstance, name: string): boolean {
const normalName = parseRedditEntity(name).name;
const manager = val.managers.find(x => x.subredditNormal === normalName);
if(manager !== undefined) {
return manager.guests.some(y => y.name.toLowerCase() === this.name.toLowerCase());
}
return false;
}
isSubredditMod(val: BotInstance, name: string): boolean {
const normalName = parseRedditEntity(name).name;
return this.canAccessSubreddit(val, name) && this.subreddits.map(x => parseRedditEntity(name).name).includes(normalName);
}
getModeratedSubreddits(val: BotInstance): string[] {
const normalSubs = this.subreddits.map(x => parseRedditEntity(x).name);
return val.managers.filter(x => normalSubs.includes(x.subredditNormal)).map(x => x.subredditNormal);
}
}

View File

@@ -1,9 +1,9 @@
import {BotInstance, CMInstanceInterface} from "../../interfaces";
import CMUser from "./CMUser";
import {intersect, parseRedditEntity} from "../../../util";
import {App} from "../../../App";
import Bot from "../../../Bot";
import {Manager} from "../../../Subreddit/Manager";
import {BotInstance, CMInstanceInterface} from "../interfaces";
class ServerUser extends CMUser<App, Bot, Manager> {
@@ -16,23 +16,49 @@ class ServerUser extends CMUser<App, Bot, Manager> {
}
canAccessInstance(val: App): boolean {
return this.isOperator || val.bots.filter(x => intersect(this.subreddits, x.subManagers.map(y => y.subreddit.display_name))).length > 0;
return this.isOperator || val.bots.filter(x => x.canUserAccessBot(this.name, this.subreddits)).length > 0;
}
canAccessBot(val: Bot): boolean {
return this.isOperator || intersect(this.subreddits, val.subManagers.map(y => y.subreddit.display_name)).length > 0;
return this.isOperator || val.canUserAccessBot(this.name, this.subreddits);
}
accessibleBots(bots: Bot[]): Bot[] {
return this.isOperator ? bots : bots.filter(x => intersect(this.subreddits, x.subManagers.map(y => y.subreddit.display_name)).length > 0);
return this.isOperator ? bots : bots.filter(x => x.canUserAccessBot(this.name, this.subreddits));
}
canAccessSubreddit(val: Bot, name: string): boolean {
return this.isOperator || this.subreddits.includes(parseRedditEntity(name).name) && val.subManagers.some(y => y.subreddit.display_name.toLowerCase() === parseRedditEntity(name).name.toLowerCase());
const normalName = parseRedditEntity(name).name;
return this.isOperator || this.accessibleSubreddits(val).some(x => x.toNormalizedManager().subredditNormal === normalName);
}
accessibleSubreddits(bot: Bot): Manager[] {
return this.isOperator ? bot.subManagers : bot.subManagers.filter(x => intersect(this.subreddits, [x.subreddit.display_name]).length > 0);
if(this.isOperator) {
return bot.subManagers;
}
const subs = bot.getAccessibleSubreddits(this.name, this.subreddits);
return bot.subManagers.filter(x => subs.includes(x.toNormalizedManager().subredditNormal));
}
/**
 * True when this user appears in the guest list of the manager the bot runs
 * for the given subreddit. False when the bot runs no manager for it.
 */
isSubredditGuest(val: Bot, name: string): boolean {
    const target = parseRedditEntity(name).name;
    const match = val.subManagers.find(m => parseRedditEntity(m.subreddit.display_name).name === target);
    // Short-circuits to false when no manager exists for the subreddit
    return match !== undefined
        && match.toNormalizedManager().guests.some(g => g.name === this.name);
}
/**
 * True when the bot runs a manager for the given subreddit AND this user
 * moderates that subreddit.
 */
isSubredditMod(val: Bot, name: string): boolean {
    const target = parseRedditEntity(name).name;
    const botRunsSub = val.subManagers.some(m => parseRedditEntity(m.subreddit.display_name).name === target);
    if (!botRunsSub) {
        return false;
    }
    return this.subreddits.map(sub => parseRedditEntity(sub).name).includes(target);
}
/**
 * Managers on the given bot whose subreddit this user moderates.
 */
getModeratedSubreddits(val: Bot): Manager[] {
    const moderated = new Set(this.subreddits.map(sub => parseRedditEntity(sub).name));
    // NOTE(review): `display_name` is compared un-normalized here while the
    // user's list IS normalized; the sibling methods normalize both sides via
    // parseRedditEntity — confirm display_name never carries an r/ prefix.
    return val.subManagers.filter(m => moderated.has(m.subreddit.display_name));
}
}

View File

@@ -49,6 +49,7 @@ const sub: SubredditDataResponse = {
heartbeatHuman: "-",
indicator: "-",
logs: [],
guests: [],
maxWorkers: 0,
name: "-",
pollingInfo: [],

View File

@@ -1,6 +1,8 @@
import {RunningState} from "../../Subreddit/Manager";
import {LogInfo, ManagerStats} from "../../Common/interfaces";
import {BotInstance} from "../interfaces";
import {BotConnection, LogInfo, ManagerStats} from "../../Common/interfaces";
import {Guest, GuestAll} from "../../Common/Entities/Guest/GuestInterfaces";
import {URL} from "url";
import {Dayjs} from "dayjs";
export interface BotStats {
startedAtHuman: string,
@@ -50,6 +52,7 @@ export interface SubredditDataResponse {
heartbeatHuman?: string
heartbeat: number
retention: string
guests: (Guest | GuestAll)[]
}
export interface BotStatusResponse {
@@ -77,14 +80,65 @@ export interface IUser {
tokenExpiresAt?: number
}
/**
 * Wire-format summary of a single subreddit Manager as exchanged between
 * server and client.
 */
export interface ManagerResponse {
name: string,
// Subreddit name as reported by the server; NormalizedManagerResponse adds a
// normalized form, so this one is presumably not normalized — confirm
subreddit: string,
// Users granted guest access to this manager
guests: Guest[]
}
/**
 * ManagerResponse augmented with a normalized subreddit name so callers can
 * compare subreddits without worrying about formatting differences.
 */
export interface NormalizedManagerResponse extends ManagerResponse {
// Normalized subreddit name — presumably produced by parseRedditEntity; verify
subredditNormal: string
}
/**
 * Wire-format snapshot of one bot: its identity, the managers it runs, and
 * its current running state.
 */
export interface BotInstanceResponse {
botName: string
//botLink: string
// Populated when the bot is in an error state
error?: string
managers: ManagerResponse[]
nanny?: string
running: boolean
}
/**
 * Behavior contract layered on top of a bot's wire data: subreddit lookup and
 * per-user access checks. `user` is a username; `subreddits` is the list of
 * subreddits that user moderates.
 */
export interface BotInstanceFunctions {
// All subreddits the bot runs, optionally normalized
getSubreddits: (normalized?: boolean) => string[]
// Subreddits (from the bot's set) the given user may access
getAccessibleSubreddits: (user: string, subreddits: string[]) => string[]
getManagerNames: () => string[]
// Managers on which the user has guest access
getGuestManagers: (user: string) => NormalizedManagerResponse[]
getGuestSubreddits: (user: string) => string[]
canUserAccessBot: (user: string, subreddits: string[]) => boolean
canUserAccessSubreddit: (subreddit: string, user: string, subreddits: string[]) => boolean
}
/**
 * Client-side bot object: wire data plus access-check functions, with managers
 * upgraded to their normalized form and a back-reference to the owning instance.
 */
export interface BotInstance extends BotInstanceResponse, BotInstanceFunctions {
// Narrows BotInstanceResponse.managers to the normalized variant
managers: NormalizedManagerResponse[]
instance: CMInstanceInterface
}
/**
 * A ContextMod server instance as known to the client: connection details,
 * operator info, health/migration state, and the bots it hosts.
 */
export interface CMInstanceInterface extends BotConnection {
// Human-friendly display name for the instance
friendly?: string
operators: string[]
operatorDisplay: string
url: URL,
// Stringified/normalized form of `url` for comparisons — confirm exact format
normalUrl: string,
// Timestamp of the last health check, when one has run
lastCheck?: number
online: boolean
subreddits: string[]
bots: BotInstance[]
error?: string
// Whether database migrations have completed on the server
ranMigrations: boolean
// Reason migrations are blocked, when they are
migrationBlocker?: string
invites: string[]
}
export interface HeartbeatResponse {
ranMigrations: boolean
migrationBlocker?: string
subreddits: string[]
operators: string[]
operatorDisplay?: string
friendly?: string
bots: BotInstance[]
bots: BotInstanceResponse[]
invites: string[]
}
@@ -97,4 +151,14 @@ export interface InviteData {
redirectUri: string
creator: string
overwrite?: boolean
initialConfig?: string
expiresAt?: number | Dayjs
guests?: string[]
}
/**
 * Payload for inviting a bot to a subreddit, optionally seeding guests, an
 * initial configuration, and an expiration for the invite.
 */
export interface SubredditInviteData {
subreddit: string
// Usernames to grant guest access once the invite is accepted — confirm against consumer
guests?: string[]
// Initial wiki/config content to install for the subreddit
initialConfig?: string
// Accepts either an epoch number or a Dayjs instance
expiresAt?: number | Dayjs
}

View File

@@ -2,12 +2,26 @@ import {Request, Response, NextFunction} from "express";
import Bot from "../../Bot";
import ServerUser from "../Common/User/ServerUser";
export const authUserCheck = (userRequired: boolean = true) => async (req: Request, res: Response, next: Function) => {
export type AuthEntityType = 'user' | 'operator' | 'machine';
export const authUserCheck = (allowedEntityTypes: AuthEntityType | AuthEntityType[] = ['user']) => async (req: Request, res: Response, next: Function) => {
const types = Array.isArray(allowedEntityTypes) ? allowedEntityTypes : [allowedEntityTypes];
if (req.isAuthenticated()) {
if (userRequired && (req.user as ServerUser).machine) {
return res.status(403).send('Must be authenticated as a user to access this route');
if(types.length === 0) {
return next();
}
return next();
if(types.includes('machine') && (req.user as ServerUser).machine) {
return next();
}
if(types.includes('operator') && req.user.isInstanceOperator(req.botApp)) {
return next();
}
if(types.includes('user') && !(req.user as ServerUser).machine) {
return next();
}
req.logger.error(`User is authenticated but does not sufficient permissions. Required: ${types.join(', ')} | User: ${req.user.name}`);
return res.status(403).send('Must be authenticated to access this route');
} else {
return res.status(401).send('Must be authenticated to access this route');
}
@@ -38,7 +52,7 @@ export const botRoute = (required = true) => async (req: Request, res: Response,
return next();
}
export const subredditRoute = (required = true) => async (req: Request, res: Response, next: Function) => {
export const subredditRoute = (required = true, modRequired = false, guestRequired = false) => async (req: Request, res: Response, next: Function) => {
const bot = req.serverBot;
@@ -57,7 +71,7 @@ export const subredditRoute = (required = true) => async (req: Request, res: Res
return res.status(400).send('Cannot access route for subreddit you do not manage or is not run by the bot')
}
if (!req.user?.canAccessSubreddit(bot, subreddit)) {
if (!req.user?.canAccessSubreddit(bot, subreddit) || (modRequired && !req.user?.isSubredditMod(bot, subreddit)) || (guestRequired && !req.user?.isSubredditGuest(bot, subreddit))) {
return res.status(400).send('Cannot access route for subreddit you do not manage or is not run by the bot')
}

Some files were not shown because too many files have changed in this diff Show More