Compare commits

...

39 Commits

Author SHA1 Message Date
FoxxMD
aaed0d3419 Merge branch 'edge' 2022-01-21 10:46:11 -05:00
FoxxMD
2a77c71645 fix(usernotes): Fix wiki entity handling to avoid unhandled rejection
Since snoowrap's WikiPage isn't a "real" object, setting it as a property on the class means that if it rejects, the whole application crashes. Fix this by building the wiki proxy every time we need it, before awaiting the promise for edit/retrieval, so that the promise scope is bound to the function we are in (which has a try-catch)
2022-01-20 14:10:39 -05:00
FoxxMD
780e5c185e refactor(author filter): Strongly structure comparison/matching data for more consistent manipulation and output
* Use interface for comparison results at both criteria property level and criteria level
* Implement summary functions to build string results of comparisons
* Output all comparisons to debug and provide summaries to verbose (when applicable)
2022-01-20 14:08:54 -05:00
FoxxMD
38e2a4e69a fix(filter): Missing return on flair failure comparison 2022-01-19 15:49:44 -05:00
FoxxMD
7e0c34b6a3 fix(userflair): Fix wrong assignment for css 2022-01-19 13:10:11 -05:00
FoxxMD
e3ceb90d6f fix(filter): Fix default excludeCondition type
* Expected (prior) behavior is that all exclude criteria must pass, not just one
* Fix missing AND condition logic when all conditions pass
2022-01-19 13:09:45 -05:00
FoxxMD
6977e3bcdf feat(author): Add flair template criteria for author/submission
* Add filtering by flairTemplate id for author/submission
* Refactor flair properties for author/submission to accept string or array of strings
2022-01-19 12:48:58 -05:00
FoxxMD
f382cddc2a fix(filter): Change array merging behavior for authorIs defaults to be more sane
* Don't just overwrite (duh)
* Drop any default filters that include object keys that are also present in user-defined filters -- this way user-defined always takes precedence on merge
2022-01-19 11:52:18 -05:00
FoxxMD
99a5642bdf fix(ui): Change time formatting from 12 to 24 hour 2022-01-18 16:49:07 -05:00
FoxxMD
174d832ab0 docs: Pretty up readme header 2022-01-18 16:08:40 -05:00
FoxxMD
3ee7586fe2 fix(approve): Fix touched entity 2022-01-18 13:37:56 -05:00
FoxxMD
e2c724b4ae feat(approve): Implement approving parent submission of comment 2022-01-18 13:37:22 -05:00
FoxxMD
d581f19a36 feat(logs): Use log objects in api to improve parsing client-side
* Add options for /logs endpoint to stream objects instead of strings
* Always return log objects from /status endpoint -- fixes bug where all bots/subreddits got lines from logs that had newlines
* Return context-aware, formatted log lines to client to reduce line length, i.e. if returning to botA -> subA then do not need to include labels for botA, subA #40
* Shorten timestamp to just time and wrap full timestamp in tooltip #40
* Emit log objects to client to reduce parsing complexity (don't have to regex for bot/subreddit name)
2022-01-18 12:59:59 -05:00
FoxxMD
48dea24bea feat: Improve first-run display in ui and add system view
* Fix bugs in UI when bot does not have a name (configured incorrectly)
* Implement instance system log view for operators
2022-01-18 10:38:39 -05:00
FoxxMD
5fc2a693a0 fix(config): Fix empty yaml config document initialization 2022-01-18 00:06:52 -05:00
FoxxMD
7be0722140 fix(bot): Fix limit rate expiration getter when there is no client initialized 2022-01-18 00:06:24 -05:00
FoxxMD
6ab9fe4bf4 feat(config): Implement persisting bots from invite process to application and config
* write to config when bot is added
* replace/add based on existing bot
* implement specify instance from instances user is operator of
* implement specify subreddits to run on using comma-separated list
* rewrite invite flow ending to be more clear on results and next steps
2022-01-17 17:47:27 -05:00
FoxxMD
5811af0342 feat(config): Refactor config parsing to preserve comments and enable writing
* use node-comment and yaml@next to keep comment information intact
* store ast/source version of parsed config for operator
* implement generic yaml/json operator config classes to keep everything organized and simplify marshalling source to js/string
* refactor file parsing and json/yaml parsing to have better single responsibility
2022-01-17 15:51:43 -05:00
FoxxMD
ed2924264a feat(util): Better check for file/dir permissions 2022-01-17 11:18:23 -05:00
FoxxMD
e9394ccf2e refactor(tooling): Ignore sqlite files 2022-01-17 09:52:18 -05:00
FoxxMD
dec72f95c6 docs: Add discord invite link 2022-01-14 16:42:01 -05:00
FoxxMD
bc7eff8928 Merge branch 'edge' 2022-01-14 15:27:09 -05:00
FoxxMD
80c11b2c7f refactor(filter): Consolidate authorIs logic and add additional control to exclude logic
* Add excludeCondition to control how exclude sets are tested (and/or)
* Refactor authorIs logic from check/rule/action into standalone function (DRY)
* Simplify filter defaults -- don't need to specify automoderator since it is always a mod
2022-01-14 10:51:29 -05:00
FoxxMD
e6a2a86828 feat(config): Implement default filter criteria behavior
* Add default behavior config to operator and manager config
* Implement configurable behavior when filter is present on check
* Add defaults to exclude mods and automoderator from checks
2022-01-13 16:46:32 -05:00
FoxxMD
96749be571 refactor(polling): Simplify and cleanup all polling logic
* Remove unused clearProcessing code
* Use same data structures (Map) for storing polling objects in both Manager and Bot to reduce cognitive load and re-use some logic
* Rename "mod" streams to "shared" streams
* Implement detection and updating of polling when manager config changes
* Implement detection and updating of shared streams on manager config update
* Use shared retry handler for manager polling to better handle general reddit api issues (all polling stops faster)
* Move initial polling buffer into polling object (instead of in manager) for better logic encapsulation and add debug logging for it
* Add more debug logging for manager/bot poll building
2022-01-13 11:39:16 -05:00
FoxxMD
6b7e8e7749 feat(polling): Implement shared streams for all polling sources
* Refactor polling config to use new 'shared' string list of polling sources and deprecate 'sharedMod' property
* Refactor how shared sources are built to look for shared intention in manager polling options before creating
* Implement continuity check for comment/submission polling to ensure no activities are missed
* Add debug logging to polling
2022-01-12 15:47:43 -05:00
FoxxMD
43b29432a2 refactor(auth): Refactor auth data structures to consolidate logic
* Add abstract user class with auth methods with implementations for client/server
* Refactor client/server logic to use class methods instead of inline auth checks

Closes #71
2022-01-12 09:57:38 -05:00
FoxxMD
ff84946068 feat(regex): Experimental support for parsing regex expressions from fetched URL
* Support fetching from reddit wiki
* Support fetching from raw URL
* Support parsing and fetching from gist, github blob, and regexr (very experimental)
2022-01-11 14:05:57 -05:00
FoxxMD
7cdde99864 fix(recent): Potential fix for reddit ACID issues on history retrieval 2022-01-11 13:00:51 -05:00
FoxxMD
8eee1fe2e1 fix(recent): Remove code that should have been deleted during refactor
Refactored recent to use batch subreddit testing but forgot to remove the old, individual subreddit testing code, so activities were being counted twice
2022-01-11 10:15:16 -05:00
FoxxMD
6fc09864f6 fix: Don't delete property from object
Object passed by ref, duh
2022-01-11 10:13:48 -05:00
FoxxMD
1510980ce3 fix(util): Ensure provided state description is reattached to strong sub state 2022-01-11 10:13:14 -05:00
FoxxMD
56005f0f28 fix(bot): Fix own profile detection when building managers 2022-01-11 09:52:44 -05:00
FoxxMD
03b655515c fix(server): Fix logs not persisting for managers
* Change manager acquisition so all managers belong to a bot before they start logging so all logs are captured correctly
* Fix log capture logic that prevented all subreddits from being populated
2022-01-11 09:45:25 -05:00
FoxxMD
edd874f356 fix(server): Correctly filter bots and managers on auth on server 2022-01-11 09:15:52 -05:00
FoxxMD
7f13debe3b fix(client): Make sure all moderated subreddits are fetched 2022-01-10 16:17:24 -05:00
Matt Foxx
1565bdbf1a Merge pull request #67 from rysie/feature/dry-run-buttons
Run/Dry run buttons
2022-01-10 14:54:42 -05:00
FoxxMD
ec4cee8c77 refactor(ui): Fix and simplify button logic
* Fix url query selector to constrain to sub
* Use shared class between run buttons to simplify class modification and click event
2022-01-10 14:54:17 -05:00
Marcin Macinski
c5b27628b0 feat(ui): Run/Dry run buttons 2022-01-07 23:32:12 +01:00
58 changed files with 3435 additions and 1171 deletions

9
.gitignore vendored
View File

@@ -383,3 +383,12 @@ dist
**/src/**/*.js
!src/Web/assets/public/yaml/*
**/src/**/*.map
/**/*.sqlite
/**/*.bak
*.yaml
*.json5
!src/Schema/*.json
!docs/**/*.json5
!docs/**/*.yaml
!docs/**/*.json

View File

@@ -1,6 +1,7 @@
[![Latest Release](https://img.shields.io/github/v/release/foxxmd/context-mod)](https://github.com/FoxxMD/context-mod/releases)
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
[![Docker Pulls](https://img.shields.io/docker/pulls/foxxmd/context-mod)](https://hub.docker.com/r/foxxmd/context-mod)
# ContextMod [![Latest Release](https://img.shields.io/github/v/release/foxxmd/context-mod)](https://github.com/FoxxMD/context-mod/releases) [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) [![Docker Pulls](https://img.shields.io/docker/pulls/foxxmd/context-mod)](https://hub.docker.com/r/foxxmd/context-mod)
<img src="/docs/logo.png" align="right"
alt="ContextMod logo" width="180" height="176">
**Context Mod** (CM) is an event-based, [reddit](https://reddit.com) moderation bot built on top of [snoowrap](https://github.com/not-an-aardvark/snoowrap) and written in [typescript](https://www.typescriptlang.org/).

BIN
docs/logo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

202
package-lock.json generated
View File

@@ -20,6 +20,7 @@
"cache-manager": "^3.4.4",
"cache-manager-redis-store": "^2.0.0",
"commander": "^8.0.0",
"comment-json": "^4.1.1",
"cookie-parser": "^1.3.5",
"dayjs": "^1.10.5",
"deepmerge": "^4.2.2",
@@ -37,7 +38,6 @@
"he": "^1.2.0",
"http-proxy": "^1.18.1",
"image-size": "^1.0.0",
"js-yaml": "^4.1.0",
"json5": "^2.2.0",
"jsonwebtoken": "^8.5.1",
"leven": "^3.1.0",
@@ -67,6 +67,7 @@
"winston-daily-rotate-file": "^4.5.5",
"winston-duplex": "^0.1.1",
"winston-transport": "^4.4.0",
"yaml": "2.0.0-10",
"zlib": "^1.0.5"
},
"devDependencies": {
@@ -96,6 +97,7 @@
"@types/string-similarity": "^4.0.0",
"@types/tcp-port-used": "^1.0.0",
"@types/triple-beam": "^1.3.2",
"ts-essentials": "^9.1.2",
"ts-json-schema-generator": "^0.93.0",
"typescript-json-schema": "^0.50.1"
},
@@ -758,16 +760,16 @@
"integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==",
"dev": true
},
"node_modules/argparse": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
"integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="
},
"node_modules/array-flatten": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
"integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI="
},
"node_modules/array-timsort": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/array-timsort/-/array-timsort-1.0.3.tgz",
"integrity": "sha512-/+3GRL7dDAGEfM6TseQk/U+mi18TU2Ms9I3UlLdUMhz2hbvGNTKdj9xniwXfUqgYhHxRx0+8UnKkvlNwVU+cWQ=="
},
"node_modules/arrify": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz",
@@ -1235,6 +1237,21 @@
"node": ">= 12"
}
},
"node_modules/comment-json": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/comment-json/-/comment-json-4.1.1.tgz",
"integrity": "sha512-v8gmtPvxhBlhdRBLwdHSjGy9BgA23t9H1FctdQKyUrErPjSrJcdDMqBq9B4Irtm7w3TNYLQJNH6ARKnpyag1sA==",
"dependencies": {
"array-timsort": "^1.0.3",
"core-util-is": "^1.0.2",
"esprima": "^4.0.1",
"has-own-prop": "^2.0.0",
"repeat-string": "^1.6.1"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/component-emitter": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz",
@@ -1298,8 +1315,7 @@
"node_modules/core-util-is": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz",
"integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==",
"optional": true
"integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ=="
},
"node_modules/cors": {
"version": "2.8.5",
@@ -1547,9 +1563,9 @@
}
},
"node_modules/engine.io": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.1.0.tgz",
"integrity": "sha512-ErhZOVu2xweCjEfYcTdkCnEYUiZgkAcBBAhW4jbIvNG8SLU3orAqoJCiytZjYF7eTpVmmCrLDjLIEaPlUAs1uw==",
"version": "6.1.1",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.1.1.tgz",
"integrity": "sha512-AyMc20q8JUUdvKd46+thc9o7yCZ6iC6MoBCChG5Z1XmFMpp+2+y/oKvwpZTUJB0KCjxScw1dV9c2h5pjiYBLuQ==",
"dependencies": {
"@types/cookie": "^0.4.1",
"@types/cors": "^2.8.12",
@@ -1668,6 +1684,18 @@
"node": ">=0.8.0"
}
},
"node_modules/esprima": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz",
"integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==",
"bin": {
"esparse": "bin/esparse.js",
"esvalidate": "bin/esvalidate.js"
},
"engines": {
"node": ">=4"
}
},
"node_modules/etag": {
"version": "1.8.1",
"resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz",
@@ -1888,9 +1916,9 @@
"integrity": "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw=="
},
"node_modules/follow-redirects": {
"version": "1.14.6",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.6.tgz",
"integrity": "sha512-fhUl5EwSJbbl8AR+uYL2KQDxLkdSjZGR36xy46AO7cOMTrCMON6Sa28FmAnC2tRTDbd/Uuzz3aJBv7EBN7JH8A==",
"version": "1.14.7",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.7.tgz",
"integrity": "sha512-+hbxoLbFMbRKDwohX8GkTataGqO6Jb7jGwpAlwgy2bIz25XtRm7KEzJM76R1WiNT5SwZkX4Y75SwBolkpmE7iQ==",
"funding": [
{
"type": "individual",
@@ -2084,11 +2112,11 @@
}
},
"node_modules/google-p12-pem": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.1.2.tgz",
"integrity": "sha512-tjf3IQIt7tWCDsa0ofDQ1qqSCNzahXDxdAGJDbruWqu3eCg5CKLYKN+hi0s6lfvzYZ1GDVr+oDF9OOWlDSdf0A==",
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.1.3.tgz",
"integrity": "sha512-MC0jISvzymxePDVembypNefkAQp+DRP7dBE+zNUPaIjEspIlYg0++OrsNr248V9tPbz6iqtZ7rX1hxWA5B8qBQ==",
"dependencies": {
"node-forge": "^0.10.0"
"node-forge": "^1.0.0"
},
"bin": {
"gp12-pem": "build/src/bin/gp12-pem.js"
@@ -2204,6 +2232,14 @@
"node": ">=4"
}
},
"node_modules/has-own-prop": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/has-own-prop/-/has-own-prop-2.0.0.tgz",
"integrity": "sha512-Pq0h+hvsVm6dDEa8x82GnLSYHOzNDt7f0ddFa3FqcQlgzEiptPqL+XrOJNavjOzSYiYWIrgeVYYgGlLmnxwilQ==",
"engines": {
"node": ">=8"
}
},
"node_modules/has-unicode": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz",
@@ -2488,17 +2524,6 @@
"resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz",
"integrity": "sha1-rqdNXmHB+JlhO/ZL2mbUx48v0X0="
},
"node_modules/js-yaml": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
"integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
"dependencies": {
"argparse": "^2.0.1"
},
"bin": {
"js-yaml": "bin/js-yaml.js"
}
},
"node_modules/jsbn": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
@@ -2911,11 +2936,11 @@
}
},
"node_modules/node-forge": {
"version": "0.10.0",
"resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz",
"integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==",
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.2.1.tgz",
"integrity": "sha512-Fcvtbb+zBcZXbTTVwqGA5W+MKBj56UjVRevvchv5XrcyXbmNdesfZL37nlcWOfpgHhgmxApw3tQbTr4CqNmX4w==",
"engines": {
"node": ">= 6.0.0"
"node": ">= 6.13.0"
}
},
"node_modules/normalize-url": {
@@ -3381,6 +3406,14 @@
"node": ">=4"
}
},
"node_modules/repeat-string": {
"version": "1.6.1",
"resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz",
"integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=",
"engines": {
"node": ">=0.10"
}
},
"node_modules/request": {
"version": "2.88.2",
"resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz",
@@ -4083,6 +4116,15 @@
"resolved": "https://registry.npmjs.org/triple-beam/-/triple-beam-1.3.0.tgz",
"integrity": "sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw=="
},
"node_modules/ts-essentials": {
"version": "9.1.2",
"resolved": "https://registry.npmjs.org/ts-essentials/-/ts-essentials-9.1.2.tgz",
"integrity": "sha512-EaSmXsAhEiirrTY1Oaa7TSpei9dzuCuFPmjKRJRPamERYtfaGS8/KpOSbjergLz/Y76/aZlV9i/krgzsuWEBbg==",
"dev": true,
"peerDependencies": {
"typescript": ">=4.1.0"
}
},
"node_modules/ts-json-schema-generator": {
"version": "0.93.0",
"resolved": "https://registry.npmjs.org/ts-json-schema-generator/-/ts-json-schema-generator-0.93.0.tgz",
@@ -4580,6 +4622,14 @@
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
},
"node_modules/yaml": {
"version": "2.0.0-10",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.0.0-10.tgz",
"integrity": "sha512-FHV8s5ODFFQXX/enJEU2EkanNl1UDBUz8oa4k5Qo/sR+Iq7VmhCDkRMb0/mjJCNeAWQ31W8WV6PYStDE4d9EIw==",
"engines": {
"node": ">= 12"
}
},
"node_modules/yargs": {
"version": "16.2.0",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz",
@@ -5198,16 +5248,16 @@
"integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==",
"dev": true
},
"argparse": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
"integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="
},
"array-flatten": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
"integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI="
},
"array-timsort": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/array-timsort/-/array-timsort-1.0.3.tgz",
"integrity": "sha512-/+3GRL7dDAGEfM6TseQk/U+mi18TU2Ms9I3UlLdUMhz2hbvGNTKdj9xniwXfUqgYhHxRx0+8UnKkvlNwVU+cWQ=="
},
"arrify": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz",
@@ -5588,6 +5638,18 @@
"resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz",
"integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww=="
},
"comment-json": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/comment-json/-/comment-json-4.1.1.tgz",
"integrity": "sha512-v8gmtPvxhBlhdRBLwdHSjGy9BgA23t9H1FctdQKyUrErPjSrJcdDMqBq9B4Irtm7w3TNYLQJNH6ARKnpyag1sA==",
"requires": {
"array-timsort": "^1.0.3",
"core-util-is": "^1.0.2",
"esprima": "^4.0.1",
"has-own-prop": "^2.0.0",
"repeat-string": "^1.6.1"
}
},
"component-emitter": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz",
@@ -5639,8 +5701,7 @@
"core-util-is": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz",
"integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==",
"optional": true
"integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ=="
},
"cors": {
"version": "2.8.5",
@@ -5830,9 +5891,9 @@
}
},
"engine.io": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.1.0.tgz",
"integrity": "sha512-ErhZOVu2xweCjEfYcTdkCnEYUiZgkAcBBAhW4jbIvNG8SLU3orAqoJCiytZjYF7eTpVmmCrLDjLIEaPlUAs1uw==",
"version": "6.1.1",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.1.1.tgz",
"integrity": "sha512-AyMc20q8JUUdvKd46+thc9o7yCZ6iC6MoBCChG5Z1XmFMpp+2+y/oKvwpZTUJB0KCjxScw1dV9c2h5pjiYBLuQ==",
"requires": {
"@types/cookie": "^0.4.1",
"@types/cors": "^2.8.12",
@@ -5912,6 +5973,11 @@
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
"integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ="
},
"esprima": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz",
"integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A=="
},
"etag": {
"version": "1.8.1",
"resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz",
@@ -6107,9 +6173,9 @@
"integrity": "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw=="
},
"follow-redirects": {
"version": "1.14.6",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.6.tgz",
"integrity": "sha512-fhUl5EwSJbbl8AR+uYL2KQDxLkdSjZGR36xy46AO7cOMTrCMON6Sa28FmAnC2tRTDbd/Uuzz3aJBv7EBN7JH8A=="
"version": "1.14.7",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.7.tgz",
"integrity": "sha512-+hbxoLbFMbRKDwohX8GkTataGqO6Jb7jGwpAlwgy2bIz25XtRm7KEzJM76R1WiNT5SwZkX4Y75SwBolkpmE7iQ=="
},
"forever-agent": {
"version": "0.6.1",
@@ -6250,11 +6316,11 @@
}
},
"google-p12-pem": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.1.2.tgz",
"integrity": "sha512-tjf3IQIt7tWCDsa0ofDQ1qqSCNzahXDxdAGJDbruWqu3eCg5CKLYKN+hi0s6lfvzYZ1GDVr+oDF9OOWlDSdf0A==",
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.1.3.tgz",
"integrity": "sha512-MC0jISvzymxePDVembypNefkAQp+DRP7dBE+zNUPaIjEspIlYg0++OrsNr248V9tPbz6iqtZ7rX1hxWA5B8qBQ==",
"requires": {
"node-forge": "^0.10.0"
"node-forge": "^1.0.0"
}
},
"googleapis-common": {
@@ -6340,6 +6406,11 @@
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
"integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0="
},
"has-own-prop": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/has-own-prop/-/has-own-prop-2.0.0.tgz",
"integrity": "sha512-Pq0h+hvsVm6dDEa8x82GnLSYHOzNDt7f0ddFa3FqcQlgzEiptPqL+XrOJNavjOzSYiYWIrgeVYYgGlLmnxwilQ=="
},
"has-unicode": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz",
@@ -6551,14 +6622,6 @@
}
}
},
"js-yaml": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
"integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
"requires": {
"argparse": "^2.0.1"
}
},
"jsbn": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
@@ -6907,9 +6970,9 @@
}
},
"node-forge": {
"version": "0.10.0",
"resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz",
"integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA=="
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.2.1.tgz",
"integrity": "sha512-Fcvtbb+zBcZXbTTVwqGA5W+MKBj56UjVRevvchv5XrcyXbmNdesfZL37nlcWOfpgHhgmxApw3tQbTr4CqNmX4w=="
},
"normalize-url": {
"version": "6.1.0",
@@ -7258,6 +7321,11 @@
"redis-errors": "^1.0.0"
}
},
"repeat-string": {
"version": "1.6.1",
"resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz",
"integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc="
},
"request": {
"version": "2.88.2",
"resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz",
@@ -7791,6 +7859,13 @@
"resolved": "https://registry.npmjs.org/triple-beam/-/triple-beam-1.3.0.tgz",
"integrity": "sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw=="
},
"ts-essentials": {
"version": "9.1.2",
"resolved": "https://registry.npmjs.org/ts-essentials/-/ts-essentials-9.1.2.tgz",
"integrity": "sha512-EaSmXsAhEiirrTY1Oaa7TSpei9dzuCuFPmjKRJRPamERYtfaGS8/KpOSbjergLz/Y76/aZlV9i/krgzsuWEBbg==",
"dev": true,
"requires": {}
},
"ts-json-schema-generator": {
"version": "0.93.0",
"resolved": "https://registry.npmjs.org/ts-json-schema-generator/-/ts-json-schema-generator-0.93.0.tgz",
@@ -8175,6 +8250,11 @@
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
},
"yaml": {
"version": "2.0.0-10",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.0.0-10.tgz",
"integrity": "sha512-FHV8s5ODFFQXX/enJEU2EkanNl1UDBUz8oa4k5Qo/sR+Iq7VmhCDkRMb0/mjJCNeAWQ31W8WV6PYStDE4d9EIw=="
},
"yargs": {
"version": "16.2.0",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz",

View File

@@ -35,6 +35,7 @@
"cache-manager": "^3.4.4",
"cache-manager-redis-store": "^2.0.0",
"commander": "^8.0.0",
"comment-json": "^4.1.1",
"cookie-parser": "^1.3.5",
"dayjs": "^1.10.5",
"deepmerge": "^4.2.2",
@@ -52,7 +53,6 @@
"he": "^1.2.0",
"http-proxy": "^1.18.1",
"image-size": "^1.0.0",
"js-yaml": "^4.1.0",
"json5": "^2.2.0",
"jsonwebtoken": "^8.5.1",
"leven": "^3.1.0",
@@ -82,6 +82,7 @@
"winston-daily-rotate-file": "^4.5.5",
"winston-duplex": "^0.1.1",
"winston-transport": "^4.4.0",
"yaml": "2.0.0-10",
"zlib": "^1.0.5"
},
"devDependencies": {
@@ -111,6 +112,7 @@
"@types/string-similarity": "^4.0.0",
"@types/tcp-port-used": "^1.0.0",
"@types/triple-beam": "^1.3.2",
"ts-essentials": "^9.1.2",
"ts-json-schema-generator": "^0.93.0",
"typescript-json-schema": "^0.50.1"
},

View File

@@ -1,31 +1,62 @@
import {ActionJson, ActionConfig} from "./index";
import {ActionJson, ActionConfig, ActionOptions} from "./index";
import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import Snoowrap from "snoowrap";
import {RuleResult} from "../Rule";
import {ActionProcessResult} from "../Common/interfaces";
import Submission from "snoowrap/dist/objects/Submission";
import Comment from "snoowrap/dist/objects/Comment";
export class ApproveAction extends Action {
targets: ApproveTarget[]
getKind() {
return 'Approve';
}
constructor(options: ApproveOptions) {
super(options);
const {
targets = ['self']
} = options;
this.targets = targets;
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
const touchedEntities = [];
//snoowrap typing issue, thinks comments can't be locked
// @ts-ignore
if (item.approved) {
this.logger.warn('Item is already approved');
return {
dryRun,
success: false,
result: 'Item is already approved'
const realTargets = item instanceof Submission ? ['self'] : this.targets;
for(const target of realTargets) {
let targetItem = item;
if(target !== 'self' && item instanceof Comment) {
targetItem = await this.resources.getActivity(this.client.getSubmission(item.link_id));
}
// @ts-ignore
if (item.approved) {
const msg = `${target === 'self' ? 'Item' : 'Comment\'s parent Submission'} is already approved`;
this.logger.warn(msg);
return {
dryRun,
success: false,
result: msg
}
}
if (!dryRun) {
// make sure we have an actual item and not just a plain object from cache
if(target !== 'self' && !(targetItem instanceof Submission)) {
// @ts-ignore
targetItem = await this.client.getSubmission((item as Comment).link_id).fetch();
}
// @ts-ignore
touchedEntities.push(await targetItem.approve());
}
}
if (!dryRun) {
// @ts-ignore
touchedEntities.push(await item.approve());
}
return {
dryRun,
success: true,
@@ -34,8 +65,20 @@ export class ApproveAction extends Action {
}
}
export interface ApproveActionConfig extends ActionConfig {
export type ApproveTarget = 'self' | 'parent';
export interface ApproveOptions extends ApproveActionConfig, ActionOptions {}
export interface ApproveActionConfig extends ActionConfig {
/**
* Specify which Activities to approve
*
* This setting is only applicable if the Activity being acted on is a **comment**. On a **submission** the setting does nothing
*
* * self => approve activity being checked (comment)
* * parent => approve parent (submission) of activity being checked (comment)
* */
targets?: ApproveTarget[]
}
/**

View File

@@ -12,7 +12,7 @@ export class UserFlairAction extends Action {
super(options);
this.text = options.text === null || options.text === '' ? undefined : options.text;
this.css = options.css === null || options.text === '' ? undefined : options.text;
this.css = options.css === null || options.css === '' ? undefined : options.css;
this.flair_template_id = options.flair_template_id === null || options.flair_template_id === '' ? undefined : options.flair_template_id;
}

View File

@@ -1,7 +1,7 @@
import {Comment, Submission} from "snoowrap";
import {Logger} from "winston";
import {RuleResult} from "../Rule";
import {SubredditResources} from "../Subreddit/SubredditResources";
import {checkAuthorFilter, SubredditResources} from "../Subreddit/SubredditResources";
import {ActionProcessResult, ActionResult, ChecksActivityState, TypedActivityStates} from "../Common/interfaces";
import Author, {AuthorOptions} from "../Author/Author";
import {mergeArr} from "../util";
@@ -28,6 +28,7 @@ export abstract class Action {
subredditName,
dryRun = false,
authorIs: {
excludeCondition = 'OR',
include = [],
exclude = [],
} = {},
@@ -42,6 +43,7 @@ export abstract class Action {
this.logger = logger.child({labels: [`Action ${this.getActionUniqueName()}`]}, mergeArr);
this.authorIs = {
excludeCondition,
exclude: exclude.map(x => new Author(x)),
include: include.map(x => new Author(x)),
}
@@ -72,27 +74,10 @@ export abstract class Action {
actRes.runReason = `Activity did not pass 'itemIs' test, Action not run`;
return actRes;
}
if (this.authorIs.include !== undefined && this.authorIs.include.length > 0) {
for (const auth of this.authorIs.include) {
if (await this.resources.testAuthorCriteria(item, auth)) {
actRes.run = true;
const results = await this.process(item, ruleResults, runtimeDryrun);
return {...actRes, ...results};
}
}
this.logger.verbose('Inclusive author criteria not matched, Action not run');
actRes.runReason = 'Inclusive author criteria not matched';
return actRes;
} else if (this.authorIs.exclude !== undefined && this.authorIs.exclude.length > 0) {
for (const auth of this.authorIs.exclude) {
if (await this.resources.testAuthorCriteria(item, auth, false)) {
actRes.run = true;
const results = await this.process(item, ruleResults, runtimeDryrun);
return {...actRes, ...results};
}
}
this.logger.verbose('Exclusive author criteria not matched, Action not run');
actRes.runReason = 'Exclusive author criteria not matched';
const [authFilterResult, authFilterType] = await checkAuthorFilter(item, this.authorIs, this.resources, this.logger);
if(!authFilterResult) {
this.logger.verbose(`${authFilterType} author criteria not matched, Action not run`);
actRes.runReason = `${authFilterType} author criteria not matched`;
return actRes;
}

View File

@@ -1,7 +1,7 @@
import winston, {Logger} from "winston";
import dayjs, {Dayjs} from "dayjs";
import {getLogger} from "./Utils/loggerFactory";
import {Invokee, OperatorConfig} from "./Common/interfaces";
import {Invokee, OperatorConfig, OperatorConfigWithFileContext, OperatorFileConfig} from "./Common/interfaces";
import Bot from "./Bot";
import LoggedError from "./Utils/LoggedError";
import {sleep} from "./util";
@@ -14,7 +14,10 @@ export class App {
error: any;
constructor(config: OperatorConfig) {
config: OperatorConfig;
fileConfig: OperatorFileConfig;
constructor(config: OperatorConfigWithFileContext) {
const {
operator: {
name,
@@ -23,6 +26,11 @@ export class App {
bots = [],
} = config;
const {fileConfig, ...rest} = config;
this.config = rest;
this.fileConfig = fileConfig;
this.logger = getLogger(config.logging);
this.logger.info(`Operators: ${name.length === 0 ? 'None Specified' : name.join(', ')}`)

View File

@@ -1,5 +1,5 @@
import {UserNoteCriteria} from "../Rule";
import {CompareValue, CompareValueOrPercent, DurationComparor} from "../Common/interfaces";
import {CompareValue, CompareValueOrPercent, DurationComparor, JoinOperands} from "../Common/interfaces";
import {parseStringToRegex} from "../util";
/**
@@ -12,7 +12,17 @@ export interface AuthorOptions {
* */
include?: AuthorCriteria[];
/**
* Only runs if `include` is not present. Will "pass" if any one of the set of AuthorCriteria **does not** pass
* * OR => if ANY exclude condition "does not" pass then the exclude test passes
* * AND => if ALL exclude conditions "do not" pass then the exclude test passes
*
* Defaults to OR
* @default OR
* */
excludeCondition?: JoinOperands
/**
* Only runs if `include` is not present. Each AuthorCriteria is comprised of conditions that the Author being checked must "not" pass. See excludeCondition for set behavior
*
* EX: `isMod: true, name: Automoderator` => Will pass if the Author IS NOT a mod and IS NOT named Automoderator
* */
exclude?: AuthorCriteria[];
}
@@ -36,15 +46,20 @@ export interface AuthorCriteria {
* */
name?: string[],
/**
* A list of (user) flair css class values from the subreddit to match against
* A (user) flair css class (or list of) from the subreddit to match against
* @examples ["red"]
* */
flairCssClass?: string[],
flairCssClass?: string | string[],
/**
* A list of (user) flair text values from the subreddit to match against
* A (user) flair text value (or list of) from the subreddit to match against
* @examples ["Approved"]
* */
flairText?: string[],
flairText?: string | string[],
/**
* A (user) flair template id (or list of) from the subreddit to match against
* */
flairTemplate?: string | string[]
/**
* Is the author a moderator?
* */
@@ -136,8 +151,12 @@ export class Author implements AuthorCriteria {
constructor(options: AuthorCriteria) {
this.name = options.name;
this.flairCssClass = options.flairCssClass;
this.flairText = options.flairText;
if(options.flairCssClass !== undefined) {
this.flairCssClass = typeof options.flairCssClass === 'string' ? [options.flairCssClass] : options.flairCssClass;
}
if(options.flairText !== undefined) {
this.flairText = typeof options.flairText === 'string' ? [options.flairText] : options.flairText;
}
this.isMod = options.isMod;
this.userNotes = options.userNotes;
this.age = options.age;

View File

@@ -3,20 +3,30 @@ import {Logger} from "winston";
import dayjs, {Dayjs} from "dayjs";
import {Duration} from "dayjs/plugin/duration";
import EventEmitter from "events";
import {BotInstanceConfig, Invokee, PAUSED, RUNNING, STOPPED, SYSTEM, USER} from "../Common/interfaces";
import {
BotInstanceConfig,
FilterCriteriaDefaults,
Invokee,
PAUSED,
PollOn,
RUNNING,
STOPPED,
SYSTEM,
USER
} from "../Common/interfaces";
import {
createRetryHandler,
formatNumber,
mergeArr,
parseBool,
parseDuration,
parseSubredditName,
parseSubredditName, RetryOptions,
sleep,
snooLogWrapper
} from "../util";
import {Manager} from "../Subreddit/Manager";
import {ExtendedSnoowrap, ProxiedSnoowrap} from "../Utils/SnoowrapClients";
import {ModQueueStream, UnmoderatedStream} from "../Subreddit/Streams";
import {CommentStream, ModQueueStream, SPoll, SubmissionStream, UnmoderatedStream} from "../Subreddit/Streams";
import {BotResourcesManager} from "../Subreddit/SubredditResources";
import LoggedError from "../Utils/LoggedError";
import pEvent from "p-event";
@@ -33,6 +43,7 @@ class Bot {
running: boolean = false;
subreddits: string[];
excludeSubreddits: string[];
filterCriteriaDefaults?: FilterCriteriaDefaults
subManagers: Manager[] = [];
heartbeatInterval: number;
nextHeartbeat: Dayjs = dayjs();
@@ -43,6 +54,7 @@ class Bot {
nannyMode?: 'soft' | 'hard';
nannyRunning: boolean = false;
nextNannyCheck: Dayjs = dayjs().add(10, 'second');
sharedStreamRetryHandler: Function;
nannyRetryHandler: Function;
managerRetryHandler: Function;
nextExpiration: Dayjs = dayjs();
@@ -51,7 +63,7 @@ class Bot {
botAccount?: string;
maxWorkers: number;
startedAt: Dayjs = dayjs();
sharedModqueue: boolean = false;
sharedStreams: PollOn[] = [];
streamListedOnce: string[] = [];
stagger: number;
@@ -78,6 +90,7 @@ class Bot {
const {
notifications,
name,
filterCriteriaDefaults,
subreddits: {
names = [],
exclude = [],
@@ -98,7 +111,7 @@ class Bot {
debug,
},
polling: {
sharedMod,
shared = [],
stagger = 2000,
},
queue: {
@@ -123,7 +136,8 @@ class Bot {
this.hardLimit = hardLimit;
this.wikiLocation = wikiConfig;
this.heartbeatInterval = heartbeatInterval;
this.sharedModqueue = sharedMod;
this.filterCriteriaDefaults = filterCriteriaDefaults;
this.sharedStreams = shared;
if(name !== undefined) {
this.botName = name;
}
@@ -190,56 +204,12 @@ class Bot {
}
}
const retryHandler = createRetryHandler({maxRequestRetry: 8, maxOtherRetry: 2}, this.logger);
this.sharedStreamRetryHandler = createRetryHandler({maxRequestRetry: 8, maxOtherRetry: 2}, this.logger);
this.nannyRetryHandler = createRetryHandler({maxRequestRetry: 5, maxOtherRetry: 1}, this.logger);
this.managerRetryHandler = createRetryHandler({maxRequestRetry: 8, maxOtherRetry: 8, waitOnRetry: false, clearRetryCountAfter: 2}, this.logger);
this.stagger = stagger ?? 2000;
const modStreamErrorListener = (name: string) => async (err: any) => {
this.logger.error(`Polling error occurred on stream ${name.toUpperCase()}`, err);
const shouldRetry = await retryHandler(err);
if(shouldRetry) {
defaultUnmoderatedStream.startInterval();
} else {
for(const m of this.subManagers) {
if(m.modStreamCallbacks.size > 0) {
m.notificationManager.handle('runStateChanged', `${name.toUpperCase()} Polling Stopped`, 'Encountered too many errors from Reddit while polling. Will try to restart on next heartbeat.');
}
}
this.logger.error(`Mod stream ${name.toUpperCase()} encountered too many errors while polling. Will try to restart on next heartbeat.`);
}
}
const modStreamListingListener = (name: string) => async (listing: (Comment|Submission)[]) => {
// dole out in order they were received
if(!this.streamListedOnce.includes(name)) {
this.streamListedOnce.push(name);
return;
}
for(const i of listing) {
const foundManager = this.subManagers.find(x => x.subreddit.display_name === i.subreddit.display_name && x.modStreamCallbacks.get(name) !== undefined);
if(foundManager !== undefined) {
foundManager.modStreamCallbacks.get(name)(i);
if(stagger !== undefined) {
await sleep(stagger);
}
}
}
}
const defaultUnmoderatedStream = new UnmoderatedStream(this.client, {subreddit: 'mod', limit: 100, clearProcessed: { size: 100, retain: 100 }});
// @ts-ignore
defaultUnmoderatedStream.on('error', modStreamErrorListener('unmoderated'));
defaultUnmoderatedStream.on('listing', modStreamListingListener('unmoderated'));
const defaultModqueueStream = new ModQueueStream(this.client, {subreddit: 'mod', limit: 100, clearProcessed: { size: 100, retain: 100 }});
// @ts-ignore
defaultModqueueStream.on('error', modStreamErrorListener('modqueue'));
defaultModqueueStream.on('listing', modStreamListingListener('modqueue'));
this.cacheManager.modStreams.set('unmoderated', defaultUnmoderatedStream);
this.cacheManager.modStreams.set('modqueue', defaultModqueueStream);
process.on('uncaughtException', (e) => {
this.error = e;
});
@@ -263,6 +233,38 @@ class Bot {
});
}
/**
 * Builds the 'error' handler for a shared polling stream.
 *
 * On error, consults the bot-level shared-stream retry handler: if a retry is allowed the
 * stream's polling interval is restarted; otherwise polling stays stopped, managers using
 * shared streams are notified, and the error is logged (a later heartbeat may restart it).
 *
 * @param name key of the stream in cacheManager.modStreams (e.g. 'newComm', 'modqueue')
 */
createSharedStreamErrorListener = (name: string) => async (err: any) => {
this.logger.error(`Polling error occurred on stream ${name.toUpperCase()}`, err);
const shouldRetry = await this.sharedStreamRetryHandler(err);
if(shouldRetry) {
// retry budget not exhausted -- resume polling on the same stream instance
(this.cacheManager.modStreams.get(name) as SPoll<any>).startInterval(false);
} else {
// too many errors: leave the stream stopped and tell every manager that uses shared polling
for(const m of this.subManagers) {
if(m.sharedStreamCallbacks.size > 0) {
m.notificationManager.handle('runStateChanged', `${name.toUpperCase()} Polling Stopped`, 'Encountered too many errors from Reddit while polling. Will try to restart on next heartbeat.');
}
}
this.logger.error(`Mod stream ${name.toUpperCase()} encountered too many errors while polling. Will try to restart on next heartbeat.`);
}
}
/**
 * Builds the 'listing' handler for a shared polling stream: routes each Activity from the
 * combined listing to the manager whose subreddit it belongs to (if that manager registered
 * a callback for this stream), optionally sleeping between items to stagger load.
 *
 * @param name key of the stream in sharedStreamCallbacks maps (e.g. 'newComm', 'newSub')
 */
createSharedStreamListingListener = (name: string) => async (listing: (Comment|Submission)[]) => {
// dole out in order they were received
// skip the very first listing after (re)start -- presumably to avoid replaying the initial
// backlog as if it were new activity; TODO confirm against stream start semantics
if(!this.streamListedOnce.includes(name)) {
this.streamListedOnce.push(name);
return;
}
for(const i of listing) {
// match on subreddit name AND that the manager actually subscribed to this stream
const foundManager = this.subManagers.find(x => x.subreddit.display_name === i.subreddit.display_name && x.sharedStreamCallbacks.get(name) !== undefined);
if(foundManager !== undefined) {
foundManager.sharedStreamCallbacks.get(name)(i);
// spread out processing so many subreddits don't hammer cpu/network at once
if(this.stagger !== undefined) {
await sleep(this.stagger);
}
}
}
}
async onTerminate(reason = 'The application was shutdown') {
for(const m of this.subManagers) {
await m.notificationManager.handle('runStateChanged', 'Application Shutdown', reason);
@@ -316,7 +318,7 @@ class Bot {
while(!subListing.isFinished) {
subListing = await subListing.fetchMore({amount: 100});
}
availSubs = subListing;
availSubs = subListing.filter(x => x.display_name !== `u_${user.name}`);
this.logger.info(`u/${user.name} is a moderator of these subreddits: ${availSubs.map(x => x.display_name_prefixed).join(', ')}`);
@@ -336,38 +338,164 @@ class Bot {
}
} else {
if(this.excludeSubreddits.length > 0) {
this.logger.info(`Will run on all moderated subreddits but user-defined excluded: ${this.excludeSubreddits.join(', ')}`);
this.logger.info(`Will run on all moderated subreddits but own profile and user-defined excluded: ${this.excludeSubreddits.join(', ')}`);
const normalExcludes = this.excludeSubreddits.map(x => x.toLowerCase());
subsToRun = availSubs.filter(x => !normalExcludes.includes(x.display_name.toLowerCase()));
} else {
this.logger.info(`No user-defined subreddit constraints detected, will run on all moderated subreddits EXCEPT own profile (${this.botAccount})`);
subsToRun = availSubs.filter(x => x.display_name_prefixed !== this.botAccount);
subsToRun = availSubs;
}
}
// get configs for subs we want to run on and build/validate them
for (const sub of subsToRun) {
try {
this.subManagers.push(await this.createManager(sub));
this.subManagers.push(this.createManager(sub));
} catch (err: any) {
}
}
for(const m of this.subManagers) {
try {
await this.initManager(m);
} catch (err: any) {
}
}
this.parseSharedStreams();
}
async createManager(sub: Subreddit): Promise<Manager> {
const manager = new Manager(sub, this.client, this.logger, this.cacheManager, {dryRun: this.dryRun, sharedModqueue: this.sharedModqueue, wikiLocation: this.wikiLocation, botName: this.botName as string, maxWorkers: this.maxWorkers});
/**
 * Reconciles the bot-level shared polling streams with the managers that currently use them.
 *
 * * 'newComm' / 'newSub' => all sharing subreddits are combined into one multi-subreddit
 *   stream (r/sub1+sub2+...); the stream is rebuilt whenever its membership changes
 * * 'unmoderated' / 'modqueue' => a single r/mod stream, created while any manager shares it
 *
 * Streams are only created/ended here -- starting intervals is handled by runSharedStreams().
 * Called at startup and again whenever a manager's config changes.
 */
parseSharedStreams() {
    // subreddits currently sharing the comment stream (none when 'newComm' sharing is disabled bot-wide)
    const sharedCommentsSubreddits = !this.sharedStreams.includes('newComm') ? [] : this.subManagers.filter(x => x.isPollingShared('newComm')).map(x => x.subreddit.display_name);
    if (sharedCommentsSubreddits.length > 0) {
        const stream = this.cacheManager.modStreams.get('newComm');
        // the combined subreddit string doubles as a membership fingerprint -- only rebuild on change
        if (stream === undefined || stream.subreddit !== sharedCommentsSubreddits.join('+')) {
            let processed;
            if (stream !== undefined) {
                this.logger.info('Restarting SHARED COMMENT STREAM due to a subreddit config change');
                stream.end();
                // carry processed ids over so activities seen by the old stream are not re-processed
                processed = stream.processed;
            }
            if (sharedCommentsSubreddits.length > 100) {
                this.logger.warn(`SHARED COMMENT STREAM => Reddit can only combine 100 subreddits for getting new Comments but this bot has ${sharedCommentsSubreddits.length}`);
            }
            const defaultCommentStream = new CommentStream(this.client, {
                subreddit: sharedCommentsSubreddits.join('+'),
                limit: 100,
                enforceContinuity: true,
                logger: this.logger,
                processed,
                label: 'Shared Polling'
            });
            // @ts-ignore
            defaultCommentStream.on('error', this.createSharedStreamErrorListener('newComm'));
            defaultCommentStream.on('listing', this.createSharedStreamListingListener('newComm'));
            this.cacheManager.modStreams.set('newComm', defaultCommentStream);
        }
    } else {
        // nothing shares comments anymore -- stop the stream if one is still around
        const stream = this.cacheManager.modStreams.get('newComm');
        if (stream !== undefined) {
            stream.end();
        }
    }
    // identical handling for the shared submission stream
    const sharedSubmissionsSubreddits = !this.sharedStreams.includes('newSub') ? [] : this.subManagers.filter(x => x.isPollingShared('newSub')).map(x => x.subreddit.display_name);
    if (sharedSubmissionsSubreddits.length > 0) {
        const stream = this.cacheManager.modStreams.get('newSub');
        if (stream === undefined || stream.subreddit !== sharedSubmissionsSubreddits.join('+')) {
            let processed;
            if (stream !== undefined) {
                this.logger.info('Restarting SHARED SUBMISSION STREAM due to a subreddit config change');
                stream.end();
                processed = stream.processed;
            }
            if (sharedSubmissionsSubreddits.length > 100) {
                this.logger.warn(`SHARED SUBMISSION STREAM => Reddit can only combine 100 subreddits for getting new Submissions but this bot has ${sharedSubmissionsSubreddits.length}`);
            }
            const defaultSubStream = new SubmissionStream(this.client, {
                subreddit: sharedSubmissionsSubreddits.join('+'),
                limit: 100,
                enforceContinuity: true,
                logger: this.logger,
                processed,
                label: 'Shared Polling'
            });
            // @ts-ignore
            defaultSubStream.on('error', this.createSharedStreamErrorListener('newSub'));
            defaultSubStream.on('listing', this.createSharedStreamListingListener('newSub'));
            this.cacheManager.modStreams.set('newSub', defaultSubStream);
        }
    } else {
        const stream = this.cacheManager.modStreams.get('newSub');
        if (stream !== undefined) {
            stream.end();
        }
    }
    // unmoderated/modqueue always poll r/mod so there is no membership to rebuild -- just create or end
    const isUnmoderatedShared = !this.sharedStreams.includes('unmoderated') ? false : this.subManagers.some(x => x.isPollingShared('unmoderated'));
    const unmoderatedstream = this.cacheManager.modStreams.get('unmoderated');
    if (isUnmoderatedShared && unmoderatedstream === undefined) {
        const defaultUnmoderatedStream = new UnmoderatedStream(this.client, {
            subreddit: 'mod',
            limit: 100,
            logger: this.logger,
            label: 'Shared Polling'
        });
        // @ts-ignore
        defaultUnmoderatedStream.on('error', this.createSharedStreamErrorListener('unmoderated'));
        defaultUnmoderatedStream.on('listing', this.createSharedStreamListingListener('unmoderated'));
        this.cacheManager.modStreams.set('unmoderated', defaultUnmoderatedStream);
    } else if (!isUnmoderatedShared && unmoderatedstream !== undefined) {
        unmoderatedstream.end();
    }
    const isModqueueShared = !this.sharedStreams.includes('modqueue') ? false : this.subManagers.some(x => x.isPollingShared('modqueue'));
    const modqueuestream = this.cacheManager.modStreams.get('modqueue');
    if (isModqueueShared && modqueuestream === undefined) {
        const defaultModqueueStream = new ModQueueStream(this.client, {
            subreddit: 'mod',
            limit: 100,
            logger: this.logger,
            label: 'Shared Polling'
        });
        // @ts-ignore
        defaultModqueueStream.on('error', this.createSharedStreamErrorListener('modqueue'));
        defaultModqueueStream.on('listing', this.createSharedStreamListingListener('modqueue'));
        this.cacheManager.modStreams.set('modqueue', defaultModqueueStream);
    } else if (!isModqueueShared && modqueuestream !== undefined) {
        // FIX: condition was `isModqueueShared && modqueuestream !== undefined`, which ended the
        // modqueue stream while it was still shared and leaked it (kept polling) once sharing
        // stopped. Now mirrors the unmoderated branch above.
        modqueuestream.end();
    }
}
async initManager(manager: Manager) {
try {
await manager.parseConfiguration('system', true, {suppressNotification: true});
await manager.parseConfiguration('system', true, {suppressNotification: true, suppressChangeEvent: true});
} catch (err: any) {
if (!(err instanceof LoggedError)) {
this.logger.error(`Config was not valid:`, {subreddit: sub.display_name_prefixed});
this.logger.error(err, {subreddit: sub.display_name_prefixed});
this.logger.error(`Config was not valid:`, {subreddit: manager.subreddit.display_name_prefixed});
this.logger.error(err, {subreddit: manager.subreddit.display_name_prefixed});
err.logged = true;
}
}
}
/**
 * Creates (but does not initialize or start) a Manager for the given subreddit, wiring it
 * into bot-level error retry accounting and shared-stream reconciliation.
 *
 * @param sub the subreddit this manager will run on
 * @returns the constructed Manager; caller is responsible for initManager()/start()
 */
createManager(sub: Subreddit): Manager {
const manager = new Manager(sub, this.client, this.logger, this.cacheManager, {
dryRun: this.dryRun,
sharedStreams: this.sharedStreams,
wikiLocation: this.wikiLocation,
botName: this.botName as string,
maxWorkers: this.maxWorkers,
filterCriteriaDefaults: this.filterCriteriaDefaults,
});
// all errors from managers will count towards bot-level retry count
manager.on('error', async (err) => await this.panicOnRetries(err));
// when a manager's config changes its shared-polling needs may change too:
// rebuild stream membership and (re)start any stream that is not running
manager.on('configChange', async () => {
this.parseSharedStreams();
await this.runSharedStreams(false);
});
return manager;
}
@@ -404,11 +532,12 @@ class Bot {
const sub = await this.client.getSubreddit(name);
this.logger.info(`Attempting to add manager for r/${name}`);
try {
const manager = await this.createManager(sub);
const manager = this.createManager(sub);
this.logger.info(`Starting manager for r/${name}`);
this.subManagers.push(manager);
await this.initManager(manager);
await manager.start('system', {reason: 'Caused by creation due to moderator invite'});
await this.runModStreams();
await this.runSharedStreams();
} catch (err: any) {
if (!(err instanceof LoggedError)) {
this.logger.error(err);
@@ -426,14 +555,14 @@ class Bot {
}
}
async runModStreams(notify = false) {
async runSharedStreams(notify = false) {
for(const [k,v] of this.cacheManager.modStreams) {
if(!v.running && this.subManagers.some(x => x.modStreamCallbacks.get(k) !== undefined)) {
if(!v.running && this.subManagers.some(x => x.sharedStreamCallbacks.get(k) !== undefined)) {
v.startInterval();
this.logger.info(`Starting default ${k.toUpperCase()} mod stream`);
this.logger.info(`Starting ${k.toUpperCase()} shared polling`);
if(notify) {
for(const m of this.subManagers) {
if(m.modStreamCallbacks.size > 0) {
if(m.sharedStreamCallbacks.size > 0) {
await m.notificationManager.handle('runStateChanged', `${k.toUpperCase()} Polling Started`, 'Polling was successfully restarted on heartbeat.');
}
}
@@ -457,7 +586,7 @@ class Bot {
}
}
await this.runModStreams();
await this.runSharedStreams();
this.nextNannyCheck = dayjs().add(10, 'second');
this.nextHeartbeat = dayjs().add(this.heartbeatInterval, 'second');
@@ -547,7 +676,7 @@ class Bot {
}
}
}
await this.runModStreams(true);
await this.runSharedStreams(true);
}
async runApiNanny() {

View File

@@ -28,7 +28,7 @@ import * as RuleSchema from '../Schema/Rule.json';
import * as RuleSetSchema from '../Schema/RuleSet.json';
import * as ActionSchema from '../Schema/Action.json';
import {ActionObjectJson, RuleJson, RuleObjectJson, ActionJson as ActionTypeJson} from "../Common/types";
import {SubredditResources} from "../Subreddit/SubredditResources";
import {checkAuthorFilter, SubredditResources} from "../Subreddit/SubredditResources";
import {Author, AuthorCriteria, AuthorOptions} from '..';
import {ExtendedSnoowrap} from '../Utils/SnoowrapClients';
@@ -43,10 +43,7 @@ export abstract class Check implements ICheck {
rules: Array<RuleSet | Rule> = [];
logger: Logger;
itemIs: TypedActivityStates;
authorIs: {
include: AuthorCriteria[],
exclude: AuthorCriteria[]
};
authorIs: AuthorOptions;
cacheUserResult: Required<UserResultCacheOptions>;
dryRun?: boolean;
notifyOnTrigger: boolean;
@@ -69,6 +66,7 @@ export abstract class Check implements ICheck {
itemIs = [],
authorIs: {
include = [],
excludeCondition,
exclude = [],
} = {},
dryRun,
@@ -89,6 +87,7 @@ export abstract class Check implements ICheck {
this.condition = condition;
this.itemIs = itemIs;
this.authorIs = {
excludeCondition,
exclude: exclude.map(x => new Author(x)),
include: include.map(x => new Author(x)),
}
@@ -159,7 +158,7 @@ export abstract class Check implements ICheck {
runStats.push(`${this.actions.length} Actions`);
// not sure if this should be info or verbose
this.logger.info(`=${this.enabled ? 'Enabled' : 'Disabled'}= ${type.toUpperCase()} (${this.condition})${this.notifyOnTrigger ? ' ||Notify on Trigger|| ' : ''} => ${runStats.join(' | ')}${this.description !== undefined ? ` => ${this.description}` : ''}`);
if (this.rules.length === 0 && this.itemIs.length === 0 && this.authorIs.exclude.length === 0 && this.authorIs.include.length === 0) {
if (this.rules.length === 0 && this.itemIs.length === 0 && this.authorIs.exclude?.length === 0 && this.authorIs.include?.length === 0) {
this.logger.warn('No rules, item tests, or author test found -- this check will ALWAYS PASS!');
}
let ruleSetIndex = 1;
@@ -202,30 +201,9 @@ export abstract class Check implements ICheck {
this.logger.verbose(`${FAIL} => Item did not pass 'itemIs' test`);
return [false, allRuleResults];
}
let authorPass = null;
if (this.authorIs.include !== undefined && this.authorIs.include.length > 0) {
for (const auth of this.authorIs.include) {
if (await this.resources.testAuthorCriteria(item, auth)) {
authorPass = true;
break;
}
}
if (!authorPass) {
this.logger.verbose(`${FAIL} => Inclusive author criteria not matched`);
return Promise.resolve([false, allRuleResults]);
}
}
if (authorPass === null && this.authorIs.exclude !== undefined && this.authorIs.exclude.length > 0) {
for (const auth of this.authorIs.exclude) {
if (await this.resources.testAuthorCriteria(item, auth, false)) {
authorPass = true;
break;
}
}
if (!authorPass) {
this.logger.verbose(`${FAIL} => Exclusive author criteria not matched`);
return Promise.resolve([false, allRuleResults]);
}
const [authFilterResult, authFilterType] = await checkAuthorFilter(item, this.authorIs, this.resources, this.logger);
if(!authFilterResult) {
return Promise.resolve([false, allRuleResults]);
}
if (this.rules.length === 0) {

View File

@@ -0,0 +1,27 @@
import {ConfigFormat} from "../types";
/**
 * Contract for a parsed configuration document that retains both its raw text and a
 * format-specific parsed representation, and can be serialized back out.
 */
export interface ConfigDocumentInterface<DocumentType> {
// 'json' or 'yaml'
format: ConfigFormat;
// format-specific parse result (e.g. a YAML Document or a comment-json tree)
parsed: DocumentType
//parsingError: Error | string;
// original, unmodified document text
raw: string;
// where the document came from (e.g. file path or URL), when known
location?: string;
// serialize the (possibly modified) parsed document back to text
toString(): string;
// convert the parsed document to a plain JS object
toJS(): object;
}
/**
 * Base class for config documents. Subclasses supply the parsing (populating `parsed`
 * and `format`) and the serialization methods; this class only stores the raw text
 * and optional source location via parameter properties.
 */
abstract class AbstractConfigDocument<DocumentType> implements ConfigDocumentInterface<DocumentType> {
public abstract format: ConfigFormat;
public abstract parsed: DocumentType;
//public abstract parsingError: Error | string;
constructor(public raw: string, public location?: string) {
}
public abstract toString(): string;
public abstract toJS(): object;
}
export default AbstractConfigDocument;

View File

@@ -0,0 +1,30 @@
import AbstractConfigDocument from "./AbstractConfigDocument";
import {stringify, parse} from 'comment-json';
import JSON5 from 'json5';
import {ConfigFormat} from "../types";
import {OperatorJsonConfig} from "../interfaces";
/**
 * JSON config document that is parsed twice:
 *
 * * `parsed` via comment-json, which preserves comments so edits can be written back
 *   without losing them (used by toString())
 * * `cleanParsed` via JSON5, a plain comment-free object (used by toJS())
 */
class JsonConfigDocument extends AbstractConfigDocument<OperatorJsonConfig> {
public parsed: OperatorJsonConfig;
protected cleanParsed: OperatorJsonConfig;
public format: ConfigFormat;
public constructor(raw: string, location?: string) {
super(raw, location);
this.parsed = parse(raw);
this.cleanParsed = JSON5.parse(raw);
this.format = 'json';
}
// plain-object view without comment metadata
public toJS(): OperatorJsonConfig {
return this.cleanParsed;
}
// serialize the comment-preserving tree (1-space indent)
public toString(): string {
return stringify(this.parsed, null, 1);
}
}
export default JsonConfigDocument;

View File

@@ -0,0 +1,54 @@
import YamlConfigDocument from "../YamlConfigDocument";
import JsonConfigDocument from "../JsonConfigDocument";
import {YAMLMap, YAMLSeq} from "yaml";
import {BotInstanceJsonConfig, OperatorJsonConfig} from "../../interfaces";
import {assign} from 'comment-json';
/**
 * A config document for the operator-level configuration that supports adding/updating
 * bot entries in place and exporting the whole document as a plain object.
 */
export interface OperatorConfigDocumentInterface {
// add a bot entry, overwriting an existing entry with the same name when present
addBot(botData: BotInstanceJsonConfig): void;
toJS(): OperatorJsonConfig;
}
/**
 * YAML-backed operator config document. Mutations go through the yaml Document API so
 * comments and formatting in the underlying file are preserved.
 */
export class YamlOperatorConfigDocument extends YamlConfigDocument implements OperatorConfigDocumentInterface {
/**
 * Adds a bot entry to the `bots` sequence, creating the sequence if missing.
 * When the bot has a name and a bot with that name already exists, the existing
 * entry is replaced in place; otherwise the entry is appended.
 */
addBot(botData: BotInstanceJsonConfig) {
const bots = this.parsed.get('bots') as YAMLSeq;
if (bots === undefined) {
this.parsed.add({key: 'bots', value: [botData]});
} else if (botData.name !== undefined) {
// overwrite if we find an existing
const existingIndex = bots.items.findIndex(x => (x as YAMLMap).get('name') === botData.name);
if (existingIndex !== -1) {
this.parsed.setIn(['bots', existingIndex], botData);
} else {
this.parsed.addIn(['bots'], botData);
}
} else {
// unnamed bots can't be matched against existing entries, so always append
this.parsed.addIn(['bots'], botData);
}
}
// NOTE(review): YamlConfigDocument.toJS() is declared to return `object`; confirm the
// narrowing to OperatorJsonConfig here type-checks under strict settings
toJS(): OperatorJsonConfig {
return super.toJS();
}
}
/**
 * JSON-backed operator config document. Mutations operate on the comment-json tree so
 * comments survive a round trip; `assign` (from comment-json) merges while keeping
 * comment metadata on the existing entry.
 */
export class JsonOperatorConfigDocument extends JsonConfigDocument implements OperatorConfigDocumentInterface {
/**
 * Adds a bot entry to `bots`, creating the array if missing. When the bot has a name
 * and an entry with that name exists, the new data is merged into the existing entry;
 * otherwise the entry is appended.
 */
addBot(botData: BotInstanceJsonConfig) {
if (this.parsed.bots === undefined) {
this.parsed.bots = [botData];
} else if (botData.name !== undefined) {
const existingIndex = this.parsed.bots.findIndex(x => x.name === botData.name);
if (existingIndex !== -1) {
// merge rather than replace so comments attached to the existing entry are kept
this.parsed.bots[existingIndex] = assign(this.parsed.bots[existingIndex], botData);
} else {
this.parsed.bots.push(botData);
}
} else {
// unnamed bots can't be matched against existing entries, so always append
this.parsed.bots.push(botData);
}
}
toJS(): OperatorJsonConfig {
return super.toJS();
}
}

View File

@@ -0,0 +1,24 @@
import AbstractConfigDocument from "./AbstractConfigDocument";
import {Document, parseDocument} from 'yaml';
import {ConfigFormat} from "../types";
/**
 * YAML config document backed by a yaml `Document`, which keeps comments and layout
 * so the file can be modified and written back without losing them.
 */
class YamlConfigDocument extends AbstractConfigDocument<Document> {
public parsed: Document;
public format: ConfigFormat;
public constructor(raw: string, location?: string) {
super(raw, location);
this.parsed = parseDocument(raw);
this.format = 'yaml';
}
// plain-object view of the document contents
public toJS(): object {
return this.parsed.toJS();
}
// serialize back to YAML text, preserving comments/formatting held by the Document
public toString(): string {
return this.parsed.toString();
}
}
export default YamlConfigDocument;

View File

@@ -1,4 +1,4 @@
import {HistoricalStats} from "./interfaces";
import {HistoricalStats, FilterCriteriaDefaults} from "./interfaces";
export const cacheOptDefaults = {ttl: 60, max: 500, checkPeriod: 600};
export const cacheTTLDefaults = {authorTTL: 60, userNotesTTL: 300, wikiTTL: 300, submissionTTL: 60, commentTTL: 60, filterCriteriaTTL: 60, subredditTTL: 600, selfTTL: 60};
@@ -29,3 +29,13 @@ export const createHistoricalDefaults = (): HistoricalStats => {
actionsRun: new Map(),
};
}
// Default filter criteria applied when none are configured: exclude authors who are
// moderators (isMod: true in `exclude` means the author must NOT be a mod to pass).
// Note only `authorIs` has a default here; no default `itemIs` criteria are set.
export const filterCriteriaDefault: FilterCriteriaDefaults = {
authorIs: {
exclude: [
{
isMod: true
}
]
}
}

View File

@@ -8,6 +8,11 @@ import {IncomingMessage} from "http";
import Submission from "snoowrap/dist/objects/Submission";
import Comment from "snoowrap/dist/objects/Comment";
import RedditUser from "snoowrap/dist/objects/RedditUser";
import {AuthorCriteria, AuthorOptions} from "../Author/Author";
import {ConfigFormat} from "./types";
import AbstractConfigDocument, {ConfigDocumentInterface} from "./Config/AbstractConfigDocument";
import {Document as YamlDocument} from 'yaml';
import {JsonOperatorConfigDocument, YamlOperatorConfigDocument} from "./Config/Operator";
/**
* An ISO 8601 Duration
@@ -489,38 +494,6 @@ export type PollOn = 'unmoderated' | 'modqueue' | 'newSub' | 'newComm';
export interface PollingOptionsStrong extends PollingOptions {
limit: number,
interval: number,
clearProcessed: ClearProcessedOptions
}
/**
* For very long-running, high-volume subreddits clearing the list of processed activities helps manage memory bloat
*
* All of these options have default values based on the limit and/or interval set for polling options on each subreddit stream. They only need to modified if the defaults are not sufficient.
*
* If both `after` and `size` are defined, whichever is hit first will trigger the list to clear. `after` will be reset after every clear.
* */
export interface ClearProcessedOptions {
/**
* An interval the processed list should be cleared after.
*
* * EX `9 days`
* * EX `3 months`
* * EX `5 minutes`
* @pattern ^\s*(?<time>\d+)\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\s*$
* */
after?: string,
/**
* Number of activities found in processed list after which the list should be cleared.
*
* Defaults to the `limit` value from `PollingOptions`
* */
size?: number,
/**
* The number of activities to retain in processed list after clearing.
*
* Defaults to `limit` value from `PollingOptions`
* */
retain?: number,
}
export interface PollingDefaults {
@@ -594,8 +567,6 @@ export interface PollingOptions extends PollingDefaults {
*
* */
pollOn: 'unmoderated' | 'modqueue' | 'newSub' | 'newComm'
clearProcessed?: ClearProcessedOptions
}
export interface TTLConfig {
@@ -855,6 +826,13 @@ export interface ManagerOptions {
notifications?: NotificationConfig
credentials?: ThirdPartyCredentialsJsonConfig
/**
* Set the default filter criteria for all checks. If this property is specified it will override any defaults passed from the bot's config
*
* Default behavior is to exclude all mods and automoderator from checks
* */
filterCriteriaDefaults?: FilterCriteriaDefaults
}
/**
@@ -965,8 +943,9 @@ export interface SubmissionState extends ActivityState {
* */
title?: string
link_flair_text?: string
link_flair_css_class?: string
link_flair_text?: string | string[]
link_flair_css_class?: string | string[]
flairTemplate?: string | string[]
}
// properties calculated/derived by CM -- not provided as plain values by reddit
@@ -1220,6 +1199,7 @@ export interface Notifier {
export interface ManagerStateChangeOption {
reason?: string
suppressNotification?: boolean
suppressChangeEvent?: boolean
}
/**
@@ -1351,6 +1331,27 @@ export interface SnoowrapOptions {
debug?: boolean,
}
export type FilterCriteriaDefaultBehavior = 'replace' | 'merge';
export interface FilterCriteriaDefaults {
itemIs?: TypedActivityStates
/**
* Determine how itemIs defaults behave when itemIs is present on the check
*
* * merge => adds defaults to check's itemIs
* * replace => check itemIs will replace defaults (no defaults used)
* */
itemIsBehavior?: FilterCriteriaDefaultBehavior
/**
* Determine how authorIs defaults behave when authorIs is present on the check
*
* * merge => merges defaults with check's authorIs
* * replace => check authorIs will replace defaults (no defaults used)
* */
authorIs?: AuthorOptions
authorIsBehavior?: FilterCriteriaDefaultBehavior
}
/**
* The configuration for an **individual reddit account** ContextMod will run as a bot.
*
@@ -1379,6 +1380,13 @@ export interface BotInstanceJsonConfig {
* */
snoowrap?: SnoowrapOptions
/**
* Define the default behavior for all filter criteria on all checks in all subreddits
*
* Defaults to exclude mods and automoderator from checks
* */
filterCriteriaDefaults?: FilterCriteriaDefaults
/**
* Settings related to bot behavior for subreddits it is managing
* */
@@ -1446,18 +1454,31 @@ export interface BotInstanceJsonConfig {
* */
polling?: PollingDefaults & {
/**
* If set to `true` all subreddits polling unmoderated/modqueue with default polling settings will share a request to "r/mod"
* otherwise each subreddit will poll its own mod view
* DEPRECATED: See `shared`
*
* Using the ENV or ARG will set `unmoderated` and `modqueue` on `shared`
*
* * ENV => `SHARE_MOD`
* * ARG => `--shareMod`
*
* @default false
* @deprecated
* */
sharedMod?: boolean,
/**
* If sharing a mod stream stagger pushing relevant Activities to individual subreddits.
* Set which polling sources should be shared among subreddits using default polling settings for that source
*
* For `unmoderated` and `modqueue` the bot will poll on **r/mod** for new activities
* * For `newSub` and `newComm` all subreddits sharing the source will be combined to poll like **r/subreddit1+subreddit2/new**
*
* If set to `true` all polling sources will be shared, otherwise specify which sources should be shared as a list
*
* */
shared?: PollOn[] | true,
/**
* If sharing a stream, stagger pushing relevant Activities to individual subreddits.
*
* Useful when running many subreddits and rules are potentially cpu/memory/traffic heavy -- allows spreading out load
* */
@@ -1769,7 +1790,7 @@ export interface BotInstanceConfig extends BotInstanceJsonConfig {
heartbeatInterval: number,
},
polling: {
sharedMod: boolean,
shared: PollOn[],
stagger?: number,
limit: number,
interval: number,
@@ -1821,6 +1842,15 @@ export interface OperatorConfig extends OperatorJsonConfig {
credentials: ThirdPartyCredentialsJsonConfig
}
export interface OperatorFileConfig {
document: YamlOperatorConfigDocument | JsonOperatorConfigDocument
isWriteable?: boolean
}
export interface OperatorConfigWithFileContext extends OperatorConfig {
fileConfig: OperatorFileConfig
}
//export type OperatorConfig = Required<OperatorJsonConfig>;
interface CacheTypeStat {
@@ -2007,3 +2037,27 @@ export interface StringComparisonOptions {
lengthWeight?: number,
transforms?: ((str: string) => string)[]
}
/**
 * Result of testing a single property of one filter criteria against an item.
 * */
export interface FilterCriteriaPropertyResult<T> {
    /** The criteria property that was tested */
    property: keyof T
    /** The value(s) the criteria specified for this property */
    expected: (string | boolean | number)[]
    /** The actual value found on the item, if it could be determined */
    found?: string | boolean | number | null
    /** Whether the property passed; null presumably means the property was not tested -- TODO confirm */
    passed?: null | boolean
    /** Optional human-readable explanation for the outcome */
    reason?: string
    behavior: FilterBehavior
}

/**
 * Result of testing one whole criteria object (all of its properties) against an item.
 * */
export interface FilterCriteriaResult<T> {
    behavior: FilterBehavior
    /** The criteria object that was tested (e.g. AuthorCriteria | TypedActivityStates) */
    criteria: T
    /** Per-property outcomes that produced this result */
    propertyResults: FilterCriteriaPropertyResult<T>[]
    passed: boolean
}

/** Whether a criteria set is matched inclusively or exclusively */
export type FilterBehavior = 'include' | 'exclude'

/**
 * Aggregate result of running a set of criteria joined by an operand (AND/OR).
 * */
export interface FilterResult<T> {
    criteriaResults: FilterCriteriaResult<T>[]
    join: JoinOperands
    passed: boolean
}

View File

@@ -28,3 +28,5 @@ export type SetRandomInterval = (
minDelay: number,
maxDelay: number,
) => { clear: () => void };
export type ConfigFormat = 'json' | 'yaml';

View File

@@ -1,12 +1,12 @@
import {Logger} from "winston";
import {
buildCacheOptionsFromProvider, buildCachePrefix,
createAjvFactory,
createAjvFactory, fileOrDirectoryIsWriteable,
mergeArr,
normalizeName,
overwriteMerge,
parseBool, randomId,
readConfigFile,
parseBool, parseFromJsonOrYamlToObject, randomId,
readConfigFile, removeFromSourceIfKeysExistsInDestination,
removeUndefinedKeys
} from "./util";
import {CommentCheck} from "./Check/CommentCheck";
@@ -31,19 +31,33 @@ import {
CacheOptions,
BotInstanceJsonConfig,
BotInstanceConfig,
RequiredWebRedditCredentials, RedditCredentials, BotCredentialsJsonConfig, BotCredentialsConfig
RequiredWebRedditCredentials,
RedditCredentials,
BotCredentialsJsonConfig,
BotCredentialsConfig,
FilterCriteriaDefaults, TypedActivityStates, OperatorFileConfig
} from "./Common/interfaces";
import {isRuleSetJSON, RuleSetJson, RuleSetObjectJson} from "./Rule/RuleSet";
import deepEqual from "fast-deep-equal";
import {ActionJson, ActionObjectJson, RuleJson, RuleObjectJson} from "./Common/types";
import {ActionJson, ActionObjectJson, ConfigFormat, RuleJson, RuleObjectJson} from "./Common/types";
import {isActionJson} from "./Action";
import {getLogger} from "./Utils/loggerFactory";
import {GetEnvVars} from 'env-cmd';
import {operatorConfig} from "./Utils/CommandConfig";
import merge from 'deepmerge';
import * as process from "process";
import {cacheOptDefaults, cacheTTLDefaults} from "./Common/defaults";
import {cacheOptDefaults, cacheTTLDefaults, filterCriteriaDefault} from "./Common/defaults";
import objectHash from "object-hash";
import {AuthorCriteria, AuthorOptions} from "./Author/Author";
import path from 'path';
import {
JsonOperatorConfigDocument,
OperatorConfigDocumentInterface,
YamlOperatorConfigDocument
} from "./Common/Config/Operator";
import SimpleError from "./Utils/SimpleError";
import {ConfigDocumentInterface} from "./Common/Config/AbstractConfigDocument";
import {Document as YamlDocument} from "yaml";
export interface ConfigBuilderOptions {
logger: Logger,
@@ -115,22 +129,51 @@ export class ConfigBuilder {
return validConfig as JSONConfig;
}
parseToStructured(config: JSONConfig): CheckStructuredJson[] {
parseToStructured(config: JSONConfig, filterCriteriaDefaultsFromBot?: FilterCriteriaDefaults): CheckStructuredJson[] {
let namedRules: Map<string, RuleObjectJson> = new Map();
let namedActions: Map<string, ActionObjectJson> = new Map();
const {checks = []} = config;
const {checks = [], filterCriteriaDefaults} = config;
for (const c of checks) {
const {rules = []} = c;
namedRules = extractNamedRules(rules, namedRules);
namedActions = extractNamedActions(c.actions, namedActions);
}
const filterDefs = filterCriteriaDefaults ?? filterCriteriaDefaultsFromBot;
const {
authorIsBehavior = 'merge',
itemIsBehavior = 'merge',
authorIs: authorIsDefault = {},
itemIs: itemIsDefault = []
} = filterDefs || {};
const structuredChecks: CheckStructuredJson[] = [];
for (const c of checks) {
const {rules = []} = c;
const {rules = [], authorIs = {}, itemIs = []} = c;
const strongRules = insertNamedRules(rules, namedRules);
const strongActions = insertNamedActions(c.actions, namedActions);
const strongCheck = {...c, rules: strongRules, actions: strongActions} as CheckStructuredJson;
let derivedAuthorIs: AuthorOptions = authorIsDefault;
if (authorIsBehavior === 'merge') {
derivedAuthorIs = merge.all([authorIs, authorIsDefault], {arrayMerge: removeFromSourceIfKeysExistsInDestination});
} else if (Object.keys(authorIs).length > 0) {
derivedAuthorIs = authorIs;
}
let derivedItemIs: TypedActivityStates = itemIsDefault;
if (itemIsBehavior === 'merge') {
derivedItemIs = [...itemIs, ...itemIsDefault];
} else if (itemIs.length > 0) {
derivedItemIs = itemIs;
}
const strongCheck = {
...c,
authorIs: derivedAuthorIs,
itemIs: derivedItemIs,
rules: strongRules,
actions: strongActions
} as CheckStructuredJson;
structuredChecks.push(strongCheck);
}
@@ -146,10 +189,6 @@ export const buildPollingOptions = (values: (string | PollingOptions)[]): Pollin
pollOn: v as PollOn,
interval: DEFAULT_POLLING_INTERVAL,
limit: DEFAULT_POLLING_LIMIT,
clearProcessed: {
size: DEFAULT_POLLING_LIMIT,
retain: DEFAULT_POLLING_LIMIT,
}
});
} else {
const {
@@ -157,14 +196,12 @@ export const buildPollingOptions = (values: (string | PollingOptions)[]): Pollin
interval = DEFAULT_POLLING_INTERVAL,
limit = DEFAULT_POLLING_LIMIT,
delayUntil,
clearProcessed = {size: limit, retain: limit},
} = v;
opts.push({
pollOn: p as PollOn,
interval,
limit,
delayUntil,
clearProcessed
});
}
}
@@ -299,7 +336,7 @@ export const parseDefaultBotInstanceFromArgs = (args: any): BotInstanceJsonConfi
heartbeatInterval: heartbeat,
},
polling: {
sharedMod,
shared: sharedMod ? ['unmoderated', 'modqueue'] : undefined,
},
nanny: {
softLimit,
@@ -402,7 +439,7 @@ export const parseDefaultBotInstanceFromEnv = (): BotInstanceJsonConfig => {
heartbeatInterval: process.env.HEARTBEAT !== undefined ? parseInt(process.env.HEARTBEAT) : undefined,
},
polling: {
sharedMod: parseBool(process.env.SHARE_MOD),
shared: parseBool(process.env.SHARE_MOD) ? ['unmoderated', 'modqueue'] : undefined,
},
nanny: {
softLimit: process.env.SOFT_LIMIT !== undefined ? parseInt(process.env.SOFT_LIMIT) : undefined,
@@ -448,9 +485,9 @@ export const parseOpConfigFromEnv = (): OperatorJsonConfig => {
},
},
credentials: {
youtube: {
apiKey: process.env.YOUTUBE_API_KEY
}
youtube: {
apiKey: process.env.YOUTUBE_API_KEY
}
}
}
@@ -463,7 +500,7 @@ export const parseOpConfigFromEnv = (): OperatorJsonConfig => {
// Actual ENVs (from environment)
// json config
// args from cli
export const parseOperatorConfigFromSources = async (args: any): Promise<OperatorJsonConfig> => {
export const parseOperatorConfigFromSources = async (args: any): Promise<[OperatorJsonConfig, OperatorFileConfig]> => {
const {logLevel = process.env.LOG_LEVEL, logDir = process.env.LOG_DIR || false} = args || {};
const envPath = process.env.OPERATOR_ENV;
@@ -494,24 +531,84 @@ export const parseOperatorConfigFromSources = async (args: any): Promise<Operato
//swallow silently for now 😬
}
const {operatorConfig = process.env.OPERATOR_CONFIG} = args;
const {operatorConfig = (process.env.OPERATOR_CONFIG ?? path.resolve(__dirname, '../config.yaml'))} = args;
let configFromFile: OperatorJsonConfig = {};
if (operatorConfig !== undefined) {
let rawConfig;
try {
rawConfig = await readConfigFile(operatorConfig, {log: initLogger}) as object;
} catch (err: any) {
initLogger.error('Cannot continue app startup because operator config file was not parseable.');
let fileConfigFormat: ConfigFormat | undefined = undefined;
let fileConfig: object = {};
let rawConfig: string = '';
let configDoc: YamlOperatorConfigDocument | JsonOperatorConfigDocument;
let writeable = false;
try {
writeable = await fileOrDirectoryIsWriteable(operatorConfig);
} catch (e) {
initLogger.warn(`Issue while parsing operator config file location: ${e} \n This is only a problem if you do not have a config file but are planning on adding bots interactively.`);
}
try {
const [rawConfigValue, format] = await readConfigFile(operatorConfig, {log: initLogger});
rawConfig = rawConfigValue ?? '';
fileConfigFormat = format as ConfigFormat;
} catch (err: any) {
const {code} = err;
if (code === 'ENOENT') {
initLogger.warn('No operator config file found but will continue');
if (err.extension !== undefined) {
fileConfigFormat = err.extension
}
} else {
initLogger.error('Cannot continue app startup because operator config file exists but was not parseable.');
err.logged = true;
throw err;
}
}
const [format, doc, jsonErr, yamlErr] = parseFromJsonOrYamlToObject(rawConfig, {
location: operatorConfig,
jsonDocFunc: (content, location) => new JsonOperatorConfigDocument(content, location),
yamlDocFunc: (content, location) => new YamlOperatorConfigDocument(content, location)
});
if (format !== undefined && fileConfigFormat === undefined) {
fileConfigFormat = 'yaml';
}
if (doc === undefined && rawConfig !== '') {
initLogger.error(`Could not parse file contents at ${operatorConfig} as JSON or YAML (likely it is ${fileConfigFormat}):`);
initLogger.error(jsonErr);
initLogger.error(yamlErr);
throw new SimpleError(`Could not parse file contents at ${operatorConfig} as JSON or YAML`);
} else if (doc === undefined && rawConfig === '') {
// create an empty doc
if(fileConfigFormat === 'json') {
configDoc = new JsonOperatorConfigDocument('{}', operatorConfig);
} else {
configDoc = new YamlOperatorConfigDocument('', operatorConfig);
configDoc.parsed = new YamlDocument({});
}
configFromFile = {};
} else {
configDoc = doc as (YamlOperatorConfigDocument | JsonOperatorConfigDocument);
try {
configFromFile = validateJson(rawConfig, operatorSchema, initLogger) as OperatorJsonConfig;
configFromFile = validateJson(configDoc.toJS(), operatorSchema, initLogger) as OperatorJsonConfig;
const {bots = []} = configFromFile || {};
for (const b of bots) {
const {
polling: {
sharedMod
} = {}
} = b;
if (sharedMod !== undefined) {
initLogger.warn(`'sharedMod' bot config property is DEPRECATED and will be removed in next minor version. Use 'shared' property instead (see docs)`);
break;
}
}
} catch (err: any) {
initLogger.error('Cannot continue app startup because operator config file was not valid.');
throw err;
}
}
const opConfigFromArgs = parseOpConfigFromArgs(args);
const opConfigFromEnv = parseOpConfigFromEnv();
@@ -538,7 +635,10 @@ export const parseOperatorConfigFromSources = async (args: any): Promise<Operato
botInstances = botInstancesFromFile.map(x => merge.all([defaultBotInstance, x], {arrayMerge: overwriteMerge}));
}
return removeUndefinedKeys({...mergedConfig, bots: botInstances}) as OperatorJsonConfig;
return [removeUndefinedKeys({...mergedConfig, bots: botInstances}) as OperatorJsonConfig, {
document: configDoc,
isWriteable: writeable
}];
}
export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): OperatorConfig => {
@@ -624,154 +724,6 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
}
}
let hydratedBots: BotInstanceConfig[] = bots.map(x => {
const {
name: botName,
polling: {
sharedMod = false,
stagger,
limit = 100,
interval = 30,
} = {},
queue: {
maxWorkers = 1,
} = {},
caching,
nanny: {
softLimit = 250,
hardLimit = 50
} = {},
snoowrap = snoowrapOp,
credentials = {},
subreddits: {
names = [],
exclude = [],
wikiConfig = 'botconfig/contextbot',
dryRun,
heartbeatInterval = 300,
} = {},
} = x;
let botCache: StrongCache;
let botActionedEventsDefault: number;
if (caching === undefined) {
botCache = {
...cacheTTLDefaults,
actionedEventsDefault: opActionedEventsDefault,
actionedEventsMax: opActionedEventsMax,
provider: {...defaultProvider}
};
} else {
const {
provider,
actionedEventsMax = opActionedEventsMax,
actionedEventsDefault = opActionedEventsDefault,
...restConfig
} = caching;
botActionedEventsDefault = actionedEventsDefault;
if (actionedEventsMax !== undefined) {
botActionedEventsDefault = Math.min(actionedEventsDefault, actionedEventsMax);
}
if (typeof provider === 'string') {
botCache = {
...cacheTTLDefaults,
...restConfig,
actionedEventsDefault: botActionedEventsDefault,
provider: {
store: provider as CacheProvider,
...cacheOptDefaults
}
}
} else {
const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
botCache = {
...cacheTTLDefaults,
...restConfig,
actionedEventsDefault: botActionedEventsDefault,
actionedEventsMax,
provider: {
store,
...cacheOptDefaults,
...rest,
},
}
}
}
let botCreds: BotCredentialsConfig;
if((credentials as any).clientId !== undefined) {
const creds = credentials as RedditCredentials;
const {
clientId: ci,
clientSecret: cs,
...restCred
} = creds;
botCreds = {
reddit: {
clientId: (ci as string),
clientSecret: (cs as string),
...restCred,
}
}
} else {
const creds = credentials as BotCredentialsJsonConfig;
const {
reddit: {
clientId: ci,
clientSecret: cs,
...restRedditCreds
},
...rest
} = creds;
botCreds = {
reddit: {
clientId: (ci as string),
clientSecret: (cs as string),
...restRedditCreds,
},
...rest
}
}
if (botCache.provider.prefix === undefined || botCache.provider.prefix === defaultProvider.prefix) {
// need to provide unique prefix to bot
botCache.provider.prefix = buildCachePrefix([botCache.provider.prefix, 'bot', (botName || objectHash.sha1(botCreds))]);
}
return {
name: botName,
snoowrap,
subreddits: {
names,
exclude,
wikiConfig,
heartbeatInterval,
dryRun,
},
credentials: botCreds,
caching: botCache,
polling: {
sharedMod,
stagger,
limit,
interval,
},
queue: {
maxWorkers,
},
nanny: {
softLimit,
hardLimit
}
}
});
const defaultOperators = typeof name === 'string' ? [name] : name;
const config: OperatorConfig = {
@@ -808,9 +760,175 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
secret: apiSecret,
friendly
},
bots: hydratedBots,
bots: [],
credentials,
};
config.bots = bots.map(x => buildBotConfig(x, config));
return config;
}
/**
 * Hydrate a bot's JSON config into a fully-specified BotInstanceConfig.
 *
 * Fills in defaults for polling, queue, nanny, subreddits, and caching, inheriting
 * operator-level caching/snoowrap settings where the bot does not specify its own.
 * Also normalizes credentials into the nested `reddit` shape and maps the
 * DEPRECATED `polling.sharedMod` flag onto the newer `polling.shared` source list.
 *
 * @param data bot instance config as parsed from file/ENV/args
 * @param opConfig the already-hydrated operator config to inherit defaults from
 * @returns a complete BotInstanceConfig ready for use
 */
export const buildBotConfig = (data: BotInstanceJsonConfig, opConfig: OperatorConfig): BotInstanceConfig => {
    const {
        snoowrap: snoowrapOp,
        caching: {
            actionedEventsMax: opActionedEventsMax,
            actionedEventsDefault: opActionedEventsDefault = 25,
            provider: defaultProvider,
        } = {}
    } = opConfig;
    const {
        name: botName,
        filterCriteriaDefaults = filterCriteriaDefault,
        polling: {
            sharedMod,
            shared = [],
            stagger,
            limit = 100,
            interval = 30,
        } = {},
        queue: {
            maxWorkers = 1,
        } = {},
        caching,
        nanny: {
            softLimit = 250,
            hardLimit = 50
        } = {},
        snoowrap = snoowrapOp,
        credentials = {},
        subreddits: {
            names = [],
            exclude = [],
            wikiConfig = 'botconfig/contextbot',
            dryRun,
            heartbeatInterval = 300,
        } = {},
    } = data;
    let botCache: StrongCache;
    let botActionedEventsDefault: number;
    if (caching === undefined) {
        // No bot-level caching config at all -- inherit operator defaults wholesale
        botCache = {
            ...cacheTTLDefaults,
            actionedEventsDefault: opActionedEventsDefault,
            actionedEventsMax: opActionedEventsMax,
            provider: {...defaultProvider as CacheOptions}
        };
    } else {
        const {
            provider,
            actionedEventsMax = opActionedEventsMax,
            actionedEventsDefault = opActionedEventsDefault,
            ...restConfig
        } = caching;
        // actionedEventsDefault may never exceed actionedEventsMax (when a max is set)
        botActionedEventsDefault = actionedEventsDefault;
        if (actionedEventsMax !== undefined) {
            botActionedEventsDefault = Math.min(actionedEventsDefault, actionedEventsMax);
        }
        if (typeof provider === 'string') {
            // shorthand: provider given as just a store name
            botCache = {
                ...cacheTTLDefaults,
                ...restConfig,
                actionedEventsDefault: botActionedEventsDefault,
                provider: {
                    store: provider as CacheProvider,
                    ...cacheOptDefaults
                }
            }
        } else {
            // NOTE(review): ttl/max are destructured out (so excluded from `rest`) but then
            // unused -- user-supplied ttl/max appear to be dropped in favor of cacheOptDefaults.
            // Preserved as-is; confirm whether this is intentional.
            const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
            botCache = {
                ...cacheTTLDefaults,
                ...restConfig,
                actionedEventsDefault: botActionedEventsDefault,
                actionedEventsMax,
                provider: {
                    store,
                    ...cacheOptDefaults,
                    ...rest,
                },
            }
        }
    }
    let botCreds: BotCredentialsConfig;
    if ((credentials as any).clientId !== undefined) {
        // legacy flat shape: reddit credentials at the top level
        const creds = credentials as RedditCredentials;
        const {
            clientId: ci,
            clientSecret: cs,
            ...restCred
        } = creds;
        botCreds = {
            reddit: {
                clientId: (ci as string),
                clientSecret: (cs as string),
                ...restCred,
            }
        }
    } else {
        // nested shape: { reddit: {...}, ...otherServices }
        const creds = credentials as BotCredentialsJsonConfig;
        const {
            reddit: {
                clientId: ci,
                clientSecret: cs,
                ...restRedditCreds
            },
            ...rest
        } = creds;
        botCreds = {
            reddit: {
                clientId: (ci as string),
                clientSecret: (cs as string),
                ...restRedditCreds,
            },
            ...rest
        }
    }
    // Optional chaining: operator-level provider may be undefined when opConfig.caching is absent
    if (botCache.provider.prefix === undefined || botCache.provider.prefix === (defaultProvider as CacheOptions | undefined)?.prefix) {
        // need to provide unique prefix to bot
        botCache.provider.prefix = buildCachePrefix([botCache.provider.prefix, 'bot', (botName || objectHash.sha1(botCreds))]);
    }
    // Copy rather than alias `shared` so the caller's config array is never mutated by the pushes below
    const realShared: string[] = shared === true ? ['unmoderated', 'modqueue', 'newComm', 'newSub'] : [...shared];
    if (sharedMod === true) {
        // DEPRECATED sharedMod flag maps onto the mod-view polling sources
        realShared.push('unmoderated');
        realShared.push('modqueue');
    }
    return {
        name: botName,
        snoowrap: snoowrap || {},
        filterCriteriaDefaults,
        subreddits: {
            names,
            exclude,
            wikiConfig,
            heartbeatInterval,
            dryRun,
        },
        credentials: botCreds,
        caching: botCache,
        polling: {
            // dedupe in case sharedMod and shared specified overlapping sources
            shared: [...new Set(realShared)] as PollOn[],
            stagger,
            limit,
            interval,
        },
        queue: {
            maxWorkers,
        },
        nanny: {
            softLimit,
            hardLimit
        }
    }
}

View File

@@ -2,6 +2,7 @@ import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
import {Comment} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {Author, AuthorCriteria} from "../Author/Author";
import {checkAuthorFilter} from "../Subreddit/SubredditResources";
/**
* Checks the author of the Activity against AuthorCriteria. This differs from a Rule's AuthorOptions as this is a full Rule and will only pass/fail, not skip.
@@ -59,20 +60,8 @@ export class AuthorRule extends Rule {
}
protected async process(item: Comment | Submission): Promise<[boolean, RuleResult]> {
if (this.include.length > 0) {
for (const auth of this.include) {
if (await this.resources.testAuthorCriteria(item, auth)) {
return Promise.resolve([true, this.getResult(true)]);
}
}
return Promise.resolve([false, this.getResult(false)]);
}
for (const auth of this.exclude) {
if (await this.resources.testAuthorCriteria(item, auth, false)) {
return Promise.resolve([true, this.getResult(true)]);
}
}
return Promise.resolve([false, this.getResult(false)]);
const [result, filterType] = await checkAuthorFilter(item, {include: this.include, exclude: this.exclude}, this.resources, this.logger);
return Promise.resolve([result, this.getResult(result)]);
}
}

View File

@@ -1,6 +1,7 @@
import {Rule, RuleJSONConfig, RuleOptions, RulePremise, RuleResult} from "./index";
import {Comment, VoteableContent} from "snoowrap";
import {VoteableContent} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import Comment from "snoowrap/dist/objects/Comment";
import as from 'async';
import pMap from 'p-map';
// @ts-ignore
@@ -23,7 +24,7 @@ import {
parseSubredditName,
parseUsableLinkIdentifier,
PASS, sleep,
toStrongSubredditState
toStrongSubredditState, windowToActivityWindowCriteria
} from "../util";
import {
ActivityWindow,
@@ -115,15 +116,42 @@ export class RecentActivityRule extends Rule {
async process(item: Submission | Comment): Promise<[boolean, RuleResult]> {
let activities;
// ACID is a bitch
// reddit may not return the activity being checked in the author's recent history due to availability/consistency issues or *something*
// so make sure we add it in if config is checking the same type and it isn't included
// TODO refactor this for SubredditState everywhere branch
let shouldIncludeSelf = true;
const strongWindow = windowToActivityWindowCriteria(this.window);
const {
subreddits: {
include = [],
exclude = []
} = {}
} = strongWindow;
if (include.length > 0 && !include.some(x => x.toLocaleLowerCase() === item.subreddit.display_name.toLocaleLowerCase())) {
shouldIncludeSelf = false;
} else if (exclude.length > 0 && exclude.some(x => x.toLocaleLowerCase() === item.subreddit.display_name.toLocaleLowerCase())) {
shouldIncludeSelf = false;
}
switch (this.lookAt) {
case 'comments':
activities = await this.resources.getAuthorComments(item.author, {window: this.window});
if (shouldIncludeSelf && item instanceof Comment && !activities.some(x => x.name === item.name)) {
activities.unshift(item);
}
break;
case 'submissions':
activities = await this.resources.getAuthorSubmissions(item.author, {window: this.window});
if (shouldIncludeSelf && item instanceof Submission && !activities.some(x => x.name === item.name)) {
activities.unshift(item);
}
break;
default:
activities = await this.resources.getAuthorActivities(item.author, {window: this.window});
if (shouldIncludeSelf && !activities.some(x => x.name === item.name)) {
activities.unshift(item);
}
break;
}
@@ -316,34 +344,6 @@ export class RecentActivityRule extends Rule {
}
}
for (const activity of viableActivity) {
if (asSubmission(activity) && submissionState !== undefined) {
if (!(await this.resources.testItemCriteria(activity, [submissionState]))) {
continue;
}
} else if (commentState !== undefined) {
if (!(await this.resources.testItemCriteria(activity, [commentState]))) {
continue;
}
}
let inSubreddits = false;
for (const ss of subStates) {
const res = await this.resources.testSubredditCriteria(activity, ss);
if (res) {
inSubreddits = true;
break;
}
}
if (inSubreddits) {
currCount++;
combinedKarma += activity.score;
const pSub = getActivitySubredditName(activity);
if (!presentSubs.includes(pSub)) {
presentSubs.push(pSub);
}
}
}
const {operator, value, isPercent} = parseGenericValueOrPercentComparison(threshold);
let sum = {
subsWithActivity: presentSubs,

View File

@@ -152,7 +152,8 @@ export class RegexRule extends Rule {
}, []);
// check regex
const reg = parseStringToRegex(regex, 'g');
const regexContent = await this.resources.getContent(regex);
const reg = parseStringToRegex(regexContent, 'g');
if(reg === undefined) {
throw new SimpleError(`Value given for regex on Criteria ${name} was not valid: ${regex}`);
}
@@ -257,7 +258,7 @@ export class RegexRule extends Rule {
const critResults = {
criteria: {
name,
regex,
regex: regex !== regexContent ? `${regex} from ${regexContent}` : regex,
testOn,
matchThreshold,
activityMatchThreshold,

View File

@@ -2,7 +2,7 @@ import Snoowrap, {Comment} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {Logger} from "winston";
import {findResultByPremise, mergeArr} from "../util";
import {SubredditResources} from "../Subreddit/SubredditResources";
import {checkAuthorFilter, SubredditResources} from "../Subreddit/SubredditResources";
import {ChecksActivityState, TypedActivityStates} from "../Common/interfaces";
import Author, {AuthorOptions} from "../Author/Author";
@@ -65,6 +65,7 @@ export abstract class Rule implements IRule, Triggerable {
name = this.getKind(),
logger,
authorIs: {
excludeCondition = 'OR',
include = [],
exclude = [],
} = {},
@@ -78,6 +79,7 @@ export abstract class Rule implements IRule, Triggerable {
this.client = client;
this.authorIs = {
excludeCondition,
exclude: exclude.map(x => new Author(x)),
include: include.map(x => new Author(x)),
}
@@ -99,23 +101,10 @@ export abstract class Rule implements IRule, Triggerable {
this.logger.verbose(`(Skipped) Item did not pass 'itemIs' test`);
return Promise.resolve([null, this.getResult(null, {result: `Item did not pass 'itemIs' test`})]);
}
if (this.authorIs.include !== undefined && this.authorIs.include.length > 0) {
for (const auth of this.authorIs.include) {
if (await this.resources.testAuthorCriteria(item, auth)) {
return this.process(item);
}
}
this.logger.verbose('(Skipped) Inclusive author criteria not matched');
return Promise.resolve([null, this.getResult(null, {result: 'Inclusive author criteria not matched'})]);
}
if (this.authorIs.exclude !== undefined && this.authorIs.exclude.length > 0) {
for (const auth of this.authorIs.exclude) {
if (await this.resources.testAuthorCriteria(item, auth, false)) {
return this.process(item);
}
}
this.logger.verbose('(Skipped) Exclusive author criteria not matched');
return Promise.resolve([null, this.getResult(null, {result: 'Exclusive author criteria not matched'})]);
const [authFilterResult, authFilterType] = await checkAuthorFilter(item, this.authorIs, this.resources, this.logger);
if(!authFilterResult) {
this.logger.verbose(`(Skipped) ${authFilterType} Author criteria not matched`);
return Promise.resolve([null, this.getResult(null, {result: `${authFilterType} author criteria not matched`})]);
}
} catch (err: any) {
this.logger.error('Error occurred during Rule pre-process checks');

View File

@@ -50,24 +50,52 @@
]
},
"flairCssClass": {
"description": "A list of (user) flair css class values from the subreddit to match against",
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair css class (or list of) from the subreddit to match against",
"examples": [
"red"
]
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"items": {
"type": "string"
},
"type": "array"
"description": "A (user) flair template id (or list of) from the subreddit to match against"
},
"flairText": {
"description": "A list of (user) flair text values from the subreddit to match against",
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair text value (or list of) from the subreddit to match against",
"examples": [
"Approved"
],
"items": {
"type": "string"
},
"type": "array"
]
},
"isMod": {
"description": "Is the author a moderator?",
@@ -131,12 +159,21 @@
],
"properties": {
"exclude": {
"description": "Only runs if `include` is not present. Will \"pass\" if any of set of the AuthorCriteria **does not** pass",
"description": "Only runs if `include` is not present. Each AuthorCriteria is comprised of conditions that the Author being checked must \"not\" pass. See excludeCondition for set behavior\n\nEX: `isMod: true, name: Automoderator` => Will pass if the Author IS NOT a mod and IS NOT named Automoderator",
"items": {
"$ref": "#/definitions/AuthorCriteria"
},
"type": "array"
},
"excludeCondition": {
"default": "OR",
"description": "* OR => if ANY exclude condition \"does not\" pass then the exclude test passes\n* AND => if ALL exclude conditions \"do not\" pass then the exclude test passes\n\nDefaults to OR",
"enum": [
"AND",
"OR"
],
"type": "string"
},
"include": {
"description": "Will \"pass\" if any set of AuthorCriteria passes",
"items": {
@@ -240,14 +277,47 @@
"filtered": {
"type": "boolean"
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"is_self": {
"type": "boolean"
},
"link_flair_css_class": {
"type": "string"
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"link_flair_text": {
"type": "string"
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"locked": {
"type": "boolean"

View File

@@ -222,6 +222,17 @@
],
"pattern": "^[a-zA-Z]([\\w -]*[\\w])?$",
"type": "string"
},
"targets": {
"description": "Specify which Activities to approve\n\nThis setting is only applicable if the Activity being acted on is a **comment**. On a **submission** the setting does nothing\n\n* self => approve activity being checked (comment)\n* parent => approve parent (submission) of activity being checked (comment)",
"items": {
"enum": [
"parent",
"self"
],
"type": "string"
},
"type": "array"
}
},
"required": [
@@ -505,24 +516,52 @@
]
},
"flairCssClass": {
"description": "A list of (user) flair css class values from the subreddit to match against",
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair css class (or list of) from the subreddit to match against",
"examples": [
"red"
]
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"items": {
"type": "string"
},
"type": "array"
"description": "A (user) flair template id (or list of) from the subreddit to match against"
},
"flairText": {
"description": "A list of (user) flair text values from the subreddit to match against",
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair text value (or list of) from the subreddit to match against",
"examples": [
"Approved"
],
"items": {
"type": "string"
},
"type": "array"
]
},
"isMod": {
"description": "Is the author a moderator?",
@@ -586,12 +625,21 @@
],
"properties": {
"exclude": {
"description": "Only runs if `include` is not present. Will \"pass\" if any of set of the AuthorCriteria **does not** pass",
"description": "Only runs if `include` is not present. Each AuthorCriteria is comprised of conditions that the Author being checked must \"not\" pass. See excludeCondition for set behavior\n\nEX: `isMod: true, name: Automoderator` => Will pass if the Author IS NOT a mod and IS NOT named Automoderator",
"items": {
"$ref": "#/definitions/AuthorCriteria"
},
"type": "array"
},
"excludeCondition": {
"default": "OR",
"description": "* OR => if ANY exclude condition \"does not\" pass then the exclude test passes\n* AND => if ALL exclude conditions \"do not\" pass then the exclude test passes\n\nDefaults to OR",
"enum": [
"AND",
"OR"
],
"type": "string"
},
"include": {
"description": "Will \"pass\" if any set of AuthorCriteria passes",
"items": {
@@ -981,25 +1029,6 @@
],
"type": "string"
},
"ClearProcessedOptions": {
"description": "For very long-running, high-volume subreddits clearing the list of processed activities helps manage memory bloat\n\nAll of these options have default values based on the limit and/or interval set for polling options on each subreddit stream. They only need to modified if the defaults are not sufficient.\n\nIf both `after` and `size` are defined whichever is hit first will trigger the list to clear. `after` will be reset after ever clear.",
"properties": {
"after": {
"description": "An interval the processed list should be cleared after.\n\n* EX `9 days`\n* EX `3 months`\n* EX `5 minutes`",
"pattern": "^\\s*(?<time>\\d+)\\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\\s*$",
"type": "string"
},
"retain": {
"description": "The number of activities to retain in processed list after clearing.\n\nDefaults to `limit` value from `PollingOptions`",
"type": "number"
},
"size": {
"description": "Number of activities found in processed list after which the list should be cleared.\n\nDefaults to the `limit` value from `PollingOptions`",
"type": "number"
}
},
"type": "object"
},
"CommentActionJson": {
"description": "Reply to the Activity. For a submission the reply will be a top-level comment.",
"properties": {
@@ -1436,6 +1465,61 @@
},
"type": "object"
},
"FilterCriteriaDefaults": {
"properties": {
"authorIs": {
"$ref": "#/definitions/AuthorOptions",
"description": "Determine how authorIs defaults behave when authorIs is present on the check\n\n* merge => merges defaults with check's authorIs\n* replace => check authorIs will replace defaults (no defaults used)",
"examples": [
{
"include": [
{
"flairText": [
"Contributor",
"Veteran"
]
},
{
"isMod": true
}
]
}
]
},
"authorIsBehavior": {
"enum": [
"merge",
"replace"
],
"type": "string"
},
"itemIs": {
"anyOf": [
{
"items": {
"$ref": "#/definitions/SubmissionState"
},
"type": "array"
},
{
"items": {
"$ref": "#/definitions/CommentState"
},
"type": "array"
}
]
},
"itemIsBehavior": {
"description": "Determine how itemIs defaults behave when itemIs is present on the check\n\n* merge => adds defaults to check's itemIs\n* replace => check itemIs will replace defaults (no defaults used)",
"enum": [
"merge",
"replace"
],
"type": "string"
}
},
"type": "object"
},
"FlairActionJson": {
"description": "Flair the Submission",
"properties": {
@@ -2110,10 +2194,6 @@
}
],
"properties": {
"clearProcessed": {
"$ref": "#/definitions/ClearProcessedOptions",
"description": "For very long-running, high-volume subreddits clearing the list of processed activities helps manage memory bloat\n\nAll of these options have default values based on the limit and/or interval set for polling options on each subreddit stream. They only need to modified if the defaults are not sufficient.\n\nIf both `after` and `size` are defined whichever is hit first will trigger the list to clear. `after` will be reset after ever clear."
},
"delayUntil": {
"description": "Delay processing Activity until it is `N` seconds old\n\nUseful if there are other bots that may process an Activity and you want this bot to run first/last/etc.\n\nIf the Activity is already `N` seconds old when it is initially retrieved no refresh of the Activity occurs (no API request is made) and it is immediately processed.",
"type": "number"
@@ -3398,14 +3478,47 @@
"filtered": {
"type": "boolean"
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"is_self": {
"type": "boolean"
},
"link_flair_css_class": {
"type": "string"
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"link_flair_text": {
"type": "string"
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"locked": {
"type": "boolean"
@@ -3798,6 +3911,10 @@
],
"type": "boolean"
},
"filterCriteriaDefaults": {
"$ref": "#/definitions/FilterCriteriaDefaults",
"description": "Set the default filter criteria for all checks. If this property is specified it will override any defaults passed from the bot's config\n\nDefault behavior is to exclude all mods and automoderator from checks"
},
"footer": {
"anyOf": [
{

View File

@@ -1,6 +1,189 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"AuthorCriteria": {
"additionalProperties": false,
"description": "Criteria with which to test against the author of an Activity. The outcome of the test is based on:\n\n1. All present properties passing and\n2. If a property is a list then any value from the list matching",
"examples": [
{
"flairText": [
"Contributor",
"Veteran"
],
"isMod": true,
"name": [
"FoxxMD",
"AnotherUser"
]
}
],
"minProperties": 1,
"properties": {
"age": {
"description": "Test the age of the Author's account (when it was created) against this comparison\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>`\n\n* EX `> 100 days` => Passes if Author's account is older than 100 days\n* EX `<= 2 months` => Passes if Author's account is younger than or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\\s*$",
"type": "string"
},
"commentKarma": {
"description": "A string containing a comparison operator and a value to compare karma against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 comment karma\n* EX `<= 75%` => comment karma is less than or equal to 75% of **all karma**",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"description": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "An (array of) string/regular expression to test contents of an Author's profile description against\n\nIf no flags are specified then the **insensitive** flag is used by default\n\nIf using an array then if **any** value in the array passes the description test passes",
"examples": [
[
"/test$/i",
"look for this string literal"
]
]
},
"flairCssClass": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair css class (or list of) from the subreddit to match against",
"examples": [
"red"
]
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair template id (or list of) from the subreddit to match against"
},
"flairText": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair text value (or list of) from the subreddit to match against",
"examples": [
"Approved"
]
},
"isMod": {
"description": "Is the author a moderator?",
"type": "boolean"
},
"linkKarma": {
"description": "A string containing a comparison operator and a value to compare link karma against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 link karma\n* EX `<= 75%` => link karma is less than or equal to 75% of **all karma**",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"name": {
"description": "A list of reddit usernames (case-insensitive) to match against. Do not include the \"u/\" prefix\n\n EX to match against /u/FoxxMD and /u/AnotherUser use [\"FoxxMD\",\"AnotherUser\"]",
"examples": [
"FoxxMD",
"AnotherUser"
],
"items": {
"type": "string"
},
"type": "array"
},
"shadowBanned": {
"description": "Is the author shadowbanned?\n\nThis is determined by trying to retrieve the author's profile. If a 404 is returned it is likely they are shadowbanned",
"type": "boolean"
},
"totalKarma": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"userNotes": {
"description": "A list of UserNote properties to check against the User Notes attached to this Author in this Subreddit (must have Toolbox enabled and used User Notes at least once)",
"items": {
"$ref": "#/definitions/UserNoteCriteria"
},
"type": "array"
},
"verified": {
"description": "Does Author's account have a verified email?",
"type": "boolean"
}
},
"type": "object"
},
"AuthorOptions": {
"description": "If present then these Author criteria are checked before running the rule. If criteria fails then the rule is skipped.",
"examples": [
{
"include": [
{
"flairText": [
"Contributor",
"Veteran"
]
},
{
"isMod": true
}
]
}
],
"properties": {
"exclude": {
"description": "Only runs if `include` is not present. Each AuthorCriteria is comprised of conditions that the Author being checked must \"not\" pass. See excludeCondition for set behavior\n\nEX: `isMod: true, name: Automoderator` => Will pass if the Author IS NOT a mod and IS NOT named Automoderator",
"items": {
"$ref": "#/definitions/AuthorCriteria"
},
"type": "array"
},
"excludeCondition": {
"default": "OR",
"description": "* OR => if ANY exclude condition \"does not\" pass then the exclude test passes\n* AND => if ALL exclude conditions \"do not\" pass then the exclude test passes\n\nDefaults to OR",
"enum": [
"AND",
"OR"
],
"type": "string"
},
"include": {
"description": "Will \"pass\" if any set of AuthorCriteria passes",
"items": {
"$ref": "#/definitions/AuthorCriteria"
},
"type": "array"
}
},
"type": "object"
},
"BotConnection": {
"description": "Configuration required to connect to a CM Server",
"properties": {
@@ -58,6 +241,10 @@
}
]
},
"filterCriteriaDefaults": {
"$ref": "#/definitions/FilterCriteriaDefaults",
"description": "Define the default behavior for all filter criteria on all checks in all subreddits\n\nDefaults to exclude mods and automoderator from checks"
},
"name": {
"type": "string"
},
@@ -94,13 +281,36 @@
},
{
"properties": {
"shared": {
"anyOf": [
{
"items": {
"enum": [
"modqueue",
"newComm",
"newSub",
"unmoderated"
],
"type": "string"
},
"type": "array"
},
{
"enum": [
true
],
"type": "boolean"
}
],
"description": "Set which polling sources should be shared among subreddits using default polling settings for that source\n\n* For `unmoderated and `modqueue` the bot will poll on **r/mod** for new activities\n* For `newSub` and `newComm` all subreddits sharing the source will be combined to poll like **r/subreddit1+subreddit2/new**\n\nIf set to `true` all polling sources will be shared, otherwise specify which sourcs should be shared as a list"
},
"sharedMod": {
"default": false,
"description": "If set to `true` all subreddits polling unmoderated/modqueue with default polling settings will share a request to \"r/mod\"\notherwise each subreddit will poll its own mod view\n\n* ENV => `SHARE_MOD`\n* ARG => `--shareMod`",
"description": "DEPRECATED: See `shared`\n\n Using the ENV or ARG will sett `unmoderated` and `modqueue` on `shared`\n\n* ENV => `SHARE_MOD`\n* ARG => `--shareMod`",
"type": "boolean"
},
"stagger": {
"description": "If sharing a mod stream stagger pushing relevant Activities to individual subreddits.\n\nUseful when running many subreddits and rules are potentially cpu/memory/traffic heavy -- allows spreading out load",
"description": "If sharing a stream staggers pushing relevant Activities to individual subreddits.\n\nUseful when running many subreddits and rules are potentially cpu/memory/traffic heavy -- allows spreading out load",
"type": "number"
}
},
@@ -253,6 +463,73 @@
],
"type": "string"
},
"CommentState": {
"description": "Different attributes a `Comment` can be in. Only include a property if you want to check it.",
"examples": [
{
"op": true,
"removed": false
}
],
"properties": {
"age": {
"description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
"type": "string"
},
"approved": {
"type": "boolean"
},
"deleted": {
"type": "boolean"
},
"depth": {
"description": "The (nested) level of a comment.\n\n* 0 mean the comment is at top-level (replying to submission)\n* non-zero, Nth value means the comment has N parent comments",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
"type": "string"
},
"distinguished": {
"type": "boolean"
},
"filtered": {
"type": "boolean"
},
"locked": {
"type": "boolean"
},
"op": {
"description": "Is this Comment Author also the Author of the Submission this comment is in?",
"type": "boolean"
},
"removed": {
"type": "boolean"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"score": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"spam": {
"type": "boolean"
},
"stickied": {
"type": "boolean"
},
"submissionState": {
"description": "A list of SubmissionState attributes to test the Submission this comment is in",
"items": {
"$ref": "#/definitions/SubmissionState"
},
"type": "array"
}
},
"type": "object"
},
"DiscordProviderConfig": {
"properties": {
"name": {
@@ -275,6 +552,61 @@
],
"type": "object"
},
"FilterCriteriaDefaults": {
"properties": {
"authorIs": {
"$ref": "#/definitions/AuthorOptions",
"description": "Determine how authorIs defaults behave when authorIs is present on the check\n\n* merge => merges defaults with check's authorIs\n* replace => check authorIs will replace defaults (no defaults used)",
"examples": [
{
"include": [
{
"flairText": [
"Contributor",
"Veteran"
]
},
{
"isMod": true
}
]
}
]
},
"authorIsBehavior": {
"enum": [
"merge",
"replace"
],
"type": "string"
},
"itemIs": {
"anyOf": [
{
"items": {
"$ref": "#/definitions/SubmissionState"
},
"type": "array"
},
{
"items": {
"$ref": "#/definitions/CommentState"
},
"type": "array"
}
]
},
"itemIsBehavior": {
"description": "Determine how itemIs defaults behave when itemIs is present on the check\n\n* merge => adds defaults to check's itemIs\n* replace => check itemIs will replace defaults (no defaults used)",
"enum": [
"merge",
"replace"
],
"type": "string"
}
},
"type": "object"
},
"NotificationConfig": {
"properties": {
"events": {
@@ -543,6 +875,113 @@
},
"type": "object"
},
"SubmissionState": {
"description": "Different attributes a `Submission` can be in. Only include a property if you want to check it.",
"examples": [
{
"over_18": true,
"removed": false
}
],
"properties": {
"age": {
"description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
"type": "string"
},
"approved": {
"type": "boolean"
},
"deleted": {
"type": "boolean"
},
"distinguished": {
"type": "boolean"
},
"filtered": {
"type": "boolean"
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"is_self": {
"type": "boolean"
},
"link_flair_css_class": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"link_flair_text": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"locked": {
"type": "boolean"
},
"over_18": {
"description": "NSFW",
"type": "boolean"
},
"pinned": {
"type": "boolean"
},
"removed": {
"type": "boolean"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 2` => greater than 2 total reports\n\nDefaults to TOTAL reports on an Activity. Suffix the value with the report type to check that type:\n\n* EX `> 3 mod` => greater than 3 mod reports\n* EX `>= 1 user` => greater than 1 user report",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"score": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"spam": {
"type": "boolean"
},
"spoiler": {
"type": "boolean"
},
"stickied": {
"type": "boolean"
},
"title": {
"description": "A valid regular expression to match against the title of the submission",
"type": "string"
}
},
"type": "object"
},
"ThirdPartyCredentialsJsonConfig": {
"additionalProperties": {
},
@@ -561,6 +1000,43 @@
},
"type": "object"
},
"UserNoteCriteria": {
"properties": {
"count": {
"default": ">= 1",
"description": "Number of occurrences of this type. Ignored if `search` is `current`\n\nA string containing a comparison operator and/or a value to compare number of occurrences against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [ascending|descending]`",
"examples": [
">= 1"
],
"pattern": "^\\s*(?<opStr>>|>=|<|<=)\\s*(?<value>\\d+)\\s*(?<percent>%?)\\s*(?<extra>asc.*|desc.*)*$",
"type": "string"
},
"search": {
"default": "current",
"description": "How to test the notes for this Author:\n\n### current\n\nOnly the most recent note is checked for `type`\n\n### total\n\nThe `count` comparison of `type` must be found within all notes\n\n* EX `count: > 3` => Must have more than 3 notes of `type`, total\n* EX `count: <= 25%` => Must have 25% or less of notes of `type`, total\n\n### consecutive\n\nThe `count` **number** of `type` notes must be found in a row.\n\nYou may also specify the time-based order in which to search the notes by specifying `ascending (asc)` or `descending (desc)` in the `count` value. Default is `descending`\n\n* EX `count: >= 3` => Must have 3 or more notes of `type` consecutively, in descending order\n* EX `count: < 2` => Must have less than 2 notes of `type` consecutively, in descending order\n* EX `count: > 4 asc` => Must have greater than 4 notes of `type` consecutively, in ascending order",
"enum": [
"consecutive",
"current",
"total"
],
"examples": [
"current"
],
"type": "string"
},
"type": {
"description": "User Note type key to search for",
"examples": [
"spamwarn"
],
"type": "string"
}
},
"required": [
"type"
],
"type": "object"
},
"WebCredentials": {
"description": "Separate credentials for the web interface can be provided when also running the api.\n\nAll properties not specified will default to values given in ENV/ARG credential properties\n\nRefer to the [required credentials table](https://github.com/FoxxMD/context-mod/blob/master/docs/operatorConfiguration.md#minimum-required-configuration) to see what is necessary for the web interface.",
"examples": [

View File

@@ -454,24 +454,52 @@
]
},
"flairCssClass": {
"description": "A list of (user) flair css class values from the subreddit to match against",
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair css class (or list of) from the subreddit to match against",
"examples": [
"red"
]
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"items": {
"type": "string"
},
"type": "array"
"description": "A (user) flair template id (or list of) from the subreddit to match against"
},
"flairText": {
"description": "A list of (user) flair text values from the subreddit to match against",
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair text value (or list of) from the subreddit to match against",
"examples": [
"Approved"
],
"items": {
"type": "string"
},
"type": "array"
]
},
"isMod": {
"description": "Is the author a moderator?",
@@ -535,12 +563,21 @@
],
"properties": {
"exclude": {
"description": "Only runs if `include` is not present. Will \"pass\" if any of set of the AuthorCriteria **does not** pass",
"description": "Only runs if `include` is not present. Each AuthorCriteria is comprised of conditions that the Author being checked must \"not\" pass. See excludeCondition for set behavior\n\nEX: `isMod: true, name: Automoderator` => Will pass if the Author IS NOT a mod and IS NOT named Automoderator",
"items": {
"$ref": "#/definitions/AuthorCriteria"
},
"type": "array"
},
"excludeCondition": {
"default": "OR",
"description": "* OR => if ANY exclude condition \"does not\" pass then the exclude test passes\n* AND => if ALL exclude conditions \"do not\" pass then the exclude test passes\n\nDefaults to OR",
"enum": [
"AND",
"OR"
],
"type": "string"
},
"include": {
"description": "Will \"pass\" if any set of AuthorCriteria passes",
"items": {
@@ -1913,14 +1950,47 @@
"filtered": {
"type": "boolean"
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"is_self": {
"type": "boolean"
},
"link_flair_css_class": {
"type": "string"
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"link_flair_text": {
"type": "string"
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"locked": {
"type": "boolean"

View File

@@ -428,24 +428,52 @@
]
},
"flairCssClass": {
"description": "A list of (user) flair css class values from the subreddit to match against",
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair css class (or list of) from the subreddit to match against",
"examples": [
"red"
]
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"items": {
"type": "string"
},
"type": "array"
"description": "A (user) flair template id (or list of) from the subreddit to match against"
},
"flairText": {
"description": "A list of (user) flair text values from the subreddit to match against",
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair text value (or list of) from the subreddit to match against",
"examples": [
"Approved"
],
"items": {
"type": "string"
},
"type": "array"
]
},
"isMod": {
"description": "Is the author a moderator?",
@@ -509,12 +537,21 @@
],
"properties": {
"exclude": {
"description": "Only runs if `include` is not present. Will \"pass\" if any of set of the AuthorCriteria **does not** pass",
"description": "Only runs if `include` is not present. Each AuthorCriteria is comprised of conditions that the Author being checked must \"not\" pass. See excludeCondition for set behavior\n\nEX: `isMod: true, name: Automoderator` => Will pass if the Author IS NOT a mod and IS NOT named Automoderator",
"items": {
"$ref": "#/definitions/AuthorCriteria"
},
"type": "array"
},
"excludeCondition": {
"default": "OR",
"description": "* OR => if ANY exclude condition \"does not\" pass then the exclude test passes\n* AND => if ALL exclude conditions \"do not\" pass then the exclude test passes\n\nDefaults to OR",
"enum": [
"AND",
"OR"
],
"type": "string"
},
"include": {
"description": "Will \"pass\" if any set of AuthorCriteria passes",
"items": {
@@ -1887,14 +1924,47 @@
"filtered": {
"type": "boolean"
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"is_self": {
"type": "boolean"
},
"link_flair_css_class": {
"type": "string"
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"link_flair_text": {
"type": "string"
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"locked": {
"type": "boolean"

View File

@@ -18,17 +18,15 @@ import {
totalFromMapStats,
triggeredIndicator,
} from "../util";
import {Poll} from "snoostorm";
import pEvent from "p-event";
import {RuleResult} from "../Rule";
import {ConfigBuilder, buildPollingOptions} from "../ConfigBuilder";
import {
ActionedEvent,
ActionResult,
DEFAULT_POLLING_INTERVAL,
DEFAULT_POLLING_LIMIT, Invokee,
DEFAULT_POLLING_LIMIT, FilterCriteriaDefaults, Invokee,
ManagerOptions, ManagerStateChangeOption, ManagerStats, PAUSED,
PollingOptionsStrong, ResourceStats, RUNNING, RunState, STOPPED, SYSTEM, USER
PollingOptionsStrong, PollOn, RUNNING, RunState, STOPPED, SYSTEM, USER
} from "../Common/interfaces";
import Submission from "snoowrap/dist/objects/Submission";
import {activityIsRemoved, itemContentPeek} from "../Utils/SnoowrapUtils";
@@ -48,7 +46,6 @@ import {queue, QueueObject} from 'async';
import {JSONConfig} from "../JsonConfig";
import {CheckStructuredJson} from "../Check";
import NotificationManager from "../Notification/NotificationManager";
import action from "../Web/Server/routes/authenticated/user/action";
import {createHistoricalDefaults, historicalDefaults} from "../Common/defaults";
import {ExtendedSnoowrap} from "../Utils/SnoowrapClients";
import {isRateLimitError, isStatusError} from "../Utils/Errors";
@@ -73,7 +70,7 @@ export interface CheckTask {
}
export interface RuntimeManagerOptions extends ManagerOptions {
sharedModqueue?: boolean;
sharedStreams?: PollOn[];
wikiLocation?: string;
botName: string;
maxWorkers: number;
@@ -98,13 +95,14 @@ export class Manager extends EventEmitter {
lastWikiRevision?: DayjsObj
lastWikiCheck: DayjsObj = dayjs();
wikiFormat: ('yaml' | 'json') = 'yaml';
filterCriteriaDefaults?: FilterCriteriaDefaults
//wikiUpdateRunning: boolean = false;
streamListedOnce: string[] = [];
streams: SPoll<Snoowrap.Submission | Snoowrap.Comment>[] = [];
modStreamCallbacks: Map<string, any> = new Map();
streams: Map<string, SPoll<Snoowrap.Submission | Snoowrap.Comment>> = new Map();
sharedStreamCallbacks: Map<string, any> = new Map();
pollingRetryHandler: Function;
dryRun?: boolean;
sharedModqueue: boolean;
sharedStreams: PollOn[];
cacheManager: BotResourcesManager;
globalDryRun?: boolean;
queue: QueueObject<CheckTask>;
@@ -198,7 +196,7 @@ export class Manager extends EventEmitter {
constructor(sub: Subreddit, client: ExtendedSnoowrap, logger: Logger, cacheManager: BotResourcesManager, opts: RuntimeManagerOptions = {botName: 'ContextMod', maxWorkers: 1}) {
super();
const {dryRun, sharedModqueue = false, wikiLocation = 'botconfig/contextbot', botName, maxWorkers} = opts;
const {dryRun, sharedStreams = [], wikiLocation = 'botconfig/contextbot', botName, maxWorkers, filterCriteriaDefaults} = opts;
this.displayLabel = opts.nickname || `${sub.display_name_prefixed}`;
const getLabels = this.getCurrentLabels;
const getDisplay = this.getDisplay;
@@ -214,7 +212,9 @@ export class Manager extends EventEmitter {
}, mergeArr);
this.globalDryRun = dryRun;
this.wikiLocation = wikiLocation;
this.sharedModqueue = sharedModqueue;
this.filterCriteriaDefaults = filterCriteriaDefaults;
this.sharedStreams = sharedStreams;
this.pollingRetryHandler = createRetryHandler({maxRequestRetry: 3, maxOtherRetry: 2}, this.logger);
this.subreddit = sub;
this.client = client;
this.botName = botName;
@@ -359,7 +359,7 @@ export class Manager extends EventEmitter {
return q;
}
protected async parseConfigurationFromObject(configObj: object) {
protected async parseConfigurationFromObject(configObj: object, suppressChangeEvent: boolean = false) {
try {
const configBuilder = new ConfigBuilder({logger: this.logger});
const validJson = configBuilder.validateJson(configObj);
@@ -419,7 +419,7 @@ export class Manager extends EventEmitter {
const commentChecks: Array<CommentCheck> = [];
const subChecks: Array<SubmissionCheck> = [];
const structuredChecks = configBuilder.parseToStructured(validJson);
const structuredChecks = configBuilder.parseToStructured(validJson, this.filterCriteriaDefaults);
// TODO check that bot has permissions for subreddit for all specified actions
// can find permissions in this.subreddit.mod_permissions
@@ -449,6 +449,19 @@ export class Manager extends EventEmitter {
this.logger.info(checkSummary);
}
this.validConfigLoaded = true;
if(!suppressChangeEvent) {
this.emit('configChange');
}
if(this.eventsState.state === RUNNING) {
// need to update polling, potentially
await this.buildPolling();
for(const stream of this.streams.values()) {
if(!stream.running) {
this.logger.debug(`Starting Polling for ${stream.name.toUpperCase()} ${stream.frequency / 1000}s interval`);
stream.startInterval();
}
}
}
} catch (err: any) {
this.validConfigLoaded = false;
throw err;
@@ -456,7 +469,7 @@ export class Manager extends EventEmitter {
}
async parseConfiguration(causedBy: Invokee = 'system', force: boolean = false, options?: ManagerStateChangeOption) {
const {reason, suppressNotification = false} = options || {};
const {reason, suppressNotification = false, suppressChangeEvent = false} = options || {};
//this.wikiUpdateRunning = true;
this.lastWikiCheck = dayjs();
@@ -541,14 +554,8 @@ export class Manager extends EventEmitter {
throw new ConfigParseError('Wiki page contents was empty');
}
const [configObj, jsonErr, yamlErr] = parseFromJsonOrYamlToObject(sourceData);
if (jsonErr === undefined) {
this.wikiFormat = 'json';
} else if (yamlErr === undefined) {
this.wikiFormat = 'yaml';
} else {
this.wikiFormat = likelyJson5(sourceData) ? 'json' : 'yaml';
}
const [format, configObj, jsonErr, yamlErr] = parseFromJsonOrYamlToObject(sourceData);
this.wikiFormat = format;
if (configObj === undefined) {
this.logger.error(`Could not parse wiki page contents as JSON or YAML. Looks like it should be ${this.wikiFormat}?`);
@@ -564,7 +571,7 @@ export class Manager extends EventEmitter {
throw new ConfigParseError('Could not parse wiki page contents as JSON or YAML')
}
await this.parseConfigurationFromObject(configObj);
await this.parseConfigurationFromObject(configObj.toJS(), suppressChangeEvent);
this.logger.info('Checks updated');
if(!suppressNotification) {
@@ -768,132 +775,197 @@ export class Manager extends EventEmitter {
}
}
async buildPolling() {
// give current handle() time to stop
//await sleep(1000);
isPollingShared(streamName: string): boolean {
const pollOption = this.pollOptions.find(x => x.pollOn === streamName);
return pollOption !== undefined && pollOption.limit === DEFAULT_POLLING_LIMIT && pollOption.interval === DEFAULT_POLLING_INTERVAL && this.sharedStreams.includes(streamName as PollOn);
}
const retryHandler = createRetryHandler({maxRequestRetry: 3, maxOtherRetry: 1}, this.logger);
async buildPolling() {
const sources: PollOn[] = ['unmoderated', 'modqueue', 'newComm', 'newSub'];
const subName = this.subreddit.display_name;
for (const pollOpt of this.pollOptions) {
const {
pollOn,
limit,
interval,
delayUntil,
clearProcessed,
} = pollOpt;
let stream: SPoll<Snoowrap.Submission | Snoowrap.Comment>;
let modStreamType: string | undefined;
for (const source of sources) {
switch (pollOn) {
case 'unmoderated':
if (limit === DEFAULT_POLLING_LIMIT && interval === DEFAULT_POLLING_INTERVAL && this.sharedModqueue) {
modStreamType = 'unmoderated';
// use default mod stream from resources
stream = this.cacheManager.modStreams.get('unmoderated') as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
} else {
stream = new UnmoderatedStream(this.client, {
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
clearProcessed,
});
}
break;
case 'modqueue':
if (limit === DEFAULT_POLLING_LIMIT && interval === DEFAULT_POLLING_INTERVAL) {
modStreamType = 'modqueue';
// use default mod stream from resources
stream = this.cacheManager.modStreams.get('modqueue') as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
} else {
stream = new ModQueueStream(this.client, {
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
clearProcessed
});
}
break;
case 'newSub':
stream = new SubmissionStream(this.client, {
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
clearProcessed
});
break;
case 'newComm':
stream = new CommentStream(this.client, {
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
clearProcessed
});
break;
if (!sources.includes(source)) {
this.logger.error(`'${source}' is not a valid polling source. Valid sources: unmoderated | modqueue | newComm | newSub`);
continue;
}
stream.once('listing', async (listing) => {
if (!this.streamListedOnce.includes(pollOn)) {
// warning if poll event could potentially miss activities
if (this.commentChecks.length === 0 && ['unmoderated', 'modqueue', 'newComm'].some(x => x === pollOn)) {
this.logger.warn(`Polling '${pollOn}' may return Comments but no comments checks were configured.`);
}
if (this.submissionChecks.length === 0 && ['unmoderated', 'modqueue', 'newSub'].some(x => x === pollOn)) {
this.logger.warn(`Polling '${pollOn}' may return Submissions but no submission checks were configured.`);
}
this.streamListedOnce.push(pollOn);
const pollOpt = this.pollOptions.find(x => x.pollOn.toLowerCase() === source.toLowerCase());
if (pollOpt === undefined) {
if(this.sharedStreamCallbacks.has(source)) {
this.logger.debug(`Removing listener for shared polling on ${source.toUpperCase()} because it no longer exists in config`);
this.sharedStreamCallbacks.delete(source);
}
});
const onItem = async (item: Comment | Submission) => {
if (!this.streamListedOnce.includes(pollOn)) {
return;
const existingStream = this.streams.get(source);
if (existingStream !== undefined) {
this.logger.debug(`Stopping polling on ${source.toUpperCase()} because it no longer exists in config`);
existingStream.end();
this.streams.delete(source);
}
if (item.subreddit.display_name !== subName || this.eventsState.state !== RUNNING) {
return;
}
let checkType: 'Submission' | 'Comment' | undefined;
if (item instanceof Submission) {
if (this.submissionChecks.length > 0) {
checkType = 'Submission';
}
} else if (this.commentChecks.length > 0) {
checkType = 'Comment';
}
if (checkType !== undefined) {
this.firehose.push({checkType, activity: item, options: {delayUntil}})
}
};
if (modStreamType !== undefined) {
this.modStreamCallbacks.set(pollOn, onItem);
} else {
stream.on('item', onItem);
// @ts-ignore
stream.on('error', async (err: any) => {
this.emit('error', err);
const {
limit,
interval,
delayUntil,
} = pollOpt;
let stream: SPoll<Snoowrap.Submission | Snoowrap.Comment>;
let modStreamType: string | undefined;
if(isRateLimitError(err)) {
this.logger.error('Encountered rate limit while polling! Bot is all out of requests :( Stopping subreddit queue and polling.');
await this.stop();
switch (source) {
case 'unmoderated':
if (limit === DEFAULT_POLLING_LIMIT && interval === DEFAULT_POLLING_INTERVAL && this.sharedStreams.includes(source)) {
modStreamType = 'unmoderated';
// use default mod stream from resources
stream = this.cacheManager.modStreams.get('unmoderated') as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
} else {
stream = new UnmoderatedStream(this.client, {
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
logger: this.logger,
});
}
break;
case 'modqueue':
if (limit === DEFAULT_POLLING_LIMIT && interval === DEFAULT_POLLING_INTERVAL && this.sharedStreams.includes(source)) {
modStreamType = 'modqueue';
// use default mod stream from resources
stream = this.cacheManager.modStreams.get('modqueue') as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
} else {
stream = new ModQueueStream(this.client, {
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
logger: this.logger,
});
}
break;
case 'newSub':
if (limit === DEFAULT_POLLING_LIMIT && interval === DEFAULT_POLLING_INTERVAL && this.sharedStreams.includes(source)) {
modStreamType = 'newSub';
// use default mod stream from resources
stream = this.cacheManager.modStreams.get('newSub') as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
} else {
stream = new SubmissionStream(this.client, {
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
logger: this.logger,
});
}
break;
case 'newComm':
if (limit === DEFAULT_POLLING_LIMIT && interval === DEFAULT_POLLING_INTERVAL && this.sharedStreams.includes(source)) {
modStreamType = 'newComm';
// use default mod stream from resources
stream = this.cacheManager.modStreams.get('newComm') as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
} else {
stream = new CommentStream(this.client, {
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
logger: this.logger,
});
}
break;
}
if (stream === undefined) {
this.logger.error(`Should have found polling source for '${source}' but it did not exist for some reason!`);
continue;
}
const onItem = async (item: Comment | Submission) => {
if (item.subreddit.display_name !== subName || this.eventsState.state !== RUNNING) {
return;
}
this.logger.error('Polling error occurred', err);
const shouldRetry = await retryHandler(err);
if (shouldRetry) {
stream.startInterval();
let checkType: 'Submission' | 'Comment' | undefined;
if (item instanceof Submission) {
if (this.submissionChecks.length > 0) {
checkType = 'Submission';
}
} else if (this.commentChecks.length > 0) {
checkType = 'Comment';
}
if (checkType !== undefined) {
this.firehose.push({checkType, activity: item, options: {delayUntil}})
}
};
if (modStreamType !== undefined) {
let removedOwn = false;
const existingStream = this.streams.get(source);
if(existingStream !== undefined) {
existingStream.end();
this.streams.delete(source);
removedOwn = true;
}
if(!this.sharedStreamCallbacks.has(source)) {
stream.once('listing', this.noChecksWarning(source));
this.sharedStreamCallbacks.set(source, onItem);
this.logger.debug(`${removedOwn ? 'Stopped own polling and replace with ' : 'Set '}listener on shared polling ${source}`);
}
} else {
let ownPollingMsgParts: string[] = [];
let removedShared = false;
if(this.sharedStreamCallbacks.has(source)) {
removedShared = true;
this.sharedStreamCallbacks.delete(source);
ownPollingMsgParts.push('removed shared polling listener');
}
const existingStream = this.streams.get(source);
let processed;
if (existingStream !== undefined) {
ownPollingMsgParts.push('replaced existing');
processed = existingStream.processed;
existingStream.end();
} else {
this.logger.warn('Stopping subreddit processing/polling due to too many errors');
await this.stop();
ownPollingMsgParts.push('create new');
stream.once('listing', this.noChecksWarning(source));
}
});
this.streams.push(stream);
this.logger.debug(`Polling ${source.toUpperCase()} => ${ownPollingMsgParts.join('and')} dedicated stream`);
stream.on('item', onItem);
// @ts-ignore
stream.on('error', async (err: any) => {
this.emit('error', err);
if (isRateLimitError(err)) {
this.logger.error('Encountered rate limit while polling! Bot is all out of requests :( Stopping subreddit queue and polling.');
await this.stop();
}
this.logger.error('Polling error occurred', err);
const shouldRetry = await this.pollingRetryHandler(err);
if (shouldRetry) {
stream.startInterval(false);
} else {
this.logger.warn('Stopping subreddit processing/polling due to too many errors');
await this.stop();
}
});
this.streams.set(source, stream);
}
}
}
}
// Returns a one-shot 'listing' listener that warns when a polling source can emit
// activity types (Comments/Submissions) for which no checks are configured —
// i.e. events would be fetched but silently dropped by the firehose.
noChecksWarning = (source: PollOn) => (listing: any) => {
    // Sources that can yield Comments
    const commentSources: string[] = ['modqueue', 'newComm'];
    if (commentSources.includes(source) && this.commentChecks.length === 0) {
        this.logger.warn(`Polling '${source.toUpperCase()}' may return Comments but no comments checks were configured.`);
    }
    // Sources that can yield Submissions
    const submissionSources: string[] = ['unmoderated', 'modqueue', 'newSub'];
    if (submissionSources.includes(source) && this.submissionChecks.length === 0) {
        this.logger.warn(`Polling '${source.toUpperCase()}' may return Submissions but no submission checks were configured.`);
    }
}
startQueue(causedBy: Invokee = 'system', options?: ManagerStateChangeOption) {
const {reason, suppressNotification = false} = options || {};
if(this.queueState.state === RUNNING) {
@@ -1019,7 +1091,10 @@ export class Manager extends EventEmitter {
this.logger.warn('No submission or comment checks found!');
}
for (const s of this.streams) {
if (this.streams.size > 0) {
this.logger.debug(`Starting own streams => ${[...this.streams.values()].map(x => `${x.name.toUpperCase()} ${x.frequency / 1000}s interval`).join(' | ')}`)
}
for (const s of this.streams.values()) {
s.startInterval();
}
this.startedAt = dayjs();
@@ -1044,7 +1119,7 @@ export class Manager extends EventEmitter {
state: PAUSED,
causedBy
};
for(const s of this.streams) {
for(const s of this.streams.values()) {
s.end();
}
if(causedBy === USER) {
@@ -1061,15 +1136,11 @@ export class Manager extends EventEmitter {
stopEvents(causedBy: Invokee = 'system', options?: ManagerStateChangeOption) {
const {reason, suppressNotification = false} = options || {};
if(this.eventsState.state !== STOPPED) {
for (const s of this.streams) {
for (const s of this.streams.values()) {
s.end();
}
this.streams = [];
// for (const [k, v] of this.modStreamCallbacks) {
// const stream = this.cacheManager.modStreams.get(k) as Poll<Snoowrap.Submission | Snoowrap.Comment>;
// stream.removeListener('item', v);
// }
this.modStreamCallbacks = new Map();
this.streams = new Map();
this.sharedStreamCallbacks = new Map();
this.startedAt = undefined;
this.logger.info(`Events STOPPED by ${causedBy}`);
this.eventsState = {

View File

@@ -2,20 +2,22 @@ import {Poll, SnooStormOptions} from "snoostorm"
import Snoowrap from "snoowrap";
import {EventEmitter} from "events";
import {PollConfiguration} from "snoostorm/out/util/Poll";
import {ClearProcessedOptions, DEFAULT_POLLING_INTERVAL} from "../Common/interfaces";
import dayjs, {Dayjs} from "dayjs";
import { Duration } from "dayjs/plugin/duration";
import {parseDuration, random} from "../util";
import {DEFAULT_POLLING_INTERVAL} from "../Common/interfaces";
import {mergeArr, parseDuration, random} from "../util";
import { Logger } from "winston";
type Awaitable<T> = Promise<T> | T;
interface RCBPollingOptions extends SnooStormOptions {
interface RCBPollingOptions<T> extends SnooStormOptions {
subreddit: string,
clearProcessed?: ClearProcessedOptions
enforceContinuity?: boolean
logger: Logger
name?: string,
processed?: Set<T[keyof T]>
label?: string
}
interface RCBPollConfiguration<T> extends PollConfiguration<T> {
clearProcessed?: ClearProcessedOptions
interface RCBPollConfiguration<T> extends PollConfiguration<T>,RCBPollingOptions<T> {
}
export class SPoll<T extends object> extends Poll<T> {
@@ -23,30 +25,43 @@ export class SPoll<T extends object> extends Poll<T> {
getter: () => Awaitable<T[]>;
frequency;
running: boolean = false;
clearProcessedDuration?: Duration;
clearProcessedSize?: number;
clearProcessedAfter?: Dayjs;
retainProcessed: number = 0;
// intention of newStart is to make polling behavior such that only "new" items AFTER polling has started get emitted
// -- that is, we don't want to emit the items we immediately fetch on a fresh poll start since they existed "before" polling started
newStart: boolean = true;
enforceContinuity: boolean;
randInterval?: { clear: () => void };
name: string = 'Reddit Stream';
logger: Logger;
subreddit: string;
constructor(options: RCBPollConfiguration<T>) {
super(options);
this.identifier = options.identifier;
this.getter = options.get;
this.frequency = options.frequency;
const {
after,
size,
retain = 0,
} = options.clearProcessed || {};
if(after !== undefined) {
this.clearProcessedDuration = parseDuration(after);
}
this.clearProcessedSize = size;
this.retainProcessed = retain;
if (this.clearProcessedDuration !== undefined) {
this.clearProcessedAfter = dayjs().add(this.clearProcessedDuration.asSeconds(), 's');
identifier,
get,
frequency,
enforceContinuity = false,
logger,
name,
subreddit,
label = 'Polling',
processed
} = options;
this.subreddit = subreddit;
this.name = name !== undefined ? name : this.name;
this.logger = logger.child({labels: [label, this.name]}, mergeArr)
this.identifier = identifier;
this.getter = get;
this.frequency = frequency;
this.enforceContinuity = enforceContinuity;
// if we pass in processed on init the intention is to "continue" from where the previous stream left off
// WITHOUT new start behavior
if (processed !== undefined) {
this.processed = processed;
this.newStart = false;
}
clearInterval(this.interval);
}
@@ -54,21 +69,47 @@ export class SPoll<T extends object> extends Poll<T> {
this.interval = setTimeout((function (self) {
return async () => {
try {
const batch = await self.getter();
self.logger.debug('Polling...');
let batch = await self.getter();
const newItems: T[] = [];
for (const item of batch) {
const id = item[self.identifier];
if (self.processed.has(id)) continue;
let anyAlreadySeen = false;
let page = 1;
// initial iteration should always run
// but only continue iterating if stream enforces continuity and we've only seen new items so far
while(page === 1 || (self.enforceContinuity && !self.newStart && !anyAlreadySeen)) {
if(page !== 1) {
self.logger.debug(`Did not find any already seen activities and continuity is enforced. This probably means there were more new items than 1 api call can return. Fetching next page (${page})...`);
// @ts-ignore
batch = await batch.fetchMore({amount: 100});
}
for (const item of batch) {
const id = item[self.identifier];
if (self.processed.has(id)) {
anyAlreadySeen = true;
continue;
}
// Emit for new items and add it to the list
newItems.push(item);
self.processed.add(id);
self.emit("item", item);
// Emit for new items and add it to the list
newItems.push(item);
self.processed.add(id);
// but don't emit on new start since we are "buffering" already existing activities
if(!self.newStart) {
self.emit("item", item);
}
}
page++;
}
// Emit the new listing of all new items
self.emit("listing", newItems);
const newItemMsg = `Found ${newItems.length} new items`;
if(self.newStart) {
self.logger.debug(`${newItemMsg} but will ignore all on first start.`);
self.emit("listing", []);
} else {
self.logger.debug(newItemMsg);
// Emit the new listing of all new items
self.emit("listing", newItems);
}
// no longer new start on n+1 interval
self.newStart = false;
// if everything succeeded then create a new timeout
self.createInterval();
} catch (err: any) {
@@ -78,13 +119,18 @@ export class SPoll<T extends object> extends Poll<T> {
})(this), random(this.frequency - 1, this.frequency + 1));
}
startInterval = () => {
// allow controlling newStart state
startInterval = (newStartState?: boolean) => {
this.running = true;
if(newStartState !== undefined) {
this.newStart = newStartState;
}
this.createInterval();
}
end = () => {
this.running = false;
this.newStart = true;
super.end();
}
}
@@ -92,12 +138,13 @@ export class SPoll<T extends object> extends Poll<T> {
export class UnmoderatedStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
constructor(
client: Snoowrap,
options: RCBPollingOptions) {
options: RCBPollingOptions<Snoowrap.Submission | Snoowrap.Comment>) {
super({
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
get: async () => client.getSubreddit(options.subreddit).getUnmoderated(options),
identifier: "id",
clearProcessed: options.clearProcessed
name: 'Unmoderated',
...options,
});
}
}
@@ -105,12 +152,13 @@ export class UnmoderatedStream extends SPoll<Snoowrap.Submission | Snoowrap.Comm
export class ModQueueStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
constructor(
client: Snoowrap,
options: RCBPollingOptions) {
options: RCBPollingOptions<Snoowrap.Submission | Snoowrap.Comment>) {
super({
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
get: async () => client.getSubreddit(options.subreddit).getModqueue(options),
identifier: "id",
clearProcessed: options.clearProcessed
name: 'Modqueue',
...options,
});
}
}
@@ -118,12 +166,13 @@ export class ModQueueStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment
export class SubmissionStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
constructor(
client: Snoowrap,
options: RCBPollingOptions) {
options: RCBPollingOptions<Snoowrap.Submission | Snoowrap.Comment>) {
super({
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
get: async () => client.getNew(options.subreddit, options),
identifier: "id",
clearProcessed: options.clearProcessed
name: 'Submission',
...options,
});
}
}
@@ -131,12 +180,13 @@ export class SubmissionStream extends SPoll<Snoowrap.Submission | Snoowrap.Comme
export class CommentStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
constructor(
client: Snoowrap,
options: RCBPollingOptions) {
options: RCBPollingOptions<Snoowrap.Submission | Snoowrap.Comment>) {
super({
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
get: async () => client.getNewComments(options.subreddit, options),
identifier: "id",
clearProcessed: options.clearProcessed
name: 'Comment',
...options,
});
}
}

View File

@@ -13,12 +13,27 @@ import as from 'async';
import fetch from 'node-fetch';
import {
asSubmission,
buildCacheOptionsFromProvider, buildCachePrefix,
cacheStats, compareDurationValue, comparisonTextOp, createCacheManager, createHistoricalStatsDisplay,
formatNumber, getActivityAuthorName, getActivitySubredditName, isStrongSubredditState,
mergeArr, parseDurationComparison,
parseExternalUrl, parseGenericValueComparison, parseRedditEntity,
parseWikiContext, shouldCacheSubredditStateCriteriaResult, subredditStateIsNameOnly, toStrongSubredditState
buildCacheOptionsFromProvider,
buildCachePrefix,
cacheStats,
compareDurationValue,
comparisonTextOp,
createCacheManager,
createHistoricalStatsDisplay, FAIL,
fetchExternalUrl, filterCriteriaSummary,
formatNumber,
getActivityAuthorName,
getActivitySubredditName,
isStrongSubredditState, isSubmission,
mergeArr,
parseDurationComparison,
parseExternalUrl,
parseGenericValueComparison,
parseRedditEntity,
parseWikiContext, PASS,
shouldCacheSubredditStateCriteriaResult,
subredditStateIsNameOnly,
toStrongSubredditState
} from "../util";
import LoggedError from "../Utils/LoggedError";
import {
@@ -40,12 +55,12 @@ import {
HistoricalStats,
HistoricalStatUpdateData,
SubredditHistoricalStats,
SubredditHistoricalStatsDisplay, ThirdPartyCredentialsJsonConfig,
SubredditHistoricalStatsDisplay, ThirdPartyCredentialsJsonConfig, FilterCriteriaResult,
} from "../Common/interfaces";
import UserNotes from "./UserNotes";
import Mustache from "mustache";
import he from "he";
import {AuthorCriteria} from "../Author/Author";
import {AuthorCriteria, AuthorOptions} from "../Author/Author";
import {SPoll} from "./Streams";
import {Cache} from 'cache-manager';
import {Submission, Comment, Subreddit} from "snoowrap/dist/objects";
@@ -170,7 +185,7 @@ export class SubredditResources {
this.stats.cache.userNotes.requests++;
this.stats.cache.userNotes.miss += miss ? 1 : 0;
}
this.userNotes = new UserNotes(userNotesTTL, this.subreddit, this.logger, this.cache, cacheUseCB)
this.userNotes = new UserNotes(userNotesTTL, this.subreddit, this.client, this.logger, this.cache, cacheUseCB)
if(this.cacheType === 'memory' && this.cacheSettingsHash !== 'default') {
const min = Math.min(...([this.wikiTTL, this.authorTTL, this.submissionTTL, this.commentTTL, this.filterCriteriaTTL].filter(x => typeof x === 'number' && x !== 0) as number[]));
@@ -593,8 +608,7 @@ export class SubredditResources {
}
} else {
try {
const response = await fetch(extUrl as string);
wikiContent = await response.text();
wikiContent = await fetchExternalUrl(extUrl as string, this.logger);
} catch (err: any) {
const msg = `Error occurred while trying to fetch the url ${extUrl}`;
this.logger.error(msg, err);
@@ -711,7 +725,7 @@ export class SubredditResources {
return await this.isSubreddit(await this.getSubreddit(item), state, this.logger);
}
async testAuthorCriteria(item: (Comment | Submission), authorOpts: AuthorCriteria, include = true) {
async testAuthorCriteria(item: (Comment | Submission), authorOpts: AuthorCriteria, include = true): Promise<FilterCriteriaResult<AuthorCriteria>> {
if (this.filterCriteriaTTL !== false) {
// in the criteria check we only actually use the `item` to get the author flair
// which will be the same for the entire subreddit
@@ -724,17 +738,18 @@ export class SubredditResources {
await this.stats.cache.authorCrit.identifierRequestCount.set(hash, (await this.stats.cache.authorCrit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.authorCrit.requestTimestamps.push(Date.now());
this.stats.cache.authorCrit.requests++;
let miss = false;
const cachedAuthorTest = await this.cache.wrap(hash, async () => {
miss = true;
return await testAuthorCriteria(item, authorOpts, include, this.userNotes);
}, {ttl: this.filterCriteriaTTL});
if (!miss) {
// need to check shape of result to invalidate old result type
let cachedAuthorTest: FilterCriteriaResult<AuthorCriteria> = await this.cache.get(hash) as FilterCriteriaResult<AuthorCriteria>;
if(cachedAuthorTest !== null && cachedAuthorTest !== undefined && typeof cachedAuthorTest === 'object') {
this.logger.debug(`Cache Hit: Author Check on ${userName} (Hash ${hash})`);
return cachedAuthorTest;
} else {
this.stats.cache.authorCrit.miss++;
cachedAuthorTest = await testAuthorCriteria(item, authorOpts, include, this.userNotes);
await this.cache.set(hash, cachedAuthorTest, {ttl: this.filterCriteriaTTL});
return cachedAuthorTest;
}
return cachedAuthorTest;
}
return await testAuthorCriteria(item, authorOpts, include, this.userNotes);
@@ -764,7 +779,7 @@ export class SubredditResources {
const cachedItem = await this.cache.get(hash);
if (cachedItem !== undefined && cachedItem !== null) {
this.logger.debug(`Cache Hit: Item Check on ${item.name} (Hash ${hash})`);
return cachedItem as boolean;
//return cachedItem as boolean;
}
const itemResult = await this.isItem(item, states, this.logger);
this.stats.cache.itemCrit.miss++;
@@ -781,8 +796,8 @@ export class SubredditResources {
return await this.isItem(i, activityStates, this.logger);
}
async isSubreddit (subreddit: Subreddit, stateCriteria: SubredditState | StrongSubredditState, logger: Logger) {
delete stateCriteria.stateDescription;
async isSubreddit (subreddit: Subreddit, stateCriteriaRaw: SubredditState | StrongSubredditState, logger: Logger) {
const {stateDescription, ...stateCriteria} = stateCriteriaRaw;
if (Object.keys(stateCriteria).length === 0) {
return true;
@@ -858,7 +873,7 @@ export class SubredditResources {
if (crit[k] !== undefined) {
switch (k) {
case 'submissionState':
if(!(item instanceof Comment)) {
if(isSubmission(item)) {
log.warn('`submissionState` is not allowed in `itemIs` criteria when the main Activity is a Submission');
continue;
}
@@ -977,7 +992,7 @@ export class SubredditResources {
}
break;
case 'op':
if(item instanceof Submission) {
if(isSubmission(item)) {
log.warn(`On a Submission the 'op' property will always be true. Did you mean to use this on a comment instead?`);
break;
}
@@ -989,7 +1004,7 @@ export class SubredditResources {
}
break;
case 'depth':
if(item instanceof Submission) {
if(isSubmission(item)) {
log.warn(`Cannot test for 'depth' on a Submission`);
break;
}
@@ -1001,6 +1016,36 @@ export class SubredditResources {
return false
}
break;
case 'flairTemplate':
case 'link_flair_text':
case 'link_flair_css_class':
if(asSubmission(item)) {
const subCrit = crit as SubmissionState;
let propertyValue: string | null;
if(k === 'flairTemplate') {
propertyValue = await item.link_flair_template_id;
} else {
propertyValue = await item[k];
}
const expectedValues = typeof subCrit[k] === 'string' ? [subCrit[k]] : (subCrit[k] as string[]);
const VALUEPass = () => {
for (const c of expectedValues) {
if (c === propertyValue) {
return true;
}
}
return false;
};
const valueResult = VALUEPass();
if(!valueResult) {
log.debug(`Failed: Expected => ${k} ${expectedValues.join(' OR ')} | Found => ${k}:${propertyValue}`);
return false;
}
break;
} else {
log.warn(`Cannot test for ${k} on Comment`);
break;
}
default:
// @ts-ignore
if (item[k] !== undefined) {
@@ -1277,3 +1322,74 @@ export class BotResourcesManager {
return;
}
}
/**
 * Test an Activity's author against an author filter (`include`/`exclude` criteria lists).
 *
 * Semantics visible in this implementation:
 * - If `include` is non-empty it takes precedence: criteria are OR'd — the first passing
 *   criteria short-circuits to `[true, 'inclusive']`; if none pass → `[false, 'inclusive']`.
 *   `exclude` is never evaluated in that case.
 * - Otherwise, if `exclude` is non-empty, criteria are combined per `excludeCondition`
 *   (default 'AND'): with 'OR' the first pass wins; with 'AND' the first failure loses
 *   and all criteria passing is required.
 * - With neither list populated → `[true, undefined]` (no filter applied).
 *
 * @param item      the Submission or Comment whose author is being tested
 * @param filter    include/exclude author criteria plus the exclude combination mode
 * @param resources subreddit resources used to run (possibly cached) criteria tests
 * @param logger    parent logger; a child with an 'Author Filter' label is derived
 * @returns tuple of [passed, which-list-decided ('inclusive' | 'exclusive') or undefined]
 */
export const checkAuthorFilter = async (item: (Submission | Comment), filter: AuthorOptions, resources: SubredditResources, logger: Logger): Promise<[boolean, ('inclusive' | 'exclusive' | undefined)]> => {
    const authLogger = logger.child({labels: ['Author Filter']}, mergeArr);
    const {
        include = [],
        excludeCondition = 'AND',
        exclude = [],
    } = filter;
    // tri-state: null = undecided, true/false = decided by an exclude criteria
    let authorPass = null;
    if (include.length > 0) {
        let index = 1;
        for (const auth of include) {
            const critResult = await resources.testAuthorCriteria(item, auth);
            const [summary, details] = filterCriteriaSummary(critResult);
            if (critResult.passed) {
                // include criteria are OR'd — first pass decides the whole filter
                authLogger.verbose(`${PASS} => Inclusive Author Criteria ${index} => ${summary}`);
                authLogger.debug(`Criteria Details: \n${details.join('\n')}`);
                return [true, 'inclusive'];
            } else {
                authLogger.debug(`${FAIL} => Inclusive Author Criteria ${index} => ${summary}`);
                authLogger.debug(`Criteria Details: \n${details.join('\n')}`);
            }
            index++;
        }
        // no include criteria matched; exclude list is intentionally not consulted
        authLogger.verbose(`${FAIL} => No Inclusive Author Criteria matched`);
        return [false, 'inclusive'];
    }
    if (exclude.length > 0) {
        let index = 1;
        // summaries of non-deciding criteria, used for the final verbose line
        const summaries: string[] = [];
        for (const auth of exclude) {
            // include=false: criteria are evaluated in "exclude" mode
            const critResult = await resources.testAuthorCriteria(item, auth, false);
            const [summary, details] = filterCriteriaSummary(critResult);
            if (critResult.passed) {
                if(excludeCondition === 'OR') {
                    // OR mode: first passing criteria decides pass
                    authLogger.verbose(`${PASS} (OR) => Exclusive Author Criteria ${index} => ${summary}`);
                    authLogger.debug(`Criteria Details: \n${details.join('\n')}`);
                    authorPass = true;
                    break;
                }
                summaries.push(summary);
                authLogger.debug(`${PASS} (AND) => Exclusive Author Criteria ${index} => ${summary}`);
                authLogger.debug(`Criteria Details: \n${details.join('\n')}`);
            } else if (!critResult.passed) {
                if(excludeCondition === 'AND') {
                    // AND mode: first failing criteria decides fail
                    authLogger.verbose(`${FAIL} (AND) => Exclusive Author Criteria ${index} => ${summary}`);
                    authLogger.debug(`Criteria Details: \n${details.join('\n')}`);
                    authorPass = false;
                    break;
                }
                summaries.push(summary);
                authLogger.debug(`${FAIL} (OR) => Exclusive Author Criteria ${index} => ${summary}`);
                authLogger.debug(`Criteria Details: \n${details.join('\n')}`);
            }
            index++;
        }
        // AND mode with no failure encountered → all criteria passed
        if(excludeCondition === 'AND' && authorPass === null) {
            authorPass = true;
        }
        if (authorPass !== true) {
            if(excludeCondition === 'OR') {
                authLogger.verbose(`${FAIL} => Exclusive author criteria not matched => ${summaries.length === 1 ? `${summaries[0]}` : '(many, see debug)'}`);
            }
            return [false, 'exclusive']
        } else if(excludeCondition === 'AND') {
            authLogger.verbose(`${PASS} => Exclusive author criteria matched => ${summaries.length === 1 ? `${summaries[0]}` : '(many, see debug)'}`);
        }
        return [true, 'exclusive'];
    }
    // no criteria configured at all — filter trivially passes
    return [true, undefined];
}

View File

@@ -1,5 +1,5 @@
import dayjs, {Dayjs} from "dayjs";
import {Comment, RedditUser, WikiPage} from "snoowrap";
import Snoowrap, {Comment, RedditUser, WikiPage} from "snoowrap";
import {
COMMENT_URL_ID,
deflateUserNotes, getActivityAuthorName,
@@ -57,7 +57,7 @@ export type UserNotesConstants = Pick<any, "users" | "warnings">;
export class UserNotes {
notesTTL: number | false;
subreddit: Subreddit;
wiki: WikiPage;
client: Snoowrap;
moderators?: RedditUser[];
logger: Logger;
identifier: string;
@@ -70,14 +70,14 @@ export class UserNotes {
debounceCB: any;
batchCount: number = 0;
constructor(ttl: number | boolean, subreddit: Subreddit, logger: Logger, cache: Cache, cacheCB: Function) {
constructor(ttl: number | boolean, subreddit: Subreddit, client: Snoowrap, logger: Logger, cache: Cache, cacheCB: Function) {
this.notesTTL = ttl === true ? 0 : ttl;
this.subreddit = subreddit;
this.logger = logger;
this.wiki = subreddit.getWikiPage('usernotes');
this.identifier = `${this.subreddit.display_name}-usernotes`;
this.cache = cache;
this.cacheCB = cacheCB;
this.client = client;
}
async getUserNotes(user: RedditUser): Promise<UserNote[]> {
@@ -172,8 +172,8 @@ export class UserNotes {
// this.saveDebounce = undefined;
// }
// @ts-ignore
this.wiki = await this.subreddit.getWikiPage('usernotes').fetch();
const wikiContent = this.wiki.content_md;
const wiki = this.client.getSubreddit(this.subreddit.display_name).getWikiPage('usernotes');
const wikiContent = await wiki.content_md;
// TODO don't handle for versions lower than 6
const userNotes = JSON.parse(wikiContent);
@@ -197,6 +197,7 @@ export class UserNotes {
const blob = deflateUserNotes(payload.blob);
const wikiPayload = {text: JSON.stringify({...payload, blob}), reason: 'ContextBot edited usernotes'};
try {
const wiki = this.client.getSubreddit(this.subreddit.display_name).getWikiPage('usernotes');
if (this.notesTTL !== false) {
// DISABLED for now because if it fails throws an uncaught rejection
// and need to figured out how to handle this other than just logging (want to interrupt action flow too?)
@@ -226,12 +227,12 @@ export class UserNotes {
// this.logger.debug(`Saving Usernotes has been debounced for 5 seconds (${this.batchCount} batched)`)
// @ts-ignore
await this.subreddit.getWikiPage('usernotes').edit(wikiPayload);
await wiki.edit(wikiPayload);
await this.cache.set(this.identifier, payload, {ttl: this.notesTTL});
this.users = new Map();
} else {
// @ts-ignore
this.wiki = await this.subreddit.getWikiPage('usernotes').edit(wikiPayload);
await wiki.edit(wikiPayload);
}
return payload as RawUserNotesPayload;

View File

@@ -56,7 +56,7 @@ export const port = new commander.Option('-p, --port <port>', 'Port for web serv
export const sharedMod = new commander.Option('-q, --shareMod', `If enabled then all subreddits using the default settings to poll "unmoderated" or "modqueue" will retrieve results from a shared request to /r/mod (default: process.env.SHARE_MOD || false)`)
.argParser(parseBool);
export const operatorConfig = new commander.Option('-c, --operatorConfig <path>', 'An absolute path to a JSON file to load all parameters from (default: process.env.OPERATOR_CONFIG)');
export const operatorConfig = new commander.Option('-c, --operatorConfig <path>', 'An absolute path to a YAML/JSON file to load all parameters from (default: process.env.OPERATOR_CONFIG | CWD/config.yaml)');
export const getUniversalWebOptions = (): commander.Option[] => {
return [

View File

@@ -8,22 +8,23 @@ import he from "he";
import {RuleResult, UserNoteCriteria} from "../Rule";
import {
ActivityWindowType, CommentState, DomainInfo,
DurationVal,
DurationVal, FilterCriteriaPropertyResult, FilterCriteriaResult,
SubmissionState,
TypedActivityStates
} from "../Common/interfaces";
import {
asUserNoteCriteria,
compareDurationValue,
comparisonTextOp, escapeRegex, getActivityAuthorName,
isActivityWindowCriteria,
comparisonTextOp, escapeRegex, formatNumber, getActivityAuthorName,
isActivityWindowCriteria, isUserNoteCriteria,
normalizeName,
parseDuration,
parseDurationComparison,
parseGenericValueComparison,
parseGenericValueOrPercentComparison,
parseRuleResultsToMarkdownSummary, parseStringToRegex,
parseSubredditName,
truncateStringToLength, windowToActivityWindowCriteria
parseSubredditName, removeUndefinedKeys,
truncateStringToLength, userNoteCriteriaSummary, windowToActivityWindowCriteria
} from "../util";
import UserNotes from "../Subreddit/UserNotes";
import {Logger} from "winston";
@@ -32,6 +33,7 @@ import SimpleError from "./SimpleError";
import {AuthorCriteria} from "../Author/Author";
import {URL} from "url";
import {isStatusError} from "./Errors";
import {Dictionary, ElementOf, SafeDictionary} from "ts-essentials";
export const BOT_LINK = 'https://www.reddit.com/r/ContextModBot/comments/otz396/introduction_to_contextmodbot';
@@ -354,24 +356,54 @@ export const renderContent = async (template: string, data: (Submission | Commen
return he.decode(rendered);
}
export const testAuthorCriteria = async (item: (Comment | Submission), authorOpts: AuthorCriteria, include = true, userNotes: UserNotes) => {
const {shadowBanned, ...rest} = authorOpts;
type AuthorCritPropHelper = SafeDictionary<FilterCriteriaPropertyResult<AuthorCriteria>, keyof AuthorCriteria>;
type RequiredAuthorCrit = Required<AuthorCriteria>;
if(shadowBanned !== undefined) {
export const testAuthorCriteria = async (item: (Comment | Submission), authorOpts: AuthorCriteria, include = true, userNotes: UserNotes): Promise<FilterCriteriaResult<AuthorCriteria>> => {
const definedAuthorOpts = (removeUndefinedKeys(authorOpts) as RequiredAuthorCrit);
const propResultsMap = Object.entries(definedAuthorOpts).reduce((acc: AuthorCritPropHelper, [k, v]) => {
const key = (k as keyof AuthorCriteria);
let ex;
if (Array.isArray(v)) {
ex = v.map(x => {
if (asUserNoteCriteria(x)) {
return userNoteCriteriaSummary(x);
}
return x;
});
} else {
ex = [v];
}
acc[key] = {
property: key,
expected: ex,
behavior: include ? 'include' : 'exclude',
};
return acc;
}, {});
const {shadowBanned} = authorOpts;
if (shadowBanned !== undefined) {
try {
// @ts-ignore
await item.author.fetch();
// user is not shadowbanned
// if criteria specifies they SHOULD be shadowbanned then return false now
if(shadowBanned) {
return false;
if (shadowBanned) {
propResultsMap.shadowBanned!.found = false;
propResultsMap.shadowBanned!.passed = false;
}
} catch (err: any) {
if(isStatusError(err) && err.statusCode === 404) {
if (isStatusError(err) && err.statusCode === 404) {
// user is shadowbanned
// if criteria specifies they should not be shadowbanned then return false now
if(!shadowBanned) {
return false;
if (!shadowBanned) {
propResultsMap.shadowBanned!.found = true;
propResultsMap.shadowBanned!.passed = false;
}
} else {
throw err;
@@ -379,17 +411,30 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
}
}
try {
const authorName = getActivityAuthorName(item.author);
for (const k of Object.keys(rest)) {
// @ts-ignore
if (authorOpts[k] !== undefined) {
if (propResultsMap.shadowBanned === undefined || propResultsMap.shadowBanned.passed === undefined) {
try {
const authorName = getActivityAuthorName(item.author);
const keys = Object.keys(propResultsMap) as (keyof AuthorCriteria)[]
let shouldContinue = true;
for (const k of keys) {
if (k === 'shadowBanned') {
// we have already taken care of this with shadowban check above
continue;
}
const authorOptVal = definedAuthorOpts[k];
//if (authorOpts[k] !== undefined) {
switch (k) {
case 'name':
const nameVal = authorOptVal as RequiredAuthorCrit['name'];
const authPass = () => {
// @ts-ignore
for (const n of authorOpts[k]) {
for (const n of nameVal) {
if (n.toLowerCase() === authorName.toLowerCase()) {
return true;
}
@@ -397,8 +442,10 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
return false;
}
const authResult = authPass();
if ((include && !authResult) || (!include && authResult)) {
return false;
propResultsMap.name!.found = authorName;
propResultsMap.name!.passed = !((include && !authResult) || (!include && authResult));
if (!propResultsMap.name!.passed) {
shouldContinue = false;
}
break;
case 'flairCssClass':
@@ -413,8 +460,10 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
return false;
}
const cssResult = cssPass();
if ((include && !cssResult) || (!include && cssResult)) {
return false;
propResultsMap.flairCssClass!.found = css;
propResultsMap.flairCssClass!.passed = !((include && !cssResult) || (!include && cssResult));
if (!propResultsMap.flairCssClass!.passed) {
shouldContinue = false;
}
break;
case 'flairText':
@@ -429,68 +478,103 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
return false;
};
const textResult = textPass();
if ((include && !textResult) || (!include && textResult)) {
propResultsMap.flairText!.found = text;
propResultsMap.flairText!.passed = !((include && !textResult) || (!include && textResult));
if (!propResultsMap.flairText!.passed) {
shouldContinue = false;
}
break;
case 'flairTemplate':
const templateId = await item.author_flair_template_id;
const templatePass = () => {
// @ts-ignore
for (const c of authorOpts[k]) {
if (c === templateId) {
return true;
}
}
return false;
};
const templateResult = templatePass();
propResultsMap.flairTemplate!.found = templateId;
propResultsMap.flairTemplate!.passed = !((include && !templateResult) || (!include && templateResult));
if (!propResultsMap.flairTemplate!.passed) {
shouldContinue = false;
}
break;
case 'isMod':
const mods: RedditUser[] = await item.subreddit.getModerators();
const isModerator = mods.some(x => x.name === authorName);
const modMatch = authorOpts.isMod === isModerator;
if ((include && !modMatch) || (!include && modMatch)) {
return false;
propResultsMap.isMod!.found = isModerator;
propResultsMap.isMod!.passed = !((include && !modMatch) || (!include && modMatch));
if (!propResultsMap.isMod!.passed) {
shouldContinue = false;
}
break;
case 'age':
const ageTest = compareDurationValue(parseDurationComparison(await authorOpts.age as string), dayjs.unix(await item.author.created));
if ((include && !ageTest) || (!include && ageTest)) {
return false;
const authorAge = dayjs.unix(await item.author.created);
const ageTest = compareDurationValue(parseDurationComparison(await authorOpts.age as string), authorAge);
propResultsMap.age!.found = authorAge.fromNow(true);
propResultsMap.age!.passed = !((include && !ageTest) || (!include && ageTest));
if (!propResultsMap.age!.passed) {
shouldContinue = false;
}
break;
case 'linkKarma':
// @ts-ignore
const tk = await item.author.total_karma as number;
const lkCompare = parseGenericValueOrPercentComparison(await authorOpts.linkKarma as string);
let lkMatch;
if (lkCompare.isPercent) {
// @ts-ignore
const tk = await item.author.total_karma as number;
lkMatch = comparisonTextOp(item.author.link_karma / tk, lkCompare.operator, lkCompare.value / 100);
} else {
lkMatch = comparisonTextOp(item.author.link_karma, lkCompare.operator, lkCompare.value);
}
if ((include && !lkMatch) || (!include && lkMatch)) {
return false;
propResultsMap.linkKarma!.found = tk;
propResultsMap.linkKarma!.passed = !((include && !lkMatch) || (!include && lkMatch));
if (!propResultsMap.linkKarma!.passed) {
shouldContinue = false;
}
break;
case 'commentKarma':
// @ts-ignore
const ck = await item.author.total_karma as number;
const ckCompare = parseGenericValueOrPercentComparison(await authorOpts.commentKarma as string);
let ckMatch;
if (ckCompare.isPercent) {
// @ts-ignore
const ck = await item.author.total_karma as number;
ckMatch = comparisonTextOp(item.author.comment_karma / ck, ckCompare.operator, ckCompare.value / 100);
} else {
ckMatch = comparisonTextOp(item.author.comment_karma, ckCompare.operator, ckCompare.value);
}
if ((include && !ckMatch) || (!include && ckMatch)) {
return false;
propResultsMap.commentKarma!.found = ck;
propResultsMap.commentKarma!.passed = !((include && !ckMatch) || (!include && ckMatch));
if (!propResultsMap.commentKarma!.passed) {
shouldContinue = false;
}
break;
case 'totalKarma':
// @ts-ignore
const totalKarma = await item.author.total_karma as number;
const tkCompare = parseGenericValueComparison(await authorOpts.totalKarma as string);
if (tkCompare.isPercent) {
throw new SimpleError(`'totalKarma' value on AuthorCriteria cannot be a percentage`);
}
// @ts-ignore
const totalKarma = await item.author.total_karma as number;
const tkMatch = comparisonTextOp(totalKarma, tkCompare.operator, tkCompare.value);
if ((include && !tkMatch) || (!include && tkMatch)) {
return false;
propResultsMap.totalKarma!.found = totalKarma;
propResultsMap.totalKarma!.passed = !((include && !tkMatch) || (!include && tkMatch));
if (!propResultsMap.totalKarma!.passed) {
shouldContinue = false;
}
break;
case 'verified':
const vMatch = await item.author.has_verified_mail === authorOpts.verified as boolean;
if ((include && !vMatch) || (!include && vMatch)) {
return false;
const verified = await item.author.has_verified_mail;
const vMatch = verified === authorOpts.verified as boolean;
propResultsMap.verified!.found = verified;
propResultsMap.verified!.passed = !((include && !vMatch) || (!include && vMatch));
if (!propResultsMap.verified!.passed) {
shouldContinue = false;
}
break;
case 'description':
@@ -498,25 +582,32 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
const desc = await item.author.subreddit?.display_name.public_description;
const dVals = authorOpts[k] as string[];
let passed = false;
for(const val of dVals) {
let passReg;
for (const val of dVals) {
let reg = parseStringToRegex(val, 'i');
if(reg === undefined) {
if (reg === undefined) {
reg = parseStringToRegex(`/.*${escapeRegex(val.trim())}.*/`, 'i');
if(reg === undefined) {
if (reg === undefined) {
throw new SimpleError(`Could not convert 'description' value to a valid regex: ${authorOpts[k] as string}`);
}
}
if(reg.test(desc)) {
if (reg.test(desc)) {
passed = true;
passReg = reg.toString();
break;
}
}
if(!passed) {
return false;
propResultsMap.description!.found = typeof desc === 'string' ? truncateStringToLength(50)(desc) : desc;
propResultsMap.description!.passed = !((include && !passed) || (!include && passed));
if (!propResultsMap.description!.passed) {
shouldContinue = false;
} else {
propResultsMap.description!.reason = `Matched with: ${passReg as string}`;
}
break;
case 'userNotes':
const notes = await userNotes.getUserNotes(item.author);
let foundNoteResult: string[] = [];
const notePass = () => {
for (const noteCriteria of authorOpts[k] as UserNoteCriteria[]) {
const {count = '>= 1', search = 'current', type} = noteCriteria;
@@ -529,8 +620,14 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
const order = extra.includes('asc') ? 'ascending' : 'descending';
switch (search) {
case 'current':
if (notes.length > 0 && notes[notes.length - 1].noteType === type) {
return true;
if (notes.length > 0) {
const currentNoteType = notes[notes.length - 1].noteType;
foundNoteResult.push(`Current => ${currentNoteType}`);
if (currentNoteType === type) {
return true;
}
} else {
foundNoteResult.push('No notes present');
}
break;
case 'consecutive':
@@ -549,39 +646,64 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
if (isPercent) {
throw new SimpleError(`When comparing UserNotes with 'consecutive' search 'count' cannot be a percentage. Given: ${count}`);
}
foundNoteResult.push(`Found ${currCount} ${type} consecutively`);
if (comparisonTextOp(currCount, operator, value)) {
return true;
}
}
break;
case 'total':
const filteredNotes = notes.filter(x => x.noteType === type);
if (isPercent) {
if (comparisonTextOp(notes.filter(x => x.noteType === type).length / notes.length, operator, value / 100)) {
// avoid divide by zero
const percent = notes.length === 0 ? 0 : filteredNotes.length / notes.length;
foundNoteResult.push(`${formatNumber(percent)}% are ${type}`);
if (comparisonTextOp(percent, operator, value / 100)) {
return true;
}
} else {
foundNoteResult.push(`${filteredNotes.length} are ${type}`);
if (comparisonTextOp(notes.filter(x => x.noteType === type).length, operator, value)) {
return true;
}
} else if (comparisonTextOp(notes.filter(x => x.noteType === type).length, operator, value)) {
return true;
}
break;
}
}
return false;
}
const noteResult = notePass();
if ((include && !noteResult) || (!include && noteResult)) {
return false;
propResultsMap.userNotes!.found = foundNoteResult.join(' | ');
propResultsMap.userNotes!.passed = !((include && !noteResult) || (!include && noteResult));
if (!propResultsMap.userNotes!.passed) {
shouldContinue = false;
}
break;
}
//}
if (!shouldContinue) {
break;
}
}
} catch (err: any) {
if (isStatusError(err) && err.statusCode === 404) {
throw new SimpleError('Reddit returned a 404 while trying to retrieve User profile. It is likely this user is shadowbanned.');
} else {
throw err;
}
}
return true;
} catch (err: any) {
if(isStatusError(err) && err.statusCode === 404) {
throw new SimpleError('Reddit returned a 404 while trying to retrieve User profile. It is likely this user is shadowbanned.');
} else {
throw err;
}
}
// gather values and determine overall passed
const propResults = Object.values(propResultsMap);
const passed = propResults.filter(x => typeof x.passed === 'boolean').every(x => x.passed === true);
return {
behavior: include ? 'include' : 'exclude',
criteria: authorOpts,
propertyResults: propResults,
passed,
};
}
export interface ItemContent {

View File

@@ -9,10 +9,10 @@ import {Strategy as CustomStrategy} from 'passport-custom';
import {OperatorConfig, BotConnection, LogInfo} from "../../Common/interfaces";
import {
buildCachePrefix,
createCacheManager, filterLogBySubreddit,
createCacheManager, defaultFormat, filterLogBySubreddit,
formatLogLineToHtml,
intersect, isLogLineMinLevel,
LogEntry, parseFromJsonOrYamlToObject, parseInstanceLogInfoName, parseInstanceLogName,
LogEntry, parseInstanceLogInfoName, parseInstanceLogName, parseRedditEntity,
parseSubredditLogName, permissions,
randomId, sleep, triggeredIndicator
} from "../../util";
@@ -46,6 +46,9 @@ import {MESSAGE} from "triple-beam";
import Autolinker from "autolinker";
import path from "path";
import {ExtendedSnoowrap} from "../../Utils/SnoowrapClients";
import ClientUser from "../Common/User/ClientUser";
import {BotStatusResponse} from "../Common/interfaces";
import {TransformableInfo} from "logform";
const emitter = new EventEmitter();
@@ -89,19 +92,6 @@ declare module 'express-session' {
}
}
// declare global {
// namespace Express {
// interface User {
// name: string
// subreddits: string[]
// machine?: boolean
// isOperator?: boolean
// realManagers?: string[]
// moderatedManagers?: string[]
// }
// }
// }
interface ConnectedUserInfo {
level?: string,
user?: string,
@@ -202,8 +192,9 @@ const webClient = async (options: OperatorConfig) => {
done(null, { subreddits: subreddits.map((x: Subreddit) => x.display_name), isOperator: webOps.includes(user.toLowerCase()), name: user, scope, token, tokenExpiresAt: dayjs().unix() + (60 * 60) });
});
passport.deserializeUser(async function (obj, done) {
done(null, obj as Express.User);
passport.deserializeUser(async function (obj: any, done) {
const user = new ClientUser(obj.name, obj.subreddits, {token: obj.token, scope: obj.scope, webOperator: obj.isOperator, tokenExpiresAt: obj.tokenExpiresAt});
done(null, user);
// const data = await webCache.get(`userSession-${obj}`) as object;
// if (data === undefined) {
// done('Not Found');
@@ -236,7 +227,10 @@ const webClient = async (options: OperatorConfig) => {
code: code as string,
});
const user = await client.getMe().name as string;
const subs = await client.getModeratedSubreddits();
let subs = await client.getModeratedSubreddits({count: 100});
while(!subs.isFinished) {
subs = await subs.fetchMore({amount: 100});
}
io.to(req.session.id).emit('authStatus', {canSaveWiki: req.session.scope?.includes('wikiedit')});
return done(null, {user, subreddits: subs, scope: req.session.scope, token: client.accessToken});
}
@@ -326,32 +320,36 @@ const webClient = async (options: OperatorConfig) => {
});
// @ts-ignore
const user = await client.getMe();
const userName = `u/${user.name}`;
// @ts-ignore
await webCache.del(`invite:${req.session.inviteId}`);
let data: any = {
accessToken: client.accessToken,
refreshToken: client.refreshToken,
userName,
};
if(invite.instance !== undefined) {
const bot = cmInstances.find(x => x.friendly === invite.instance);
if(bot !== undefined) {
const botPayload: any = {
overwrite: invite.overwrite === true,
name: userName,
credentials: {
accessToken: client.accessToken,
refreshToken: client.refreshToken,
clientId: invite.clientId,
clientSecret: invite.clientSecret,
reddit: {
accessToken: client.accessToken,
refreshToken: client.refreshToken,
clientId: invite.clientId,
clientSecret: invite.clientSecret,
}
}
};
if(invite.subreddit !== undefined) {
botPayload.subreddits = {names: [invite.subreddit]};
if(invite.subreddits !== undefined && invite.subreddits.length > 0) {
botPayload.subreddits = {names: invite.subreddits};
}
const botAddResult: any = await addBot(bot, {name: invite.creator}, botPayload);
let msg = botAddResult.success ? 'Bot successfully added to running instance' : 'An error occurred while adding the bot to the instance';
if(botAddResult.success) {
msg = `${msg}. ${botAddResult.stored === false ? 'Additionally, the bot was not stored in config so the operator will need to add it manually to persist after a restart.' : ''}`;
}
data.addResult = msg;
// stored
// success
data = {...data, ...botAddResult};
// @ts-ignore
req.session.destroy();
req.logout();
@@ -396,12 +394,13 @@ const webClient = async (options: OperatorConfig) => {
let token = randomId();
interface InviteData {
permissions: string[],
subreddit?: string,
subreddits?: string,
instance?: string,
clientId: string
clientSecret: string
redirectUri: string
creator: string
overwrite?: boolean
}
const helperAuthed = async (req: express.Request, res: express.Response, next: Function) => {
@@ -417,7 +416,7 @@ const webClient = async (options: OperatorConfig) => {
'<div>or as an argument: <span class="font-mono">--operator YourRedditUsername</span></div>'});
}
// or if there is an operator and current user is operator
if(req.user.isOperator) {
if(req.user?.clientData?.webOperator) {
return next();
} else {
return res.render('error', {error: 'You must be an <b>Operator</b> to access this route.'});
@@ -429,7 +428,8 @@ const webClient = async (options: OperatorConfig) => {
redirectUri,
clientId,
clientSecret,
token: req.isAuthenticated() && req.user.isOperator ? token : undefined
token: req.isAuthenticated() && req.user?.clientData?.webOperator ? token : undefined,
instances: cmInstances.filter(x => req.user?.isInstanceOperator(x)).map(x => x.friendly),
});
});
@@ -457,7 +457,7 @@ const webClient = async (options: OperatorConfig) => {
clientSecret: ce,
redirect: redir,
instance,
subreddit,
subreddits,
code,
} = req.body as any;
@@ -482,7 +482,7 @@ const webClient = async (options: OperatorConfig) => {
clientSecret: (ce || clientSecret).trim(),
redirectUri: redir.trim(),
instance,
subreddit,
subreddits: subreddits.trim() === '' ? [] : subreddits.split(',').map((x: string) => parseRedditEntity(x).name),
creator: (req.user as Express.User).name,
}, {ttl: invitesMaxAge * 1000});
return res.send(inviteId);
@@ -527,6 +527,8 @@ const webClient = async (options: OperatorConfig) => {
const cmInstances: CMInstance[] = [];
let init = false;
const formatter = defaultFormat();
const formatTransform = formatter.transform as (info: TransformableInfo, opts?: any) => TransformableInfo;
let server: http.Server,
io: SocketServer;
@@ -567,7 +569,9 @@ const webClient = async (options: OperatorConfig) => {
limit: sessionData.limit,
sort: sessionData.sort,
level: sessionData.level,
stream: true
stream: true,
streamObjects: true,
formatted: false,
}
});
@@ -591,8 +595,24 @@ const webClient = async (options: OperatorConfig) => {
}
});
delim.on('data', (c: any) => {
io.to(sessionId).emit('log', formatLogLineToHtml(c.toString()));
const logObj = JSON.parse(c) as LogInfo;
let subredditMessage;
let allMessage;
if(logObj.subreddit !== undefined) {
const {subreddit, bot, ...rest} = logObj
// @ts-ignore
subredditMessage = formatLogLineToHtml(formatter.transform(rest)[MESSAGE], rest.timestamp);
}
if(logObj.bot !== undefined) {
const {bot, ...rest} = logObj
// @ts-ignore
allMessage = formatLogLineToHtml(formatter.transform(rest)[MESSAGE], rest.timestamp);
}
// @ts-ignore
let formattedMessage = formatLogLineToHtml(formatter.transform(logObj)[MESSAGE], logObj.timestamp);
io.to(sessionId).emit('log', {...logObj, subredditMessage, allMessage, formattedMessage});
});
gotStream.once('retry', retryFn);
@@ -626,20 +646,16 @@ const webClient = async (options: OperatorConfig) => {
return res.status(404).render('error', {error: msg});
}
const user = req.user as Express.User;
const isOperator = instance.operators.includes(user.name);
const canAccessBot = isOperator || intersect(user.subreddits, instance.subreddits).length > 0;
if (!user.isOperator && !canAccessBot) {
if (!req.user?.clientData?.webOperator && !req.user?.canAccessInstance(instance)) {
return res.status(404).render('error', {error: msg});
}
if (req.params.subreddit !== undefined && !isOperator && !user.subreddits.includes(req.params.subreddit)) {
if (req.params.subreddit !== undefined && !req.user?.isInstanceOperator(instance) && !req.user?.subreddits.includes(req.params.subreddit)) {
return res.status(404).render('error', {error: msg});
}
req.instance = instance;
req.session.botId = instance.friendly;
if(canAccessBot) {
if(req.user?.canAccessInstance(instance)) {
req.session.authBotId = instance.friendly;
}
return next();
@@ -664,15 +680,11 @@ const webClient = async (options: OperatorConfig) => {
return res.status(404).render('error', {error: msg});
}
const user = req.user as Express.User;
const isOperator = instance.operators.includes(user.name);
const canAccessBot = isOperator || intersect(user.subreddits, botInstance.subreddits.map(x => x.replace(/\\*r\/*/,''))).length > 0;
if (!user.isOperator && !canAccessBot) {
if (!req.user?.clientData?.webOperator && !req.user?.canAccessBot(botInstance)) {
return res.status(404).render('error', {error: msg});
}
if (req.params.subreddit !== undefined && !isOperator && !user.subreddits.includes(req.params.subreddit)) {
if (req.params.subreddit !== undefined && !req.user?.isInstanceOperator(instance) && !req.user?.subreddits.includes(req.params.subreddit)) {
return res.status(404).render('error', {error: msg});
}
req.bot = botInstance;
@@ -772,12 +784,12 @@ const webClient = async (options: OperatorConfig) => {
const level = req.session.level;
const shownInstances = cmInstances.reduce((acc: CMInstance[], curr) => {
const isBotOperator = curr.operators.map(x => x.toLowerCase()).includes(user.name.toLowerCase());
if(user.isOperator) {
const isBotOperator = req.user?.isInstanceOperator(curr);
if(user?.clientData?.webOperator) {
// @ts-ignore
return acc.concat({...curr, canAccessLocation: true, isOperator: isBotOperator});
}
if(!isBotOperator && intersect(user.subreddits, curr.subreddits).length === 0) {
if(!isBotOperator && !req.user?.canAccessInstance(curr)) {
return acc;
}
// @ts-ignore
@@ -804,7 +816,7 @@ const webClient = async (options: OperatorConfig) => {
return res.render('offline', {
instances: shownInstances,
instanceId: (req.instance as CMInstance).friendly,
isOperator: instance.operators.includes((req.user as Express.User).name),
isOperator: req.user?.isInstanceOperator(instance),
// @ts-ignore
logs: filterLogBySubreddit(instanceLogMap, [instance.friendly], {limit, sort, level, allLogName: 'web', allLogsParser: parseInstanceLogInfoName }).get(instance.friendly),
logSettings: {
@@ -832,12 +844,39 @@ const webClient = async (options: OperatorConfig) => {
// return acc.concat({...curr, isOperator: instanceOperator});
// },[]);
const isOp = req.user?.isInstanceOperator(instance);
res.render('status', {
instances: shownInstances,
bots: resp.bots,
bots: resp.bots.map((x: BotStatusResponse) => {
const {subreddits = []} = x;
const subredditsWithSimpleLogs = subreddits.map(y => {
let transformedLogs: string[];
if(y.name === 'All') {
// only need to remove bot name here
transformedLogs = (y.logs as LogInfo[]).map((z: LogInfo) => {
const {bot, ...rest} = z;
// @ts-ignore
return formatLogLineToHtml(formatter.transform(rest)[MESSAGE] as string, rest.timestamp);
});
} else {
transformedLogs = (y.logs as LogInfo[]).map((z: LogInfo) => {
const {bot, subreddit, ...rest} = z;
// @ts-ignore
return formatLogLineToHtml(formatter.transform(rest)[MESSAGE] as string, rest.timestamp);
});
}
y.logs = transformedLogs;
return y;
});
return {...x, subreddits: subredditsWithSimpleLogs};
}),
botId: (req.instance as CMInstance).friendly,
instanceId: (req.instance as CMInstance).friendly,
isOperator: instance.operators.includes((req.user as Express.User).name),
isOperator: isOp,
system: isOp ? {
logs: resp.system.logs,
} : undefined,
operators: instance.operators.join(', '),
operatorDisplay: instance.operatorDisplay,
logSettings: {
@@ -859,7 +898,7 @@ const webClient = async (options: OperatorConfig) => {
res.render('config', {
title: `Configuration Editor`,
format,
canSave: req.user?.scope?.includes('wikiedit') && req.user?.tokenExpiresAt !== undefined && dayjs.unix(req.user?.tokenExpiresAt).isAfter(dayjs())
canSave: req.user?.clientData?.scope?.includes('wikiedit') && req.user?.clientData?.tokenExpiresAt !== undefined && dayjs.unix(req.user?.clientData.tokenExpiresAt).isAfter(dayjs())
});
});
@@ -871,7 +910,7 @@ const webClient = async (options: OperatorConfig) => {
userAgent,
clientId,
clientSecret,
accessToken: req.user?.token
accessToken: req.user?.clientData?.token
});
try {
@@ -1024,7 +1063,7 @@ const webClient = async (options: OperatorConfig) => {
// setup general web log event
const webLogListener = (log: string) => {
const subName = parseSubredditLogName(log);
if((subName === undefined || user.isOperator) && isLogLineMinLevel(log, session.level as string)) {
if((subName === undefined || user.clientData?.webOperator === true) && isLogLineMinLevel(log, session.level as string)) {
io.to(session.id).emit('webLog', formatLogLineToHtml(log));
}
}
@@ -1093,8 +1132,9 @@ const webClient = async (options: OperatorConfig) => {
try {
const token = createToken(bot, userPayload);
const resp = await got.post(`${bot.normalUrl}/bot`, {
body: botPayload,
body: JSON.stringify(botPayload),
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${token}`,
}
}).json() as object;
@@ -1120,7 +1160,7 @@ const webClient = async (options: OperatorConfig) => {
if(lastCheck > 15) {
shouldCheck = true;
}
} else if(lastCheck > 300) {
} else if(lastCheck > 60) {
shouldCheck = true;
}
}
@@ -1155,7 +1195,9 @@ const webClient = async (options: OperatorConfig) => {
}
}).json() as CMInstance;
botStat = {...botStat, ...resp, online: true};
const {bots, ...restResp} = resp;
botStat = {...botStat, ...restResp, bots: bots.map(x => ({...x, instance: botStat})), online: true};
const sameNameIndex = cmInstances.findIndex(x => x.friendly === botStat.friendly);
if(sameNameIndex > -1 && sameNameIndex !== existingClientIndex) {
logger.warn(`Client returned a friendly name that is not unique (${botStat.friendly}), will fallback to host as friendly (${botStat.normalUrl})`);

View File

@@ -0,0 +1,23 @@
import {IUser} from "../interfaces";
export interface ClientUserData {
token?: string
tokenExpiresAt?: number
scope?: string[]
webOperator?: boolean
}
abstract class CMUser<Instance, Bot, SubredditEntity> implements IUser {
constructor(public name: string, public subreddits: string[], public clientData: ClientUserData = {}) {
}
public abstract isInstanceOperator(val: Instance): boolean;
public abstract canAccessInstance(val: Instance): boolean;
public abstract canAccessBot(val: Bot): boolean;
public abstract accessibleBots(bots: Bot[]): Bot[]
public abstract canAccessSubreddit(val: Bot, name: string): boolean;
public abstract accessibleSubreddits(bot: Bot): SubredditEntity[]
}
export default CMUser;

View File

@@ -0,0 +1,41 @@
import {BotInstance, CMInstance} from "../../interfaces";
import CMUser from "./CMUser";
import {intersect, parseRedditEntity} from "../../../util";
class ClientUser extends CMUser<CMInstance, BotInstance, string> {
isInstanceOperator(val: CMInstance): boolean {
return val.operators.map(x=> x.toLowerCase()).includes(this.name.toLowerCase());
}
canAccessInstance(val: CMInstance): boolean {
return this.isInstanceOperator(val) || intersect(this.subreddits, val.subreddits.map(x => parseRedditEntity(x).name)).length > 0;
}
canAccessBot(val: BotInstance): boolean {
return this.isInstanceOperator(val.instance) || intersect(this.subreddits, val.subreddits.map(x => parseRedditEntity(x).name)).length > 0;
}
canAccessSubreddit(val: BotInstance, name: string): boolean {
return this.isInstanceOperator(val.instance) || this.subreddits.map(x => x.toLowerCase()).includes(parseRedditEntity(name).name.toLowerCase());
}
accessibleBots(bots: BotInstance[]): BotInstance[] {
if (bots.length === 0) {
return bots;
}
return bots.filter(x => {
if (this.isInstanceOperator(x.instance)) {
return true;
}
return intersect(this.subreddits, x.subreddits.map(y => parseRedditEntity(y).name)).length > 0
});
}
accessibleSubreddits(bot: BotInstance): string[] {
return this.isInstanceOperator(bot.instance) ? bot.subreddits.map(x => parseRedditEntity(x).name) : intersect(this.subreddits, bot.subreddits.map(x => parseRedditEntity(x).name));
}
}
export default ClientUser;

View File

@@ -0,0 +1,39 @@
import {BotInstance, CMInstance} from "../../interfaces";
import CMUser from "./CMUser";
import {intersect, parseRedditEntity} from "../../../util";
import {App} from "../../../App";
import Bot from "../../../Bot";
import {Manager} from "../../../Subreddit/Manager";
class ServerUser extends CMUser<App, Bot, Manager> {
constructor(public name: string, public subreddits: string[], public machine: boolean, public isOperator: boolean) {
super(name, subreddits);
}
isInstanceOperator(): boolean {
return this.isOperator;
}
canAccessInstance(val: App): boolean {
return this.isOperator || val.bots.filter(x => intersect(this.subreddits, x.subManagers.map(y => y.subreddit.display_name))).length > 0;
}
canAccessBot(val: Bot): boolean {
return this.isOperator || intersect(this.subreddits, val.subManagers.map(y => y.subreddit.display_name)).length > 0;
}
accessibleBots(bots: Bot[]): Bot[] {
return this.isOperator ? bots : bots.filter(x => intersect(this.subreddits, x.subManagers.map(y => y.subreddit.display_name)).length > 0);
}
canAccessSubreddit(val: Bot, name: string): boolean {
return this.isOperator || this.subreddits.includes(parseRedditEntity(name).name) && val.subManagers.some(y => y.subreddit.display_name.toLowerCase() === parseRedditEntity(name).name.toLowerCase());
}
accessibleSubreddits(bot: Bot): Manager[] {
return this.isOperator ? bot.subManagers : bot.subManagers.filter(x => intersect(this.subreddits, [x.subreddit.display_name]).length > 0);
}
}
export default ServerUser;

View File

@@ -1,5 +1,5 @@
import {RunningState} from "../../Subreddit/Manager";
import {ManagerStats} from "../../Common/interfaces";
import {LogInfo, ManagerStats} from "../../Common/interfaces";
export interface BotStats {
startedAtHuman: string,
@@ -15,7 +15,7 @@ export interface BotStats {
export interface SubredditDataResponse {
name: string
logs: string[]
logs: (string|LogInfo)[]
botState: RunningState
eventsState: RunningState
queueState: RunningState
@@ -59,3 +59,17 @@ export interface BotStatusResponse {
}
subreddits: SubredditDataResponse[]
}
export interface IUser {
name: string
subreddits: string[]
machine?: boolean
isOperator?: boolean
realManagers?: string[]
moderatedManagers?: string[]
realBots?: string[]
moderatedBots?: string[]
scope?: string[]
token?: string
tokenExpiresAt?: number
}

View File

@@ -5,7 +5,7 @@ import {formatNumber} from "../../util";
import Bot from "../../Bot";
export const opStats = (bot: Bot): BotStats => {
const limitReset = dayjs(bot.client.ratelimitExpiration);
const limitReset = bot.client === undefined ? dayjs() : dayjs(bot.client.ratelimitExpiration);
const nextHeartbeat = bot.nextHeartbeat !== undefined ? bot.nextHeartbeat.local().format('MMMM D, YYYY h:mm A Z') : 'N/A';
const nextHeartbeatHuman = bot.nextHeartbeat !== undefined ? `in ${dayjs.duration(bot.nextHeartbeat.diff(dayjs())).humanize()}` : 'N/A'
return {

View File

@@ -1,27 +0,0 @@
import { Request } from "express";
import {App} from "../../App";
import Bot from "../../Bot";
// export interface ServerRequest extends Request {
// botApp: App
// bot?: Bot
// //user?: AuthenticatedUser
// }
//
// export interface ServerRequestRedditor extends ServerRequest {
// user?: AuthenticatedRedditUser
// }
//
// export interface AuthenticatedUser extends Express.User {
// machine: boolean
// }
//
// export interface AuthenticatedRedditUser extends AuthenticatedUser {
// name: string
// subreddits: string[]
// isOperator: boolean
// realManagers: string[]
// moderatedManagers: string[]
// realBots: string[]
// moderatedBots: string[]
// }

View File

@@ -1,9 +1,10 @@
import {Request, Response} from "express";
import {Request, Response, NextFunction} from "express";
import Bot from "../../Bot";
import ServerUser from "../Common/User/ServerUser";
export const authUserCheck = (userRequired: boolean = true) => async (req: Request, res: Response, next: Function) => {
if (req.isAuthenticated()) {
if (userRequired && req.user.machine) {
if (userRequired && (req.user as ServerUser).machine) {
return res.status(403).send('Must be authenticated as a user to access this route');
}
return next();
@@ -23,10 +24,15 @@ export const botRoute = (required = true) => async (req: Request, res: Response,
const botStr = botVal as string;
if(req.user !== undefined) {
if (req.user.realBots === undefined || !req.user.realBots.map(x => x.toLowerCase()).includes(botStr.toLowerCase())) {
const serverBot = req.botApp.bots.find(x => x.botName === botStr) as Bot;
if(serverBot === undefined) {
return res.status(404).send(`Bot named ${botStr} does not exist or you do not have permission to access it.`);
}
req.serverBot = req.botApp.bots.find(x => x.botName === botStr) as Bot;
if (!req.user?.canAccessBot(serverBot)) {
return res.status(404).send(`Bot named ${botStr} does not exist or you do not have permission to access it.`);
}
req.serverBot = serverBot;
return next();
}
return next();
@@ -37,18 +43,20 @@ export const subredditRoute = (required = true) => async (req: Request, res: Res
const bot = req.serverBot;
const {subreddit} = req.query as any;
if(subreddit === undefined && required === false) {
if(subreddit === undefined && !required) {
next();
} else {
const {name: userName, realManagers = [], isOperator} = req.user as Express.User;
if (!isOperator && !realManagers.includes(subreddit)) {
return res.status(400).send('Cannot access route for subreddit you do not manage or is not run by the bot')
}
//const {name: userName} = req.user as Express.User;
const manager = bot.subManagers.find(x => x.displayLabel === subreddit);
if (manager === undefined) {
return res.status(400).send('Cannot access route for subreddit you do not manage or is not run by the bot')
}
if (!req.user?.canAccessSubreddit(bot, subreddit)) {
return res.status(400).send('Cannot access route for subreddit you do not manage or is not run by the bot')
}
req.manager = manager;
next();

View File

@@ -1,29 +1,23 @@
import express, {Request, Response} from 'express';
import {Request, Response} from 'express';
import {RUNNING, USER} from "../../../../../Common/interfaces";
import Submission from "snoowrap/dist/objects/Submission";
import LoggedError from "../../../../../Utils/LoggedError";
import winston from "winston";
import {authUserCheck, botRoute} from "../../../middleware";
import {booleanMiddle} from "../../../../Common/middleware";
import {Manager} from "../../../../../Subreddit/Manager";
import {parseRedditEntity} from "../../../../../util";
const action = async (req: express.Request, res: express.Response) => {
const action = async (req: Request, res: Response) => {
const bot = req.serverBot;
const {type, action, subreddit, force = false} = req.query as any;
const {name: userName, realManagers = [], isOperator} = req.user as Express.User;
let subreddits: string[] = [];
if (subreddit === 'All') {
subreddits = realManagers;
} else if (realManagers.includes(subreddit)) {
subreddits = [subreddit];
const userName = req.user?.name;
let subreddits: Manager[] = req.user?.accessibleSubreddits(bot) as Manager[];
if (subreddit !== 'All') {
subreddits = subreddits.filter(x => x.subreddit.display_name === parseRedditEntity(subreddit).name);
}
for (const s of subreddits) {
const manager = bot.subManagers.find(x => x.displayLabel === s);
if (manager === undefined) {
winston.loggers.get('app').warn(`Manager for ${s} does not exist`, {subreddit: `/u/${userName}`});
continue;
}
for (const manager of subreddits) {
const mLogger = manager.logger;
mLogger.info(`/u/${userName} invoked '${action}' action for ${type} on ${manager.displayLabel}`);
try {

View File

@@ -3,6 +3,8 @@ import {BotInstanceConfig} from "../../../../../Common/interfaces";
import {authUserCheck} from "../../../middleware";
import Bot from "../../../../../Bot";
import LoggedError from "../../../../../Utils/LoggedError";
import {open} from 'fs/promises';
import {buildBotConfig} from "../../../../../ConfigBuilder";
const addBot = () => {
@@ -12,13 +14,41 @@ const addBot = () => {
const response = async (req: Request, res: Response) => {
if (!(req.user as Express.User).isOperator) {
if (!req.user?.isInstanceOperator(req.app)) {
return res.status(401).send("Must be an Operator to use this route");
}
const newBot = new Bot(req.body as BotInstanceConfig, req.botApp.logger);
if (!req.botApp.fileConfig.isWriteable) {
return res.status(409).send('Operator config is not writeable');
}
const {overwrite = false, ...botData} = req.body;
// check if bot is new or overwriting
let existingBot = req.botApp.bots.find(x => x.botAccount === botData.name);
// spin down existing
if (existingBot !== undefined) {
const {
bots: botsFromConfig = []
} = req.botApp.fileConfig.document.toJS();
if(botsFromConfig.length === 0 || botsFromConfig.some(x => x.name !== botData.name)) {
req.botApp.logger.warn('Overwriting existing bot with the same name BUT this bot does not exist in the operator CONFIG FILE. You should check how you have provided config before next start or else this bot may be started twice (once from file, once from arg/env)');
}
await existingBot.destroy('system');
req.botApp.bots.filter(x => x.botAccount !== botData.name);
}
req.botApp.fileConfig.document.addBot(botData);
const handle = await open(req.botApp.fileConfig.document.location as string, 'w');
await handle.writeFile(req.botApp.fileConfig.document.toString());
await handle.close();
const newBot = new Bot(buildBotConfig(botData, req.botApp.config), req.botApp.logger);
req.botApp.bots.push(newBot);
let result: any = {stored: true};
let result: any = {stored: true, success: true};
try {
if (newBot.error !== undefined) {
result.error = newBot.error;
@@ -26,13 +56,14 @@ const addBot = () => {
}
await newBot.testClient();
await newBot.buildManagers();
newBot.runManagers('user').catch((err) => {
newBot.runManagers('system').catch((err) => {
req.botApp.logger.error(`Unexpected error occurred while running Bot ${newBot.botName}. Bot must be re-built to restart`);
if (!err.logged || !(err instanceof LoggedError)) {
req.botApp.logger.error(err);
}
});
} catch (err: any) {
result.success = false;
if (newBot.error === undefined) {
newBot.error = err.message;
result.error = err.message;

View File

@@ -66,7 +66,7 @@ const actionedEvents = async (req: Request, res: Response) => {
managers.push(manager);
} else {
for(const manager of req.serverBot.subManagers) {
if((req.user?.realManagers as string[]).includes(manager.displayLabel)) {
if(req.user?.canAccessSubreddit(req.serverBot, manager.subreddit.display_name)) {
managers.push(manager);
}
}
@@ -89,7 +89,7 @@ const action = async (req: Request, res: Response) => {
const bot = req.serverBot;
const {url, dryRun = false, subreddit} = req.query as any;
const {name: userName, realManagers = [], isOperator} = req.user as Express.User;
const {name: userName} = req.user as Express.User;
let a;
const commentId = commentReg(url);
@@ -115,7 +115,7 @@ const action = async (req: Request, res: Response) => {
let manager = subreddit === 'All' ? bot.subManagers.find(x => x.subreddit.display_name === sub) : bot.subManagers.find(x => x.displayLabel === subreddit);
if (manager === undefined || (!realManagers.includes(manager.displayLabel))) {
if (manager === undefined || !req.user?.canAccessSubreddit(req.serverBot, manager.subreddit.display_name)) {
let msg = 'Activity does not belong to a subreddit you moderate or the bot runs on.';
if (subreddit === 'All') {
msg = `${msg} If you want to test an Activity against a Subreddit\'s config it does not belong to then switch to that Subreddit's tab first.`

View File

@@ -24,8 +24,10 @@ const logs = (subLogMap: Map<string, LogEntry[]>) => {
const logger = winston.loggers.get('app');
const {name: userName, realManagers = [], isOperator} = req.user as Express.User;
const {level = 'verbose', stream, limit = 200, sort = 'descending', streamObjects = false} = req.query;
const userName = req.user?.name as string;
const isOperator = req.user?.isInstanceOperator(req.botApp);
const realManagers = req.botApp.bots.map(x => req.user?.accessibleSubreddits(x).map(x => x.displayLabel)).flat() as string[];
const {level = 'verbose', stream, limit = 200, sort = 'descending', streamObjects = false, formatted = true} = req.query;
if (stream) {
const origin = req.header('X-Forwarded-For') ?? req.header('host');
try {
@@ -34,9 +36,16 @@ const logs = (subLogMap: Map<string, LogEntry[]>) => {
const {subreddit: subName} = log;
if (isOperator || (subName !== undefined && (realManagers.includes(subName) || subName.includes(userName)))) {
if(streamObjects) {
res.write(`${JSON.stringify(log)}\r\n`);
} else {
let obj: any = log;
if(!formatted) {
const {[MESSAGE]: fMessage, ...rest} = log;
obj = rest;
}
res.write(`${JSON.stringify(obj)}\r\n`);
} else if(formatted) {
res.write(`${log[MESSAGE]}\r\n`)
} else {
res.write(`${log.message}\r\n`)
}
}
}
@@ -60,11 +69,17 @@ const logs = (subLogMap: Map<string, LogEntry[]>) => {
operator: isOperator,
user: userName,
sort: sort as 'descending' | 'ascending',
limit: Number.parseInt((limit as string))
limit: Number.parseInt((limit as string)),
returnType: 'object',
});
const subArr: any = [];
logs.forEach((v: string[], k: string) => {
subArr.push({name: k, logs: v.join('')});
logs.forEach((v: (string|LogInfo)[], k: string) => {
let logs = v as LogInfo[];
let output: any[] = formatted ? logs : logs.map((x) => {
const {[MESSAGE]: fMessage, ...rest} = x;
return rest;
})
subArr.push({name: k, logs: output});
});
return res.json(subArr);
}

View File

@@ -41,19 +41,18 @@ const status = () => {
if(req.serverBot !== undefined) {
bots = [req.serverBot];
} else {
bots = (req.user as Express.User).isOperator ? req.botApp.bots : req.botApp.bots.filter(x => {
const i = intersect(req.user?.subreddits as string[], x.subManagers.map(y => y.subreddit.display_name));
return i.length > 0;
});
bots = req.user?.accessibleBots(req.botApp.bots) as Bot[];
}
const botResponses: BotStatusResponse[] = [];
let index = 1;
for(const b of bots) {
botResponses.push(await botStatResponse(b, req, botLogMap));
botResponses.push(await botStatResponse(b, req, botLogMap, index));
index++;
}
const system: any = {};
if((req.user as Express.User).isOperator) {
if(req.user?.isInstanceOperator(req.botApp)) {
// @ts-ignore
system.logs = filterLogBySubreddit(new Map([['app', systemLogs]]), [], {level, sort, limit, operator: true}).get('app');
system.logs = filterLogBySubreddit(new Map([['app', systemLogs]]), [], {level, sort, limit, operator: true}).get('all');
}
const response = {
bots: botResponses,
@@ -62,7 +61,7 @@ const status = () => {
return res.json(response);
}
const botStatResponse = async (bot: Bot, req: Request, botLogMap: Map<string, Map<string, LogEntry[]>>) => {
const botStatResponse = async (bot: Bot, req: Request, botLogMap: Map<string, Map<string, LogEntry[]>>, index: number) => {
const {
//subreddits = [],
//user: userVal,
@@ -72,33 +71,24 @@ const status = () => {
lastCheck
} = req.query;
const {name: userName, realManagers = [], isOperator} = req.user as Express.User;
const user = userName as string;
const subreddits = realManagers;
//const isOperator = opNames.includes(user.toLowerCase())
const user = req.user?.name as string;
const logs = filterLogBySubreddit(botLogMap.get(bot.botName as string) || new Map(), realManagers, {
const logs = filterLogBySubreddit(botLogMap.get(bot.botName as string) || new Map(), req.user?.accessibleSubreddits(bot).map(x => x.displayLabel) as string[], {
level: (level as string),
operator: isOperator,
operator: req.user?.isInstanceOperator(req.botApp),
user,
// @ts-ignore
sort,
limit: Number.parseInt((limit as string))
limit: Number.parseInt((limit as string)),
returnType: 'object'
});
const subManagerData = [];
for (const s of subreddits) {
const m = bot.subManagers.find(x => x.displayLabel === s) as Manager;
if(m === undefined) {
continue;
}
if(!(req.user as Express.User).isOperator && !(req.user?.subreddits as string[]).includes(m.subreddit.display_name)) {
continue;
}
for (const m of req.user?.accessibleSubreddits(bot) as Manager[]) {
const sd = {
name: s,
name: m.displayLabel,
//linkName: s.replace(/\W/g, ''),
logs: logs.get(s) || [], // provide a default empty value in case we truly have not logged anything for this subreddit yet
logs: logs.get(m.displayLabel) || [], // provide a default empty value in case we truly have not logged anything for this subreddit yet
botState: m.botState,
eventsState: m.eventsState,
queueState: m.queueState,
@@ -308,8 +298,8 @@ const status = () => {
startedAt: bot.startedAt.local().format('MMMM D, YYYY h:mm A Z'),
running: bot.running,
error: bot.error,
account: bot.botAccount as string,
name: bot.botName as string,
account: (bot.botAccount as string) ?? `Bot ${index}`,
name: (bot.botName as string) ?? `Bot ${index}`,
...opStats(bot),
},
subreddits: [allManagerData, ...subManagerData],

View File

@@ -1,5 +1,5 @@
import {addAsync, Router} from '@awaitjs/express';
import express, {Request, Response} from 'express';
import express, {Request, Response, NextFunction, RequestHandler} from 'express';
import bodyParser from 'body-parser';
import {App} from "../../App";
import {Transform} from "stream";
@@ -16,7 +16,7 @@ import {
} from "../../util";
import {getLogger} from "../../Utils/loggerFactory";
import LoggedError from "../../Utils/LoggedError";
import {Invokee, LogInfo, OperatorConfig} from "../../Common/interfaces";
import {Invokee, LogInfo, OperatorConfigWithFileContext} from "../../Common/interfaces";
import http from "http";
import SimpleError from "../../Utils/SimpleError";
import {heartbeat} from "./routes/authenticated/applicationRoutes";
@@ -29,6 +29,7 @@ import {opStats} from "../Common/util";
import Bot from "../../Bot";
import addBot from "./routes/authenticated/user/addBot";
import dayjs from "dayjs";
import ServerUser from "../Common/User/ServerUser";
const server = addAsync(express());
server.use(bodyParser.json());
@@ -51,7 +52,7 @@ const botLogMap: Map<string, Map<string, LogEntry[]>> = new Map();
const botSubreddits: Map<string, string[]> = new Map();
const rcbServer = async function (options: OperatorConfig) {
const rcbServer = async function (options: OperatorConfigWithFileContext) {
const {
operator: {
@@ -87,7 +88,7 @@ const rcbServer = async function (options: OperatorConfig) {
botLog.set('app', appLogs.slice(0, 200 + 1));
} else {
let botSubs = botSubreddits.get(botName) || [];
if(botSubs.length === 0 && app !== undefined) {
if(app !== undefined && (botSubs.length === 0 || !botSubs.includes(subName))) {
const b = app.bots.find(x => x.botName === botName);
if(b !== undefined) {
botSubs = b.subManagers.map(x => x.displayLabel);
@@ -128,31 +129,35 @@ const rcbServer = async function (options: OperatorConfig) {
}, function (jwtPayload, done) {
const {name, subreddits = [], machine = true} = jwtPayload.data;
if (machine) {
return done(null, {machine});
const user = new ServerUser(name, subreddits, true, false);
return done(null, user);
//return done(null, {machine});
}
const isOperator = opNames.includes(name.toLowerCase());
let moderatedBots: string[] = [];
let moderatedManagers: string[] = [];
let realBots: string[] = [];
let realManagers: string[] = [];
if(app !== undefined) {
const modBots = app.bots.filter(x => intersect(subreddits, x.subManagers.map(y => y.subreddit.display_name)));
moderatedBots = modBots.map(x => x.botName as string);
moderatedManagers = [...new Set(modBots.map(x => x.subManagers.map(y => y.displayLabel)).flat())];
realBots = isOperator ? app.bots.map(x => x.botName as string) : moderatedBots;
realManagers = isOperator ? [...new Set(app.bots.map(x => x.subManagers.map(y => y.displayLabel)).flat())] : moderatedManagers
}
// let moderatedBots: string[] = [];
// let moderatedManagers: string[] = [];
// let realBots: string[] = [];
// let realManagers: string[] = [];
// if(app !== undefined) {
// const modBots = app.bots.filter(x => intersect(subreddits, x.subManagers.map(y => y.subreddit.display_name)).length > 0);
// moderatedBots = modBots.map(x => x.botName as string);
// moderatedManagers = [...new Set(modBots.map(x => x.subManagers).flat().filter(x => subreddits.includes(x.subreddit.display_name)).map(x => x.displayLabel))];
// realBots = isOperator ? app.bots.map(x => x.botName as string) : moderatedBots;
// realManagers = isOperator ? [...new Set(app.bots.map(x => x.subManagers.map(y => y.displayLabel)).flat())] : moderatedManagers
// }
return done(null, {
name,
subreddits,
isOperator,
machine: false,
moderatedManagers,
realManagers,
moderatedBots,
realBots,
});
const user = new ServerUser(name, subreddits, false, isOperator);
return done(null, user);
// return done(null, {
// name,
// subreddits,
// isOperator,
// machine: false,
// moderatedManagers,
// realManagers,
// moderatedBots,
// realBots,
// });
}));
server.use(passport.authenticate('jwt', {session: false}));
@@ -169,12 +174,14 @@ const rcbServer = async function (options: OperatorConfig) {
let bots: Bot[] = [];
if(req.serverBot !== undefined) {
bots = [req.serverBot];
} else {
bots = (req.user as Express.User).isOperator ? req.botApp.bots : req.botApp.bots.filter(x => intersect(req.user?.subreddits as string[], x.subManagers.map(y => y.subreddit.display_name)));
} else if(req.user !== undefined) {
bots = req.user.accessibleBots(req.botApp.bots);
}
const resp = [];
let index = 1;
for(const b of bots) {
resp.push({name: b.botName, data: await opStats(b)});
resp.push({name: b.botName ?? `Bot ${index}`, data: await opStats(b)});
index++;
}
return res.json(resp);
});
@@ -197,7 +204,7 @@ const rcbServer = async function (options: OperatorConfig) {
server.getAsync('/check', ...actionRoute);
server.getAsync('/addBot', ...addBot());
server.postAsync('/bot', ...addBot());
server.getAsync('/bot/invite', ...getInvitesRoute);

View File

@@ -9,19 +9,29 @@
<div class="p-6 md:px-10 md:py-6">
<div class="text-xl mb-4">Congrats! You did the thing.</div>
<div class="space-y-3">
<div>These are the credentials ContextMod will use to act as your bot, <b><%= userName %></b></div>
<ul class="list-inside list-disc">
<li>Access Token: <b><%= accessToken %></b></li>
<li>Refresh Token: <b><%= refreshToken %></b></li>
</ul>
<% if(locals.addResult !== undefined) { %>
<div>Result of trying to add bot automatically: <%= addResult %></div>
<div>Note: You can revoke ContextMod's access to this account at any time by visiting the <a href="https://www.reddit.com/prefs/apps">reddit app preferences</a> while logged in as the account and clicking the <b>revoke access</b> link under ContextMod</div>
</div>
<div class="text-xl my-4">What Do I Do Now?</div>
<div class="space-y-3">
<% if(locals.stored === true) { %>
<% if(locals.success === true) { %>
<div>Credentials were successfully persisted to the application and the bot was automatically started! You may now <a href="/">login with your normal/moderator account</a> to view the web dashboard where your bot can be monitored.</div>
<% } else { %>
<div>The bot was successfully saved to the application but it could not be started automatically. Please inform the operator so they can restart the application.</div>
<% } %>
<% } else { %>
<div>Bot was not automatically added to an instance and will need to manually appended to configuration...</div>
<div>These credentials were <b>not automatically added</b> to an instance and will need to be <b>manually added by the operator</b>:</div>
<ul class="list-inside list-disc">
<li>If you are a <b>Moderator</b> then copy the above <b>Tokens</b> and pass them on to the Operator of this ContextMod instance.</li>
<li>If you are an <b>Operator</b> copy these somewhere and then restart the application providing these as either arguments, environmental variables, or in a config as described in the <a
href="https://github.com/FoxxMD/context-mod/blob/master/docs/operatorConfiguration.md#defining-configuration">configuration guide</a></li>
</ul>
<% } %>
<div>If you are a <b>Moderator</b> then copy the above <b>Tokens</b> and pass them on to the Operator of this ContextMod instance.</div>
<div>If you are an <b>Operator</b> copy these somewhere and then restart the application providing these as either arguments, environmental variables, or in a json config as described in the <a
href="https://github.com/FoxxMD/context-mod/blob/master/docs/operatorConfiguration.md#defining-configuration">configuration guide</a>
</div>
</div>
</div>
</div>

View File

@@ -46,7 +46,43 @@
class="text-black placeholder-gray-500 rounded mt-2 mb-3 p-2"
placeholder="<%= locals.clientSecret !== undefined ? 'Use Provided Client Secret' : 'Client Secret Not Provided' %>">
</div>
<div class="text-lg text-semibold my-3">3. Select permissions</div>
<div class="text-lg text-semibold my-3">3. Select Instance</div>
<div class="my-2 ml-5">
<div class="space-y-3">
<div>Specify the ContextMod instance to add this bot to.</div>
<select id="instanceSelect" style="max-width:400px;" class="form-select
block
w-full
px-3
py-1.5
text-base
font-normal
text-gray-700
bg-white bg-clip-padding bg-no-repeat
border border-solid border-gray-300
rounded
transition
ease-in-out
m-0
focus:text-gray-700 focus:bg-white focus:border-blue-600 focus:outline-none" aria-label="Default select example">
<% instances.forEach(function (name, index){ %>
<option selected="<%= index === 0 ? 'true' : 'false' %>" value="<%= name %>"><%= name %></option>
<%= name %>
<% }) %>
</select>
</div>
</div>
<div class="text-lg text-semibold my-3">4. Optionally, restrict to Subreddits</div>
<div class="my-2 ml-5">
<div class="space-y-3">
<div>Specify which subreddits, out of all the subreddits the bot moderates, CM should run on.</div>
<div>Subreddits should be seperated with a comma. Leave blank to run on all moderated subreddits</div>
<input id="subreddits" style="max-width:800px; display: block;"
class="text-black placeholder-gray-500 rounded mt-2 mb-3 p-2 w-full"
placeholder="aSubreddit,aSecondSubreddit,aThirdSubreddit">
</div>
</div>
<div class="text-lg text-semibold my-3">5. Select permissions</div>
<div class="my-2 ml-5">
<div class="space-y-3">
<div>These are permissions to allow the bot account to perform these actions, <b>in
@@ -220,6 +256,8 @@
clientSecret: document.querySelector('#clientSecret').value,
code: document.querySelector("#inviteCode").value === '' ? undefined : document.querySelector("#inviteCode").value,
permissions,
instance: document.querySelector('#instanceSelect').value,
subreddits: document.querySelector('#subreddits').value
})
}).then((resp) => {
if(!resp.ok) {

View File

@@ -1,10 +1,19 @@
<div class="space-x-4 py-1 md:px-10 leading-6 font-semibold bg-gray-700">
<div class="container mx-auto">
<% if(locals.bots !== undefined) { %>
<ul id="botTabs" class="inline-flex flex-wrap">
<% if(locals.system !== undefined && locals.system.logs !== undefined) {%>
<li class="my-3 px-3">
<span data-bot="system" class="rounded-md py-2 px-3 tabSelectWrapper">
<a class="tabSelect instance font-normal pointer hover:font-bold" data-bot="system">
System
</a>
</span>
</li>
<% } %>
<% if(locals.bots !== undefined) { %>
<% bots.forEach(function (data){ %>
<li class="my-3 px-3">
<span data-bot="<%= data.system.name %>" class="rounded-md py-2 px-3 tabSelectWrapper">
<span data-bot="<%= data.system.name %>" class="rounded-md py-2 px-3 tabSelectWrapper real">
<a class="tabSelect font-normal pointer hover:font-bold"
data-bot="<%= data.system.name %>">
<%= data.system.name %>
@@ -24,7 +33,7 @@
</span>
</li>
<% } %>
</ul>
<% } %>
</ul>
</div>
</div>

View File

@@ -9,6 +9,19 @@
<div class="grid">
<div class="">
<div class="pb-6 md:px-7">
<% if(isOperator) { %>
<div class="sub" data-bot="system" data-subreddit="All">
<div class="grid grid-cols-1 lg:grid-cols-2 xl:grid-cols-3 2xl:grid-cols-3 gap-5">
</div>
<br/>
<%- include('partials/loadingIcon') %>
<div data-subreddit="All" class="logs font-mono text-sm">
<% system.logs.forEach(function (logEntry){ %>
<%- logEntry %>
<% }) %>
</div>
</div>
<% } %>
<% bots.forEach(function (bot){ %>
<% bot.subreddits.forEach(function (data){ %>
<div class="sub <%= bot.system.running ? '' : 'offline' %>" data-subreddit="<%= data.name %>" data-bot="<%= bot.system.name %>">
@@ -611,12 +624,9 @@
<input data-subreddit="<%= data.name %>" style="min-width: 420px;"
class="border-gray-50 placeholder-gray-500 rounded mt-2 mb-3 p-2 text-black checkUrl"
placeholder="<%= data.name === 'All' ? 'Run Bot on a permalink from any moderated Subreddit' : `Run Bot on a permalink using this Subreddit's config` %>"/>
<span class="mx-2">
<input type="checkbox" class="dryrunCheck" data-subreddit="<%= data.name %>"
name="dryrunCheck">
<label for="dryrunCheck">Dry Run?</label>
</span>
<a class="runCheck" data-subreddit="<%= data.name %>" href="">Run</a>
<a class="hover:bg-gray-700 pointer-events-none opacity-20 no-underline rounded-md mx-4 py-2 px-3 border checkActions dryRunCheck" data-subreddit="<%= data.name %>" href="">Dry Run</a>
<a class="hover:bg-gray-700 pointer-events-none opacity-20 no-underline rounded-md py-2 px-3 border checkActions runCheck" data-subreddit="<%= data.name %>" href="">Run</a>
</div>
<%- include('partials/logSettings') %>
</div>
@@ -658,23 +668,42 @@
});
})
document.querySelectorAll(".runCheck").forEach(el => {
document.querySelectorAll(".checkUrl").forEach(el => {
const toggleButtons = (e) => {
const subFilter = `.sub[data-subreddit="${e.target.dataset.subreddit}"]`;
const inputVal = document.querySelector(`${subFilter} .checkUrl`).value;
if (inputVal.length > 0) {
document.querySelectorAll(`${subFilter} .checkActions`).forEach(el => {
el.classList.remove('pointer-events-none', 'opacity-20');
});
} else {
document.querySelectorAll(`${subFilter} .checkActions`).forEach(el => {
el.classList.add('pointer-events-none', 'opacity-20');
});
}
}
el.addEventListener('keyup', toggleButtons, false);
el.addEventListener('change', toggleButtons, false);
});
document.querySelectorAll(".checkActions").forEach(el => {
el.addEventListener('click', e => {
e.preventDefault();
const subreddit = e.target.dataset.subreddit;
const urlInput = document.querySelector(`[data-subreddit="${subreddit}"].checkUrl`);
const dryRunCheck = document.querySelector(`[data-subreddit="${subreddit}"].dryrunCheck`);
const subFilter = `.sub[data-subreddit="${subreddit}"]`;
const urlInput = document.querySelector(`${subFilter} .checkUrl`);
const isDryun = e.target.classList.contains('dryRunCheck');
const subSection = e.target.closest('div.sub');
bot = subSection.dataset.bot;
const url = urlInput.value;
const dryRun = dryRunCheck.checked ? 1 : 0;
const fetchUrl = `/api/check?instance=<%= instanceId %>&bot=${bot}&url=${url}&dryRun=${dryRun}&subreddit=${subreddit}`;
const fetchUrl = `/api/check?instance=<%= instanceId %>&bot=${bot}&url=${url}&dryRun=${isDryun ? 1 : 0}&subreddit=${subreddit}`;
fetch(fetchUrl);
urlInput.value = '';
dryRunCheck.checked = false;
urlInput.dispatchEvent(new Event('change'));
});
});
@@ -711,23 +740,31 @@
const firstSub = document.querySelectorAll(`[data-bot="${bot}"].sub`)[0];
firstSub.classList.add('active');
let firstSubWrapper;
const firstSubTab = document.querySelector(`ul[data-bot="${bot}"] [data-subreddit="${firstSub.dataset.subreddit}"].tabSelect`);
firstSubTab.classList.add('font-bold', 'no-underline', 'pointer-events-none');
const firstSubWrapper = firstSubTab.closest('.tabSelectWrapper');
//document.querySelector(`[data-subreddit="${subreddit}"][data-bot="${bot}"].sub`).classList.add('active');
if(firstSubTab !== null) {
firstSubTab.classList.add('font-bold', 'no-underline', 'pointer-events-none');
firstSubWrapper = firstSubTab.closest('.tabSelectWrapper');
//document.querySelector(`[data-subreddit="${subreddit}"][data-bot="${bot}"].sub`).classList.add('active');
}
document.querySelectorAll('.tabSelectWrapper').forEach(el => {
el.classList.remove('border-2');
el.classList.add('border');
})
firstSubWrapper.classList.remove('border');
firstSubWrapper.classList.add('border-2');
if(firstSubWrapper !== undefined) {
firstSubWrapper.classList.remove('border');
firstSubWrapper.classList.add('border-2');
}
document.querySelectorAll('[data-bot].subreddit.nestedTabs').forEach(el => {
el.classList.remove('active');
})
document.querySelector(`[data-bot="${bot}"].subreddit.nestedTabs`).classList.add('active');
const nested = document.querySelector(`[data-bot="${bot}"].subreddit.nestedTabs`);
if(nested !== null) {
nested.classList.add('active');
}
const wrapper = e.target.closest('.tabSelectWrapper');//document.querySelector(`[data-subreddit="${subreddit}"].tabSelectWrapper`);
wrapper.classList.remove('border');
@@ -736,7 +773,9 @@
if ('URLSearchParams' in window) {
var searchParams = new URLSearchParams(window.location.search)
searchParams.set("bot", bot);
searchParams.set("sub", firstSub.dataset.subreddit);
if(firstSub.dataset.subreddit !== undefined) {
searchParams.set("sub", firstSub.dataset.subreddit);
}
var newRelativePathQuery = window.location.pathname + '?' + searchParams.toString();
history.pushState(null, '', newRelativePathQuery);
}
@@ -781,7 +820,10 @@
let shownBot = searchParams.get('bot');
if(shownBot === null) {
// show the first bot listed if none is specified
const firstBot = document.querySelector('.tabSelectWrapper[data-bot]');
let firstBot = document.querySelector('.real.tabSelectWrapper[data-bot]');
if(firstBot === null) {
}
if(firstBot !== null) {
shownBot = firstBot.dataset.bot;
searchParams.set('bot', shownBot);
@@ -791,17 +833,27 @@
}
document.querySelector(`[data-bot="${shownBot}"].tabSelect`).classList.add('font-bold', 'no-underline', 'pointer-events-none');
document.querySelector(`ul[data-bot="${shownBot}"] [data-subreddit="${shownSub}"].tabSelect`).classList.add('font-bold', 'no-underline', 'pointer-events-none');
const tabSelect = document.querySelector(`ul[data-bot="${shownBot}"] [data-subreddit="${shownSub}"].tabSelect`);
if(tabSelect !== null) {
tabSelect.classList.add('font-bold', 'no-underline', 'pointer-events-none');
}
document.querySelectorAll('.tabSelectWrapper').forEach(el => el.classList.add('border'));
document.querySelector(`[data-bot="${shownBot}"][data-subreddit="${shownSub}"].sub`).classList.add('active');
const subWrapper = document.querySelector(`ul[data-bot="${shownBot}"] [data-subreddit="${shownSub}"].tabSelectWrapper`);
subWrapper.classList.remove('border');
subWrapper.classList.add('border-2');
if(subWrapper !== null) {
subWrapper.classList.remove('border');
subWrapper.classList.add('border-2');
}
const wrapper = document.querySelector(`[data-bot="${shownBot}"].tabSelectWrapper`);
wrapper.classList.remove('border');
wrapper.classList.add('border-2');
if(wrapper !== null) {
wrapper.classList.remove('border');
wrapper.classList.add('border-2');
}
document.querySelector(`[data-bot="${shownBot}"].subreddit.nestedTabs`).classList.add('active');
const nestedTabs = document.querySelector(`[data-bot="${shownBot}"].subreddit.nestedTabs`);
if(nestedTabs !== null) {
nestedTabs.classList.add('active');
}
document.querySelectorAll('.stats.reloadStats').forEach(el => el.classList.add('hidden'));
document.querySelectorAll('.allStatsToggle').forEach(el => el.classList.add('font-bold', 'no-underline', 'pointer-events-none'));
@@ -809,19 +861,6 @@
<script src="https://cdn.socket.io/4.1.2/socket.io.min.js" integrity="sha384-toS6mmwu70G0fw54EGlWWeA4z3dyJ+dlXBtSURSKN4vyRFOcxd3Bzjj/AoOwY+Rg" crossorigin="anonymous"></script>
<script>
const SUBREDDIT_NAME_LOG_REGEX = /{(.+?)}/;
const BOT_NAME_LOG_REGEX = /~(.+?)~/;
const parseALogName = (reg) => {
return (val) => {
const matches = val.match(reg);
if (matches === null) {
return undefined;
}
return matches[1];
}
}
const parseSubredditLogName = parseALogName(SUBREDDIT_NAME_LOG_REGEX);
const parseBotLogName = parseALogName(BOT_NAME_LOG_REGEX);
let socket = io({
reconnectionAttempts: 5, // bail after 5 attempts
@@ -844,25 +883,29 @@
socket.on("connect", () => {
document.body.classList.add('connected')
socket.on("log", data => {
bufferedBot.set('All', bufferedBot.get('All').concat(data));
const bot = parseBotLogName(data);
const {
subreddit,
bot,
subredditMessage,
allMessage,
formattedMessage
} = data;
if(bot === undefined && subreddit === undefined) {
const sys = bufferedBot.get('system');
if(sys !== undefined) {
sys.set('All', sys.get('All').concat(formattedMessage));
bufferedBot.set('system', sys);
}
}
if(bot !== undefined) {
bufferedBot.set('All', bufferedBot.get('All').concat(allMessage));
const buffBot = bufferedBot.get(bot) || newBufferedLogs();
buffBot.set('All', buffBot.get('All').concat(data));
const sub = parseSubredditLogName(data);
if (sub !== undefined) {
buffBot.set(sub, (buffBot.get(sub) || []).concat(data));
buffBot.set('All', buffBot.get('All').concat(allMessage));
if (subreddit !== undefined) {
buffBot.set(subreddit, (buffBot.get(subreddit) || []).concat(subredditMessage));
}
bufferedBot.set(bot, buffBot);
} else {
bufferedBot.forEach((logs, botName) => {
if(botName === 'All') {
return;
}
logs.set('All', logs.get('All').concat(data));
bufferedBot.set(botName, logs);
});
}

View File

@@ -8,6 +8,7 @@ export interface BotInstance {
subreddits: string[]
nanny?: string
running: boolean
instance: CMInstance
}
export interface CMInstance extends BotConnection {

View File

@@ -2,6 +2,7 @@ import {App} from "../../../App";
import Bot from "../../../Bot";
import {BotInstance, CMInstance} from "../../interfaces";
import {Manager} from "../../../Subreddit/Manager";
import CMUser from "../../Common/User/CMUser";
declare global {
declare namespace Express {
@@ -13,18 +14,7 @@ declare global {
serverBot: Bot,
manager?: Manager,
}
interface User {
name: string
subreddits: string[]
machine?: boolean
isOperator?: boolean
realManagers?: string[]
moderatedManagers?: string[]
realBots?: string[]
moderatedBots?: string[]
scope?: string[]
token?: string
tokenExpiresAt?: number
class User extends CMUser<any, any, any> {
}
}
}

View File

@@ -64,7 +64,8 @@ const program = new Command();
.allowUnknownOption();
runCommand = addOptions(runCommand, getUniversalWebOptions());
runCommand.action(async (interfaceVal, opts) => {
const config = buildOperatorConfigWithDefaults(await parseOperatorConfigFromSources({...opts, mode: interfaceVal}));
const [opConfig, fileConfig] = await parseOperatorConfigFromSources({...opts, mode: interfaceVal});
const config = buildOperatorConfigWithDefaults(opConfig);
const {
mode,
} = config;
@@ -73,7 +74,7 @@ const program = new Command();
await clientServer(config);
}
if(mode === 'all' || mode === 'server') {
await apiServer(config);
await apiServer({...config, fileConfig});
}
} catch (err: any) {
throw err;
@@ -92,9 +93,10 @@ const program = new Command();
checkCommand
.addOption(checks)
.action(async (activityIdentifier, type, botVal, commandOptions = {}) => {
const config = buildOperatorConfigWithDefaults(await parseOperatorConfigFromSources(commandOptions));
const [opConfig, fileConfig] = await parseOperatorConfigFromSources(commandOptions);
const config = buildOperatorConfigWithDefaults(opConfig);
const {checks = []} = commandOptions;
app = new App(config);
app = new App({...config, fileConfig});
let a;
const commentId = commentReg(activityIdentifier);
@@ -168,7 +170,8 @@ const program = new Command();
unmodCommand
.addOption(checks)
.action(async (subreddits = [], botVal, opts = {}) => {
const config = buildOperatorConfigWithDefaults(await parseOperatorConfigFromSources(opts));
const [opConfig, fileConfig] = await parseOperatorConfigFromSources(opts);
const config = buildOperatorConfigWithDefaults(opConfig);
const {checks = []} = opts;
const logger = winston.loggers.get('app');
let bots: Bot[] = [];
@@ -201,7 +204,7 @@ const program = new Command();
} catch (err: any) {
if (!err.logged && !(err instanceof LoggedError)) {
const logger = winston.loggers.get('app');
const logger = winston.loggers.has('app') ? winston.loggers.get('app') : winston.loggers.get('init');
if(isScopeError(err)) {
logger.error('Reddit responded with a 403 insufficient_scope which means the bot is lacking necessary OAUTH scopes to perform general actions.');
}

View File

@@ -1,7 +1,7 @@
import winston, {Logger} from "winston";
import jsonStringify from 'safe-stable-stringify';
import dayjs, {Dayjs, OpUnitType} from 'dayjs';
import {FormattedRuleResult, isRuleSetResult, RulePremise, RuleResult, RuleSetResult} from "./Rule";
import {FormattedRuleResult, isRuleSetResult, RulePremise, RuleResult, RuleSetResult, UserNoteCriteria} from "./Rule";
import deepEqual from "fast-deep-equal";
import {Duration} from 'dayjs/plugin/duration.js';
import Ajv from "ajv";
@@ -15,7 +15,7 @@ import {
ActivityWindowCriteria, ActivityWindowType,
CacheOptions,
CacheProvider,
DurationComparison, DurationVal,
DurationComparison, DurationVal, FilterCriteriaPropertyResult, FilterCriteriaResult,
GenericComparison,
HistoricalStats,
HistoricalStatsDisplay, ImageComparisonResult,
@@ -23,7 +23,7 @@ import {
ImageDetection,
//ImageDownloadOptions,
LogInfo,
NamedGroup,
NamedGroup, OperatorJsonConfig,
PollingOptionsStrong,
RedditEntity,
RedditEntityType,
@@ -34,8 +34,7 @@ import {
StrongSubredditState,
SubredditState
} from "./Common/interfaces";
import JSON5 from "json5";
import yaml, {JSON_SCHEMA} from "js-yaml";
import { Document as YamlDocument } from 'yaml'
import SimpleError from "./Utils/SimpleError";
import InvalidRegexError from "./Utils/InvalidRegexError";
import {constants, promises} from "fs";
@@ -54,11 +53,17 @@ import ImageData from "./Common/ImageData";
import {Sharp, SharpOptions} from "sharp";
// @ts-ignore
import {blockhashData, hammingDistance} from 'blockhash';
import {SetRandomInterval} from "./Common/types";
import {ConfigFormat, SetRandomInterval} from "./Common/types";
import stringSimilarity from 'string-similarity';
import calculateCosineSimilarity from "./Utils/StringMatching/CosineSimilarity";
import levenSimilarity from "./Utils/StringMatching/levenSimilarity";
import {isRequestError, isStatusError} from "./Utils/Errors";
import {parse} from "path";
import JsonConfigDocument from "./Common/Config/JsonConfigDocument";
import YamlConfigDocument from "./Common/Config/YamlConfigDocument";
import AbstractConfigDocument, {ConfigDocumentInterface} from "./Common/Config/AbstractConfigDocument";
//import {ResembleSingleCallbackComparisonResult} from "resemblejs";
// want to guess how many concurrent image comparisons we should be doing
@@ -289,13 +294,39 @@ export const mergeArr = (objValue: [], srcValue: []): (any[] | undefined) => {
}
}
/**
 * deepmerge array strategy: merge source into destination but drop any source object
 * whose keys (compared case-insensitively) already appear on ANY object in the destination.
 * Non-object items are always kept since there are no keys to compare.
 */
export const removeFromSourceIfKeysExistsInDestination = (destinationArray: any[], sourceArray: any[], options: any): any[] => {
    // gather every key (lower-cased, de-duplicated) from object entries in the destination
    const destinationKeys: string[] = [];
    for (const entry of destinationArray) {
        // only objects contribute keys; skip everything else
        if (entry === null || typeof entry !== 'object') {
            continue;
        }
        for (const rawKey of Object.keys(entry)) {
            const key = rawKey.toLowerCase();
            if (!destinationKeys.includes(key)) {
                destinationKeys.push(key);
            }
        }
    }
    // keep a source item when it is not an object (cannot test for keys) or when
    // none of its keys collide with any key found in the destination
    const keptSourceItems = sourceArray.filter((entry) => {
        if (entry === null || typeof entry !== 'object') {
            return true;
        }
        return Object.keys(entry).every(rawKey => !destinationKeys.includes(rawKey.toLowerCase()));
    });
    return keptSourceItems.concat(destinationArray);
}
/**
 * Join rule identifiers into a single pipe-delimited string, preferring a rule's
 * user-assigned name and falling back to the rule kind from its premise.
 */
export const ruleNamesFromResults = (results: RuleResult[]) => {
    const labels: string[] = [];
    for (const result of results) {
        labels.push(result.name || result.premise.kind);
    }
    return labels.join(' | ');
}
export const triggeredIndicator = (val: boolean | null): string => {
export const triggeredIndicator = (val: boolean | null, nullResultIndicator = '-'): string => {
if(val === null) {
return '-';
return nullResultIndicator;
}
return val ? PASS : FAIL;
}
@@ -312,6 +343,40 @@ export const resultsSummary = (results: (RuleResult|RuleSetResult)[], topLevelCo
//return results.map(x => x.name || x.premise.kind).join(' | ')
}
/**
 * Build a human-readable summary of a criteria comparison result.
 *
 * Returns a tuple of:
 *  1. a one-line overview counting Passed/Failed property results (plus Skipped/DNR buckets when non-empty),
 *     listing the property names in each bucket
 *  2. a per-property detail line for every compared property
 */
export const filterCriteriaSummary = (val: FilterCriteriaResult<any>): [string, string[]] => {
    const byOutcome = (outcome: boolean | null | undefined, label: string) => ({
        name: label,
        props: val.propertyResults.filter(p => p.passed === outcome),
    });
    // Passed/Failed always appear; Skipped (passed === null) and DNR (passed === undefined) only when non-empty
    const groups = [byOutcome(true, 'Passed'), byOutcome(false, 'Failed')];
    for (const optional of [byOutcome(null, 'Skipped'), byOutcome(undefined, 'DNR')]) {
        if (optional.props.length > 0) {
            groups.push(optional);
        }
    }
    const overview = groups
        .map(g => `${g.props.length} ${g.name}${g.props.length > 0 ? ` (${g.props.map(p => p.property as string)})` : ''}`)
        .join(' | ');
    return [overview, val.propertyResults.map(filterCriteriaPropertySummary)];
}
/**
 * Render a single criteria property comparison as a one-line string:
 * "<property> => <pass indicator> => Expected: <expected>[ => Found: <found>[ -- <reason>][ (exclude note)]]"
 *
 * passed === undefined renders as 'DNR' (did not run); passed === null renders via the 'Skipped' indicator.
 */
export const filterCriteriaPropertySummary = (val: FilterCriteriaPropertyResult<any>): string => {
    const passResult = val.passed === undefined ? 'DNR' : triggeredIndicator(val.passed, 'Skipped');
    // Found/reason details are only meaningful when the comparison actually ran (true/false)
    let foundDetail = '';
    if (val.passed === true || val.passed === false) {
        foundDetail = ` => Found: ${val.found}`;
        if (val.reason !== undefined) {
            foundDetail += ` -- ${val.reason}`;
        }
        if (val.behavior === 'exclude') {
            foundDetail += ' (Exclude passes when Expected is not Found)';
        }
    }
    return `${val.property as string} => ${passResult} => Expected: ${val.expected}${foundDetail}`;
}
/**
 * Create an Ajv validator instance wired to the given logger.
 * Strict-mode violations are logged rather than thrown, and union types are permitted.
 */
export const createAjvFactory = (logger: Logger) => {
    const ajvOptions = {
        logger,
        verbose: true,
        strict: 'log' as const,
        allowUnionTypes: true,
    };
    return new Ajv(ajvOptions);
}
@@ -458,34 +523,64 @@ export const isActivityWindowCriteria = (val: any): val is ActivityWindowCriteri
return false;
}
export const parseFromJsonOrYamlToObject = (content: string): [object?, Error?, Error?] => {
export interface ConfigToObjectOptions {
    // Origin of the raw content -- presumably a file path or URL, passed through to the
    // document factories below for error reporting. NOTE(review): confirm at call sites.
    location?: string,
    // Factory producing a JSON config document wrapper; overridable by callers (e.g. for testing)
    jsonDocFunc?: (content: string, location?: string) => AbstractConfigDocument<OperatorJsonConfig>,
    // Factory producing a YAML config document wrapper; overridable by callers (e.g. for testing)
    yamlDocFunc?: (content: string, location?: string) => AbstractConfigDocument<YamlDocument>
}
export const parseFromJsonOrYamlToObject = (content: string, options?: ConfigToObjectOptions): [ConfigFormat, ConfigDocumentInterface<YamlDocument | object>?, Error?, Error?] => {
let obj;
let configFormat: ConfigFormat = 'yaml';
let jsonErr,
yamlErr;
const likelyType = likelyJson5(content) ? 'json' : 'yaml';
const {
location,
jsonDocFunc = (content: string, location?: string) => new JsonConfigDocument(content, location),
yamlDocFunc = (content: string, location?: string) => new YamlConfigDocument(content, location),
} = options || {};
try {
obj = JSON5.parse(content);
const oType = obj === null ? 'null' : typeof obj;
const jsonObj = jsonDocFunc(content, location);
const output = jsonObj.toJS();
const oType = output === null ? 'null' : typeof output;
if (oType !== 'object') {
jsonErr = new SimpleError(`Parsing as json produced data of type '${oType}' (expected 'object')`);
obj = undefined;
} else {
obj = jsonObj;
configFormat = 'json';
}
} catch (err: any) {
jsonErr = err;
}
if (obj === undefined) {
try {
obj = yaml.load(content, {schema: JSON_SCHEMA, json: true});
const oType = obj === null ? 'null' : typeof obj;
if (oType !== 'object') {
yamlErr = new SimpleError(`Parsing as yaml produced data of type '${oType}' (expected 'object')`);
obj = undefined;
try {
const yamlObj = yamlDocFunc(content, location)
const output = yamlObj.toJS();
const oType = output === null ? 'null' : typeof output;
if (oType !== 'object') {
yamlErr = new SimpleError(`Parsing as yaml produced data of type '${oType}' (expected 'object')`);
obj = undefined;
} else if (obj === undefined && (likelyType !== 'json' || yamlObj.parsed.errors.length === 0)) {
configFormat = 'yaml';
if(yamlObj.parsed.errors.length !== 0) {
yamlErr = new Error(yamlObj.parsed.errors.join('\n'))
} else {
obj = yamlObj;
}
} catch (err: any) {
yamlErr = err;
}
} catch (err: any) {
yamlErr = err;
}
return [obj, jsonErr, yamlErr];
if (obj === undefined) {
configFormat = likelyType;
}
return [configFormat, obj, jsonErr, yamlErr];
}
export const comparisonTextOp = (val1: number, strOp: string, val2: number): boolean => {
@@ -662,6 +757,126 @@ export const parseExternalUrl = (val: string) => {
return (matches.groups as any).url as string;
}
// No-op logger exposing the debug/error/warn/info surface (used as the default logger
// for fetchExternalUrl) so callers without a real logger can still invoke logging safely
export const dummyLogger = {
    debug: (_message: any) => null,
    error: (_message: any) => null,
    warn: (_message: any) => null,
    info: (_message: any) => null
}
// Matches gist.github.com URLs, capturing the gist id (last path segment) for the Gist API
const GIST_REGEX = new RegExp(/.*gist\.github\.com\/.+\/(.+)/i)
// Matches github.com blob URLs, capturing owner, repo, and blob path for raw.githubusercontent.com resolution
const GH_BLOB_REGEX = new RegExp(/.*github\.com\/(.+)\/(.+)\/blob\/(.+)/i);
// Matches regexr.com share URLs
const REGEXR_REGEX = new RegExp(/^.*((regexr\.com)\/[\w\d]+).*$/i);
// Extracts the escaped "expression" value embedded in a regexr page's HTML
const REGEXR_PAGE_REGEX = new RegExp(/(.|[\n\r])+"expression":"(.+)","text"/g);
/**
 * Fetch the text contents of an external URL, resolving "special" URL shapes to raw content first:
 *
 * * gist.github.com links => resolved through the github Gist API to the (first) file's contents
 * * github.com blob links => resolved to the raw.githubusercontent.com equivalent
 * * regexr.com links => the regex expression is extracted from the page HTML (failures here throw)
 *
 * When gist/blob resolution fails the function falls back to fetching the original URL as-is.
 *
 * @param url URL to retrieve
 * @param logger logger used to report resolution progress and errors; defaults to a no-op logger
 * @returns the retrieved (or resolved) text content
 * @throws Error when the regexr page or the final fallback response is not OK or cannot be parsed
 */
export const fetchExternalUrl = async (url: string, logger: (any) = dummyLogger): Promise<string> => {
    // tracks whether a special-URL resolution already failed (suppresses the "not special" debug line before fallback)
    let hadError = false;
    logger.debug(`Attempting to detect resolvable URL for ${url}`);
    let match = url.match(GIST_REGEX);
    if (match !== null) {
        const gistApiUrl = `https://api.github.com/gists/${match[1]}`;
        logger.debug(`Looks like a non-raw gist URL! Trying to resolve ${gistApiUrl}`);
        try {
            const response = await fetch(gistApiUrl);
            if (!response.ok) {
                logger.error(`Response was not OK from Gist API (${response.statusText}) -- will return response from original URL instead`);
                if (response.size > 0) {
                    logger.error(await response.text())
                }
                hadError = true;
            } else {
                const data = await response.json();
                // use the first file found in the gist payload
                const fileKeys = Object.keys(data.files);
                if (fileKeys.length === 0) {
                    logger.error(`No files found in gist!`);
                } else {
                    if (fileKeys.length > 1) {
                        logger.warn(`More than one file found in gist! Using first found: ${fileKeys[0]}`);
                    } else {
                        logger.debug(`Using file ${fileKeys[0]}`);
                    }
                    const file = data.files[fileKeys[0]];
                    // the API embeds the full contents unless the file is flagged truncated
                    if (file.truncated === false) {
                        return file.content;
                    }
                    const rawUrl = file.raw_url;
                    logger.debug(`File contents was truncated, retrieving full contents from ${rawUrl}`);
                    try {
                        const rawUrlResponse = await fetch(rawUrl);
                        return await rawUrlResponse.text();
                    } catch (err: any) {
                        // fall through to the plain fetch of the original URL at the bottom
                        logger.error('Gist Raw URL Response returned an error, will return response from original URL instead');
                        logger.error(err);
                    }
                }
            }
        } catch (err: any) {
            logger.error('Response returned an error, will return response from original URL instead');
            logger.error(err);
        }
    }
    match = url.match(GH_BLOB_REGEX);
    if (match !== null) {
        // swap github.com/<owner>/<repo>/blob/<path> for its raw content host
        const rawUrl = `https://raw.githubusercontent.com/${match[1]}/${match[2]}/${match[3]}`
        logger.debug(`Looks like a single file github URL! Resolving to ${rawUrl}`);
        try {
            const response = await fetch(rawUrl);
            if (!response.ok) {
                logger.error(`Response was not OK (${response.statusText}) -- will return response from original URL instead`);
                if (response.size > 0) {
                    logger.error(await response.text())
                }
                hadError = true;
            } else {
                return await response.text();
            }
        } catch (err: any) {
            logger.error('Response returned an error, will return response from original URL instead');
            logger.error(err);
        }
    }
    match = url.match(REGEXR_REGEX);
    if(match !== null) {
        logger.debug(`Looks like a Regexr URL! Trying to get expression from page HTML`);
        try {
            const response = await fetch(url);
            if (!response.ok) {
                if (response.size > 0) {
                    logger.error(await response.text())
                }
                throw new Error(`Response was not OK: ${response.statusText}`);
            } else {
                const page = await response.text();
                const pageMatch = [...page.matchAll(REGEXR_PAGE_REGEX)];
                if(pageMatch.length > 0) {
                    // round-trip through JSON.parse to unescape the expression embedded in the page source
                    const unescaped = JSON.parse(`{"value": "${pageMatch[0][2]}"}`)
                    return unescaped.value;
                } else {
                    throw new Error('Could not parse regex expression from page HTML');
                }
            }
        } catch (err: any) {
            // unlike gist/blob handling, regexr failures are fatal -- the raw page HTML is not usable content
            logger.error('Response returned an error');
            throw err;
        }
    }
    if(!hadError) {
        logger.debug('URL was not special (gist, github blob, etc...) so will retrieve plain contents');
    }
    // fallback: fetch the URL as-is and return its body
    const response = await fetch(url);
    if(!response.ok) {
        if (response.size > 0) {
            logger.error(await response.text())
        }
        throw new Error(`Response was not OK: ${response.statusText}`);
    }
    return await response.text();
}
export interface RetryOptions {
maxRequestRetry: number,
maxOtherRetry: number,
@@ -767,7 +982,8 @@ export const isLogLineMinLevel = (log: string | LogInfo, minLevelText: string):
// https://regexr.com/3e6m0
const HYPERLINK_REGEX: RegExp = /(http(s)?:\/\/.)?(www\.)?[-a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-z]{2,6}\b([-a-zA-Z0-9@:%_\+.~#?&//=]*)/;
export const formatLogLineToHtml = (log: string | LogInfo) => {
// Wraps a short timestamp in tooltip markup that reveals the full timestamp on hover
const formattedTime = (short: string, full: string) => {
    const tooltip = `<span style="margin-top:35px" class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black space-y-3 p-2 text-left'>${full}</span>`;
    return `<span class="has-tooltip">${tooltip}<span>${short}</span></span>`;
}
export const formatLogLineToHtml = (log: string | LogInfo, timestamp?: string) => {
const val = typeof log === 'string' ? log : log[MESSAGE];
const logContent = Autolinker.link(val, {
email: false,
@@ -784,7 +1000,14 @@ export const formatLogLineToHtml = (log: string | LogInfo) => {
.replace(/(\s*verbose\s*):/i, '<span class="error purple">$1</span>:')
.replaceAll('\n', '<br />');
//.replace(HYPERLINK_REGEX, '<a target="_blank" href="$&">$&</a>');
return `<div class="logLine">${logContent}</div>`
let line = `<div class="logLine">${logContent}</div>`
if(timestamp !== undefined) {
line = line.replace(timestamp, (match) => {
return formattedTime(dayjs(match).format('HH:mm:ss z'), match);
});
}
return line;
}
export type LogEntry = [number, LogInfo];
@@ -795,10 +1018,11 @@ export interface LogOptions {
operator?: boolean,
user?: string,
allLogsParser?: Function
allLogName?: string
allLogName?: string,
returnType?: 'string' | 'object'
}
export const filterLogBySubreddit = (logs: Map<string, LogEntry[]>, validLogCategories: string[] = [], options: LogOptions): Map<string, string[]> => {
export const filterLogBySubreddit = (logs: Map<string, LogEntry[]>, validLogCategories: string[] = [], options: LogOptions): Map<string, (string|LogInfo)[]> => {
const {
limit,
level,
@@ -806,7 +1030,8 @@ export const filterLogBySubreddit = (logs: Map<string, LogEntry[]>, validLogCate
operator = false,
user,
allLogsParser = parseSubredditLogInfoName,
allLogName = 'app'
allLogName = 'app',
returnType = 'string',
} = options;
// get map of valid logs categories
@@ -840,13 +1065,18 @@ export const filterLogBySubreddit = (logs: Map<string, LogEntry[]>, validLogCate
const sortFunc = sort === 'ascending' ? (a: LogEntry, b: LogEntry) => a[0] - b[0] : (a: LogEntry, b: LogEntry) => b[0] - a[0];
const preparedMap: Map<string, string[]> = new Map();
const preparedMap: Map<string, (string|LogInfo)[]> = new Map();
// iterate each entry and
// sort, filter by level, slice to limit, then map to html string
for(const [k,v] of validSubMap.entries()) {
let preparedEntries = v.filter(([time, l]) => isLogLineMinLevel(l, level));
preparedEntries.sort(sortFunc);
preparedMap.set(k, preparedEntries.slice(0, limit + 1).map(([time, l]) => formatLogLineToHtml(l)));
const entriesSlice = preparedEntries.slice(0, limit + 1);
if(returnType === 'string') {
preparedMap.set(k, entriesSlice.map(([time, l]) => formatLogLineToHtml(l)));
} else {
preparedMap.set(k, entriesSlice.map(([time, l]) => l));
}
}
@@ -996,6 +1226,8 @@ export const toStrongSubredditState = (s: SubredditState, opts?: StrongSubreddit
if (generateDescription && stateDescription === undefined) {
strongState.stateDescription = objectToStringSummary(strongState);
} else {
strongState.stateDescription = stateDescription;
}
return strongState;
@@ -1008,19 +1240,25 @@ export const convertSubredditsRawToStrong = (x: (SubredditState | string), opts:
return toStrongSubredditState(x, opts);
}
export async function readConfigFile(path: string, opts: any) {
export async function readConfigFile(path: string, opts: any): Promise<[string?, ConfigFormat?]> {
const {log, throwOnNotFound = true} = opts;
let extensionHint: ConfigFormat | undefined;
const fileInfo = parse(path);
if(fileInfo.ext !== undefined) {
switch(fileInfo.ext) {
case '.json':
case '.json5':
extensionHint = 'json';
break;
case '.yaml':
extensionHint = 'yaml';
break;
}
}
try {
await promises.access(path, constants.R_OK);
const data = await promises.readFile(path);
const [configObj, jsonErr, yamlErr] = parseFromJsonOrYamlToObject(data as unknown as string);
if(configObj !== undefined) {
return configObj as object;
}
log.error(`Could not parse file contents at ${path} as JSON or YAML:`);
log.error(jsonErr);
log.error(yamlErr);
throw new SimpleError(`Could not parse file contents at ${path} as JSON or YAML`);
return [(data as any).toString(), extensionHint]
} catch (e: any) {
const {code} = e;
if (code === 'ENOENT') {
@@ -1028,14 +1266,16 @@ export async function readConfigFile(path: string, opts: any) {
if (log) {
log.warn('No file found at given path', {filePath: path});
}
e.extension = extensionHint;
throw e;
} else {
return;
return [];
}
} else if (log) {
log.warn(`Encountered error while parsing file`, {filePath: path});
log.error(e);
}
e.extension = extensionHint;
throw e;
}
}
@@ -1044,6 +1284,29 @@ export async function readConfigFile(path: string, opts: any) {
// return (item && typeof item === 'object' && !Array.isArray(item));
// }
/**
 * Verify the application can write to the given location.
 *
 * Resolves true when the path itself is readable and writeable, or when it does not
 * exist but its parent directory is readable/writeable (so the file could be created).
 *
 * @param location filesystem path to check
 * @throws SimpleError when the file exists without read/write permission, or when neither
 * the file nor its parent directory is accessible
 */
export const fileOrDirectoryIsWriteable = async (location: string) => {
    const pathInfo = parse(location);
    try {
        await promises.access(location, constants.R_OK | constants.W_OK);
        return true;
    } catch (err: any) {
        const {code} = err;
        if (code === 'ENOENT') {
            // file doesn't exist, see if we can write to directory in which case we are good
            try {
                await promises.access(pathInfo.dir, constants.R_OK | constants.W_OK)
                // we can write to dir
                return true;
            } catch (accessError: any) {
                // also can't access directory :(
                throw new SimpleError(`No file exists at ${location} and application does not have permission to write to that directory`);
            }
        } else {
            // BUGFIX: message previously read "does have permission", inverting the meaning --
            // this branch is reached precisely because access was denied
            throw new SimpleError(`File exists at ${location} but application does not have permission to write to it.`);
        }
    }
}
// deepmerge array strategy: the source array fully replaces the destination array
export const overwriteMerge = (destinationArray: any[], sourceArray: any[], options: any): any[] => {
    return sourceArray;
}
export const removeUndefinedKeys = (obj: any) => {
@@ -1160,6 +1423,18 @@ export const asSubmission = (value: any): value is Submission => {
return isSubmission(value);
}
// Duck-type check: any non-null object with a defined 'type' property counts as a UserNoteCriteria
export const isUserNoteCriteria = (value: any) => {
    if (value === null || typeof value !== 'object') {
        return false;
    }
    return value.type !== undefined;
}
// Type-guard wrapper over isUserNoteCriteria so the compiler narrows to UserNoteCriteria
export const asUserNoteCriteria = (value: any): value is UserNoteCriteria => {
    return isUserNoteCriteria(value);
}
/**
 * Describe a UserNoteCriteria as "<count> of <search> notes is <type>",
 * defaulting count to '>= 1' and search to 'current' when unset.
 */
export const userNoteCriteriaSummary = (val: UserNoteCriteria): string => {
    const countPart = val.count === undefined ? '>= 1' : val.count;
    const searchPart = val.search === undefined ? 'current' : val.search;
    return `${countPart} of ${searchPart} notes is ${val.type}`;
}
/**
* Serialized activities store subreddit and user properties as their string representations (instead of proxy)
* */

View File

@@ -3,7 +3,10 @@
"compilerOptions": {
"sourceMap": true,
"resolveJsonModule": true,
"typeRoots": ["./src/Web/types"]
"typeRoots": [
"./node_modules/@types",
"./src/Web/types"
]
},
// "compilerOptions": {
// "module": "es6",