Compare commits

..

20 Commits

Author SHA1 Message Date
FoxxMD
79db9d3848 feat: Add dns lookup caching
Reduces DNS traffic by 95%
2024-06-13 14:16:46 -04:00
FoxxMD
4e3ef71c73 fix(mhs): Check for authentication error response
MHS will return a 200 HTTP response even when the response body details an auth error
2024-02-21 09:38:41 -05:00
FoxxMD
c1c0f02c91 fix(notes): Fix user/mod note existing check bugs
* Render note content to check criteria so templated content isn't compared against rendered content
* Fix cached mod note acted on id to have correct prefix based on thing type
* Fix `referencesCurrentActivity` mod action check not also considering the note's acted-on activity
2023-10-06 11:32:17 -04:00
FoxxMD
da45925f0c feat(history): Store rule failure context in results 2023-10-06 11:30:49 -04:00
MHFDoge
e465f2f1e7 Update configuration.md
(cherry picked from commit 6e37fc4eb7)
2023-05-23 11:41:49 -04:00
FoxxMD
00bc917296 fix: activity fetch reporting wrong raw count
unfiltered doesn't need to be initialized until it is actually used
2023-05-23 11:41:17 -04:00
FoxxMD
8080a0b058 feat: Add banned user lookup functionality
* Retrieve author's ban status in subreddit on a per-name basis
* Cache author's ban status for AuthorTTL
2023-05-17 11:49:13 -04:00
FoxxMD
f7dc9222d6 feat(template): Add flair text props to template rendering for submissions and authors
#141
2023-04-21 10:06:52 -04:00
FoxxMD
021e5c524b feat(cache): Cache reddit request errors when fetching activities
Use a small, in-memory cache to store reddit api response errors for 5 seconds to prevent wasting api calls
2023-01-30 12:22:12 -05:00
FoxxMD
e5fe4589e0 fix: Prevent non-serious errors from contributing to retry counts
Do not increase retry count if any error in the stack was marked as non-serious.
2023-01-30 12:20:43 -05:00
FoxxMD
580a9c8fe6 feat(filter): Implement own-subreddit placeholder for subreddit filtering
When subreddit criteria name is `{{subreddit}}` CM checks the given subreddit against the name of the subreddit the bot is currently operating in.
2022-12-16 10:26:42 -05:00
FoxxMD
ef372e531e fix(database): Prevent usage of LIMIT in session storage driver when db backend is mysql/mariadb
Related to freshgiammi-lab/connect-typeorm#8

Closes #128
2022-11-29 09:47:55 -05:00
FoxxMD
fde2836208 chore: remove comments about wrong endpoints
At some point maybe this was fixed by reddit silently?
2022-11-28 14:36:33 -05:00
Matt Foxx
021dd5b0c5 Merge pull request #130 from rysie/bug/selecftlair-fix 2022-11-28 14:35:39 -05:00
Marcin Macinski
5bd38d367a assignFlair doesn't work with flair_template_id 2022-11-25 17:13:49 +01:00
FoxxMD
e79779d980 feat: Implement templating for flair actions 2022-11-21 11:43:34 -05:00
FoxxMD
b094b72d4a docs: Update docker compose instructions
* Specify docker-compose minimum version
* Change commands to use new syntax
2022-11-21 11:29:14 -05:00
FoxxMD
d90e88360d feat: Add some author properties for templating 2022-11-17 13:16:10 -05:00
FoxxMD
9031f7fec8 refactor(polling): Improve resilience for polling source parsing
* Replace hard-coded polling sources with string constants
* Implement string to PollOn parsing which is case-insensitive and forgives misspelling
* Check that manager specifies only one of each polling source type when building config
2022-11-17 12:03:55 -05:00
FoxxMD
0a2b13e4c4 fix: Fix including self activities in Recent Activity without filtering
* Consolidate ACID check for author history results into authorActivities function so it can be used everywhere
* Remove self check in Recent Activity and use consolidated functionality so that filtering still occurs on the current activity
2022-11-16 10:18:37 -05:00
34 changed files with 530 additions and 513 deletions

View File

@@ -24,6 +24,9 @@ services:
cache:
image: 'redis:7-alpine'
volumes:
# on Linux you will need to make sure this directory has the correct permissions for the container to access it
- './data/cache:/data'
database:
image: 'mariadb:10.9.3'

View File

@@ -36,7 +36,7 @@ configuration.
* **FILE** -- Values specified in a YAML/JSON configuration file using the structure [in the schema](https://json-schema.app/view/%23?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Fcontext-mod%2Fmaster%2Fsrc%2FSchema%2FOperatorConfig.json)
* When reading the **schema** if the variable is available at a level of configuration other than **FILE** it will be
noted with the same symbol as above. The value shown is the default.
* **ARG** -- Values specified as CLI arguments to the program (see [ClI Usage](#cli-usage) below)
* **ARG** -- Values specified as CLI arguments to the program (see [CLI Usage](#cli-usage) below)
## File Configuration (Recommended)

View File

@@ -55,6 +55,8 @@ The included [`docker-compose.yml`](/docker-compose.yml) provides production-rea
#### Setup
The included `docker-compose.yml` file is written for **Docker Compose v2.**
For new installations, copy [`config.yaml`](/docker/config/docker-compose/config.yaml) into a folder named `data` in the same folder `docker-compose.yml` will be run from. For users migrating an existing CM instance to docker-compose, copy your existing `config.yaml` into the same `data` folder.
Read through the comments in both `docker-compose.yml` and `config.yaml` and make changes to any relevant settings (passwords, usernames, etc.). Ensure that any settings used in both files (EX MariaDB passwords) match.
@@ -62,13 +64,13 @@ Read through the comments in both `docker-compose.yml` and `config.yaml` and mak
To build and start CM:
```bash
docker-compose up -d
docker compose up -d
```
To include Grafana/Influx dependencies run:
```bash
docker-compose --profile full up -d
docker compose --profile full up -d
```
## Locally

View File

@@ -57,17 +57,42 @@ All Actions with `content` have access to this data:
| `title` | As comments => the body of the comment. As Submission => title | Test post please ignore |
| `shortTitle` | The same as `title` but truncated to 15 characters | test post pleas... |
#### Common Author
Additionally, `author` has these properties accessible:
| Name | Description | Example |
|----------------|-----------------------------------------------------------------------------------|------------|
| `age` | (Approximate) Age of account | 3 months |
| `linkKarma` | Amount of link karma | 10 |
| `commentKarma` | Amount of comment karma | 3 |
| `totalKarma` | Combined link+comment karma | 13 |
| `verified` | Does account have a verified email? | true |
| `flairText` | The text of the Flair assigned to the Author in this subreddit, if one is present | Test Flair |
NOTE: Accessing these properties may require an additional API call so use sparingly on high-volume comments
##### Example Usage
```
The user {{item.author}} has been a redditor for {{item.author.age}}
```
Produces:
> The user FoxxMD has been a redditor for 3 months
### Submissions
If the **Activity** is a Submission these additional properties are accessible:
| Name | Description | Example |
|---------------|-----------------------------------------------------------------|-------------------------|
| `upvoteRatio` | The upvote ratio | 100% |
| `nsfw` | If the submission is marked as NSFW | true |
| `spoiler` | If the submission is marked as a spoiler | true |
| `url` | If the submission was a link then this is the URL for that link | http://example.com |
| `title` | The title of the submission | Test post please ignore |
| Name | Description | Example |
|-------------------|-----------------------------------------------------------------|-------------------------|
| `upvoteRatio` | The upvote ratio | 100% |
| `nsfw` | If the submission is marked as NSFW | true |
| `spoiler` | If the submission is marked as a spoiler | true |
| `url` | If the submission was a link then this is the URL for that link | http://example.com |
| `title` | The title of the submission | Test post please ignore |
| `link_flair_text` | The flair text assigned to this submission | Test Flair |
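A hypothetical example using the new flair properties, assuming they follow the same access pattern as the `{{item.author.age}}` example above:
```
This submission by {{item.author}} (flaired "{{item.author.flairText}}") has link flair "{{item.link_flair_text}}"
```
With the example values from the tables above this would produce:
> This submission by FoxxMD (flaired "Test Flair") has link flair "Test Flair"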
### Comments

package-lock.json (generated, 28 lines changed)
View File

@@ -31,6 +31,7 @@
"body-parser": "^1.19.0",
"cache-manager": "^3.4.4",
"cache-manager-redis-store": "^2.0.0",
"cacheable-lookup": "^6.1.0",
"command-exists": "^1.2.9",
"commander": "^8.0.0",
"comment-json": "^4.1.1",
@@ -2395,9 +2396,9 @@
}
},
"node_modules/cacheable-lookup": {
"version": "5.0.4",
"resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz",
"integrity": "sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA==",
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-6.1.0.tgz",
"integrity": "sha512-KJ/Dmo1lDDhmW2XDPMo+9oiy/CeqosPguPCrgcVzKyZrL6pM1gU2GmPY/xo6OQPTUaA/c0kwHuywB4E6nmT9ww==",
"engines": {
"node": ">=10.6.0"
}
@@ -4421,6 +4422,14 @@
"url": "https://github.com/sindresorhus/got?sponsor=1"
}
},
"node_modules/got/node_modules/cacheable-lookup": {
"version": "5.0.4",
"resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz",
"integrity": "sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA==",
"engines": {
"node": ">=10.6.0"
}
},
"node_modules/graceful-fs": {
"version": "4.2.10",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz",
@@ -12324,9 +12333,9 @@
}
},
"cacheable-lookup": {
"version": "5.0.4",
"resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz",
"integrity": "sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA=="
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-6.1.0.tgz",
"integrity": "sha512-KJ/Dmo1lDDhmW2XDPMo+9oiy/CeqosPguPCrgcVzKyZrL6pM1gU2GmPY/xo6OQPTUaA/c0kwHuywB4E6nmT9ww=="
},
"cacheable-request": {
"version": "7.0.2",
@@ -13900,6 +13909,13 @@
"lowercase-keys": "^2.0.0",
"p-cancelable": "^2.0.0",
"responselike": "^2.0.0"
},
"dependencies": {
"cacheable-lookup": {
"version": "5.0.4",
"resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz",
"integrity": "sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA=="
}
}
},
"graceful-fs": {

View File

@@ -53,6 +53,7 @@
"body-parser": "^1.19.0",
"cache-manager": "^3.4.4",
"cache-manager-redis-store": "^2.0.0",
"cacheable-lookup": "^6.1.0",
"command-exists": "^1.2.9",
"commander": "^8.0.0",
"comment-json": "^4.1.1",

View File

@@ -67,13 +67,15 @@ export class BanAction extends Action {
// @ts-ignore
const fetchedSub = await item.subreddit.fetch();
const fetchedName = await item.author.name;
const bannedUser = await fetchedSub.banUser({
const banData = {
name: fetchedName,
banMessage: renderedContent === undefined ? undefined : renderedContent,
banReason: renderedReason,
banNote: renderedNote,
duration: this.duration
});
};
const bannedUser = await fetchedSub.banUser(banData);
await this.resources.addUserToSubredditBannedUserCache(banData)
touchedEntities.push(bannedUser);
}
return {

View File

@@ -57,8 +57,18 @@ export class ModNoteAction extends Action {
// nothing to do!
noteCheckResult = 'existingNoteCheck=false so no existing note checks were performed.';
} else {
const contextualCheck = {...this.existingNoteCheck};
let contextualNotes: string[] | undefined = undefined;
if(this.existingNoteCheck.note !== undefined && this.existingNoteCheck.note !== null) {
contextualNotes = [];
const notes = Array.isArray(this.existingNoteCheck.note) ? this.existingNoteCheck.note : [this.existingNoteCheck.note];
for(const n of notes) {
contextualNotes.push((await this.renderContent(n, item, ruleResults, actionResults) as string))
}
contextualCheck.note = contextualNotes;
}
const noteCheckCriteriaResult = await this.resources.isAuthor(item, {
modActions: [this.existingNoteCheck]
modActions: [contextualCheck]
});
noteCheckPassed = noteCheckCriteriaResult.passed;
const {details} = buildFilterCriteriaSummary(noteCheckCriteriaResult);

View File

@@ -30,30 +30,25 @@ export class FlairAction extends Action {
async process(item: Comment | Submission, ruleResults: RuleResultEntity[], actionResults: ActionResultEntity[], options: runCheckOptions): Promise<ActionProcessResult> {
const dryRun = this.getRuntimeAwareDryrun(options);
let flairParts = [];
if(this.text !== '') {
flairParts.push(`Text: ${this.text}`);
}
if(this.css !== '') {
flairParts.push(`CSS: ${this.css}`);
}
if(this.flair_template_id !== '') {
flairParts.push(`Template: ${this.flair_template_id}`);
}
const renderedText = this.text === '' ? '' : await this.renderContent(this.text, item, ruleResults, actionResults) as string;
flairParts.push(`Text: ${renderedText === '' ? '(None)' : renderedText}`);
const renderedCss = this.css === '' ? '' : await this.renderContent(this.css, item, ruleResults, actionResults) as string;
flairParts.push(`CSS: ${renderedCss === '' ? '(None)' : renderedCss}`);
flairParts.push(`Template: ${this.flair_template_id === '' ? '(None)' : this.flair_template_id}`);
const flairSummary = flairParts.length === 0 ? 'No flair (unflaired)' : flairParts.join(' | ');
this.logger.verbose(flairSummary);
if (item instanceof Submission) {
if(!this.dryRun) {
if (this.flair_template_id) {
// typings are wrong for this function, flair_template_id should be accepted
// assignFlair uses /api/flair (mod endpoint)
// selectFlair uses /api/selectflair (self endpoint for user to choose their own flair for submission)
// @ts-ignore
await item.assignFlair({flair_template_id: this.flair_template_id}).then(() => {});
await item.selectFlair({flair_template_id: this.flair_template_id}).then(() => {});
item.link_flair_template_id = this.flair_template_id;
} else {
await item.assignFlair({text: this.text, cssClass: this.css}).then(() => {});
item.link_flair_css_class = this.css;
item.link_flair_text = this.text;
await item.assignFlair({text: renderedText, cssClass: renderedCss}).then(() => {});
item.link_flair_css_class = renderedCss;
item.link_flair_text = renderedText;
}
await this.resources.resetCacheForItem(item);
}

View File

@@ -26,6 +26,8 @@ export class UserFlairAction extends Action {
async process(item: Comment | Submission, ruleResults: RuleResultEntity[], actionResults: ActionResultEntity[], options: runCheckOptions): Promise<ActionProcessResult> {
const dryRun = this.getRuntimeAwareDryrun(options);
let flairParts = [];
let renderedText: string | undefined = undefined;
let renderedCss: string | undefined = undefined;
if (this.flair_template_id !== undefined) {
flairParts.push(`Flair template ID: ${this.flair_template_id}`)
@@ -34,10 +36,12 @@ export class UserFlairAction extends Action {
}
} else {
if (this.text !== undefined) {
flairParts.push(`Text: ${this.text}`);
renderedText = await this.renderContent(this.text, item, ruleResults, actionResults) as string;
flairParts.push(`Text: ${renderedText}`);
}
if (this.css !== undefined) {
flairParts.push(`CSS: ${this.css}`);
renderedCss = await this.renderContent(this.css, item, ruleResults, actionResults) as string;
flairParts.push(`CSS: ${renderedCss}`);
}
}
@@ -58,7 +62,7 @@ export class UserFlairAction extends Action {
this.logger.error('Either the flair template ID is incorrect or you do not have permission to access it.');
throw err;
}
} else if (this.text === undefined && this.css === undefined) {
} else if (renderedText === undefined && renderedCss === undefined) {
// @ts-ignore
await item.subreddit.deleteUserFlair(item.author.name);
item.author_flair_css_class = null;
@@ -68,11 +72,11 @@ export class UserFlairAction extends Action {
// @ts-ignore
await item.author.assignFlair({
subredditName: item.subreddit.display_name,
cssClass: this.css,
text: this.text,
cssClass: renderedCss,
text: renderedText,
});
item.author_flair_text = this.text ?? null;
item.author_flair_css_class = this.css ?? null;
item.author_flair_text = renderedText ?? null;
item.author_flair_css_class = renderedCss ?? null;
}
await this.resources.resetCacheForItem(item);
if(typeof item.author !== 'string') {

View File

@@ -54,8 +54,18 @@ export class UserNoteAction extends Action {
// nothing to do!
noteCheckResult = 'existingNoteCheck=false so no existing note checks were performed.';
} else {
const contextualCheck = {...this.existingNoteCheck};
let contextualNotes: string[] | undefined = undefined;
if(this.existingNoteCheck.note !== undefined && this.existingNoteCheck.note !== null) {
contextualNotes = [];
const notes = Array.isArray(this.existingNoteCheck.note) ? this.existingNoteCheck.note : [this.existingNoteCheck.note];
for(const n of notes) {
contextualNotes.push((await this.renderContent(n, item, ruleResults, actionResults) as string))
}
contextualCheck.note = contextualNotes;
}
const noteCheckCriteriaResult = await this.resources.isAuthor(item, {
userNotes: [this.existingNoteCheck]
userNotes: [contextualCheck]
});
noteCheckPassed = noteCheckCriteriaResult.passed;
const {details} = buildFilterCriteriaSummary(noteCheckCriteriaResult);

View File

@@ -46,7 +46,13 @@ import {RunStateType} from "../Common/Entities/RunStateType";
import {QueueRunState} from "../Common/Entities/EntityRunState/QueueRunState";
import {EventsRunState} from "../Common/Entities/EntityRunState/EventsRunState";
import {ManagerRunState} from "../Common/Entities/EntityRunState/ManagerRunState";
import {Invokee, PollOn} from "../Common/Infrastructure/Atomic";
import {
Invokee,
POLLING_COMMENTS, POLLING_MODQUEUE,
POLLING_SUBMISSIONS,
POLLING_UNMODERATED,
PollOn
} from "../Common/Infrastructure/Atomic";
import {FilterCriteriaDefaults} from "../Common/Infrastructure/Filters/FilterShapes";
import {snooLogWrapper} from "../Utils/loggerFactory";
import {InfluxClient} from "../Common/Influx/InfluxClient";
@@ -558,9 +564,9 @@ class Bot implements BotInstanceFunctions {
parseSharedStreams() {
const sharedCommentsSubreddits = !this.sharedStreams.includes('newComm') ? [] : this.subManagers.filter(x => x.isPollingShared('newComm')).map(x => x.subreddit.display_name);
const sharedCommentsSubreddits = !this.sharedStreams.includes(POLLING_COMMENTS) ? [] : this.subManagers.filter(x => x.isPollingShared(POLLING_COMMENTS)).map(x => x.subreddit.display_name);
if (sharedCommentsSubreddits.length > 0) {
const stream = this.cacheManager.modStreams.get('newComm');
const stream = this.cacheManager.modStreams.get(POLLING_COMMENTS);
if (stream === undefined || stream.subreddit !== sharedCommentsSubreddits.join('+')) {
let processed;
if (stream !== undefined) {
@@ -580,20 +586,20 @@ class Bot implements BotInstanceFunctions {
label: 'Shared Polling'
});
// @ts-ignore
defaultCommentStream.on('error', this.createSharedStreamErrorListener('newComm'));
defaultCommentStream.on('listing', this.createSharedStreamListingListener('newComm'));
this.cacheManager.modStreams.set('newComm', defaultCommentStream);
defaultCommentStream.on('error', this.createSharedStreamErrorListener(POLLING_COMMENTS));
defaultCommentStream.on('listing', this.createSharedStreamListingListener(POLLING_COMMENTS));
this.cacheManager.modStreams.set(POLLING_COMMENTS, defaultCommentStream);
}
} else {
const stream = this.cacheManager.modStreams.get('newComm');
const stream = this.cacheManager.modStreams.get(POLLING_COMMENTS);
if (stream !== undefined) {
stream.end('Determined no managers are listening on shared stream parsing');
}
}
const sharedSubmissionsSubreddits = !this.sharedStreams.includes('newSub') ? [] : this.subManagers.filter(x => x.isPollingShared('newSub')).map(x => x.subreddit.display_name);
const sharedSubmissionsSubreddits = !this.sharedStreams.includes(POLLING_SUBMISSIONS) ? [] : this.subManagers.filter(x => x.isPollingShared(POLLING_SUBMISSIONS)).map(x => x.subreddit.display_name);
if (sharedSubmissionsSubreddits.length > 0) {
const stream = this.cacheManager.modStreams.get('newSub');
const stream = this.cacheManager.modStreams.get(POLLING_SUBMISSIONS);
if (stream === undefined || stream.subreddit !== sharedSubmissionsSubreddits.join('+')) {
let processed;
if (stream !== undefined) {
@@ -613,19 +619,19 @@ class Bot implements BotInstanceFunctions {
label: 'Shared Polling'
});
// @ts-ignore
defaultSubStream.on('error', this.createSharedStreamErrorListener('newSub'));
defaultSubStream.on('listing', this.createSharedStreamListingListener('newSub'));
this.cacheManager.modStreams.set('newSub', defaultSubStream);
defaultSubStream.on('error', this.createSharedStreamErrorListener(POLLING_SUBMISSIONS));
defaultSubStream.on('listing', this.createSharedStreamListingListener(POLLING_SUBMISSIONS));
this.cacheManager.modStreams.set(POLLING_SUBMISSIONS, defaultSubStream);
}
} else {
const stream = this.cacheManager.modStreams.get('newSub');
const stream = this.cacheManager.modStreams.get(POLLING_SUBMISSIONS);
if (stream !== undefined) {
stream.end('Determined no managers are listening on shared stream parsing');
}
}
const isUnmoderatedShared = !this.sharedStreams.includes('unmoderated') ? false : this.subManagers.some(x => x.isPollingShared('unmoderated'));
const unmoderatedstream = this.cacheManager.modStreams.get('unmoderated');
const isUnmoderatedShared = !this.sharedStreams.includes(POLLING_UNMODERATED) ? false : this.subManagers.some(x => x.isPollingShared(POLLING_UNMODERATED));
const unmoderatedstream = this.cacheManager.modStreams.get(POLLING_UNMODERATED);
if (isUnmoderatedShared && unmoderatedstream === undefined) {
const defaultUnmoderatedStream = new UnmoderatedStream(this.client, {
subreddit: 'mod',
@@ -634,15 +640,15 @@ class Bot implements BotInstanceFunctions {
label: 'Shared Polling'
});
// @ts-ignore
defaultUnmoderatedStream.on('error', this.createSharedStreamErrorListener('unmoderated'));
defaultUnmoderatedStream.on('listing', this.createSharedStreamListingListener('unmoderated'));
this.cacheManager.modStreams.set('unmoderated', defaultUnmoderatedStream);
defaultUnmoderatedStream.on('error', this.createSharedStreamErrorListener(POLLING_UNMODERATED));
defaultUnmoderatedStream.on('listing', this.createSharedStreamListingListener(POLLING_UNMODERATED));
this.cacheManager.modStreams.set(POLLING_UNMODERATED, defaultUnmoderatedStream);
} else if (!isUnmoderatedShared && unmoderatedstream !== undefined) {
unmoderatedstream.end('Determined no managers are listening on shared stream parsing');
}
const isModqueueShared = !this.sharedStreams.includes('modqueue') ? false : this.subManagers.some(x => x.isPollingShared('modqueue'));
const modqueuestream = this.cacheManager.modStreams.get('modqueue');
const isModqueueShared = !this.sharedStreams.includes(POLLING_MODQUEUE) ? false : this.subManagers.some(x => x.isPollingShared(POLLING_MODQUEUE));
const modqueuestream = this.cacheManager.modStreams.get(POLLING_MODQUEUE);
if (isModqueueShared && modqueuestream === undefined) {
const defaultModqueueStream = new ModQueueStream(this.client, {
subreddit: 'mod',
@@ -651,9 +657,9 @@ class Bot implements BotInstanceFunctions {
label: 'Shared Polling'
});
// @ts-ignore
defaultModqueueStream.on('error', this.createSharedStreamErrorListener('modqueue'));
defaultModqueueStream.on('listing', this.createSharedStreamListingListener('modqueue'));
this.cacheManager.modStreams.set('modqueue', defaultModqueueStream);
defaultModqueueStream.on('error', this.createSharedStreamErrorListener(POLLING_MODQUEUE));
defaultModqueueStream.on('listing', this.createSharedStreamListingListener(POLLING_MODQUEUE));
this.cacheManager.modStreams.set(POLLING_MODQUEUE, defaultModqueueStream);
} else if (isModqueueShared && modqueuestream !== undefined) {
modqueuestream.end('Determined no managers are listening on shared stream parsing');
}

View File

@@ -1,4 +1,4 @@
import {Entity, Column, ManyToOne, PrimaryColumn, OneToMany, Index, DataSource, JoinColumn} from "typeorm";
import {Entity, Column, ManyToOne, PrimaryColumn, OneToMany, Index} from "typeorm";
import {AuthorEntity} from "./AuthorEntity";
import {Subreddit} from "./Subreddit";
import {CMEvent} from "./CMEvent";
@@ -6,8 +6,6 @@ import {asComment, getActivityAuthorName, parseRedditFullname, redditThingTypeTo
import {activityReports, ActivityType, Report, SnoowrapActivity} from "../Infrastructure/Reddit";
import {ActivityReport} from "./ActivityReport";
import dayjs, {Dayjs} from "dayjs";
import {ExtendedSnoowrap} from "../../Utils/SnoowrapClients";
import {Comment, Submission} from 'snoowrap/dist/objects';
export interface ActivityEntityOptions {
id: string
@@ -47,7 +45,7 @@ export class Activity {
@Column({name: 'name'})
name!: string;
@ManyToOne(type => Subreddit, sub => sub.activities, {cascade: ['insert'], eager: true})
@ManyToOne(type => Subreddit, sub => sub.activities, {cascade: ['insert']})
subreddit!: Subreddit;
@Column("varchar", {length: 20})
@@ -60,18 +58,17 @@ export class Activity {
@Column("text")
permalink!: string;
@ManyToOne(type => AuthorEntity, author => author.activities, {cascade: ['insert'], eager: true})
@ManyToOne(type => AuthorEntity, author => author.activities, {cascade: ['insert']})
author!: AuthorEntity;
@OneToMany(type => CMEvent, act => act.activity)
@OneToMany(type => CMEvent, act => act.activity) // note: the inverse side of this relation is CMEvent.activity
actionedEvents!: CMEvent[]
@ManyToOne('Activity', 'comments', {nullable: true, cascade: ['insert']})
@JoinColumn({name: 'submission_id'})
@ManyToOne(type => Activity, obj => obj.comments, {nullable: true})
submission?: Activity;
@OneToMany('Activity', 'submission', {nullable: true})
comments?: Activity[];
@OneToMany(type => Activity, obj => obj.submission, {nullable: true})
comments!: Activity[];
@OneToMany(type => ActivityReport, act => act.activity, {cascade: ['insert'], eager: true})
reports: ActivityReport[] | undefined
@@ -154,12 +151,10 @@ export class Activity {
return false;
}
static async fromSnoowrapActivity(activity: SnoowrapActivity, options: fromSnoowrapOptions | undefined = {}) {
static fromSnoowrapActivity(subreddit: Subreddit, activity: SnoowrapActivity, lastKnownStateTimestamp?: dayjs.Dayjs | undefined) {
let submission: Activity | undefined;
let type: ActivityType = 'submission';
let content: string;
const subreddit = await Subreddit.fromSnoowrap(activity.subreddit, options?.db);
if(asComment(activity)) {
type = 'comment';
content = activity.body;
@@ -184,30 +179,8 @@ export class Activity {
submission
});
entity.syncReports(activity, options.lastKnownStateTimestamp);
entity.syncReports(activity, lastKnownStateTimestamp);
return entity;
}
toSnoowrap(client: ExtendedSnoowrap): SnoowrapActivity {
let act: SnoowrapActivity;
if(this.type === 'submission') {
act = new Submission({name: this.id, id: this.name}, client, false);
act.title = this.content;
} else {
act = new Comment({name: this.id, id: this.name}, client, false);
act.link_id = this.submission?.id as string;
act.body = this.content;
}
act.permalink = this.permalink;
act.subreddit = this.subreddit.toSnoowrap(client);
act.author = this.author.toSnoowrap(client);
return act;
}
}
export interface fromSnoowrapOptions {
lastKnownStateTimestamp?: dayjs.Dayjs | undefined
db?: DataSource
}

View File

@@ -1,8 +1,5 @@
import {Entity, Column, PrimaryColumn, OneToMany} from "typeorm";
import {Activity} from "./Activity";
import {ExtendedSnoowrap} from "../../Utils/SnoowrapClients";
import {SnoowrapActivity} from "../Infrastructure/Reddit";
import {RedditUser} from "snoowrap/dist/objects";
@Entity({name: 'Author'})
export class AuthorEntity {
@@ -14,15 +11,11 @@ export class AuthorEntity {
name!: string;
@OneToMany(type => Activity, act => act.author)
activities!: Promise<Activity[]>
activities!: Activity[]
constructor(data?: any) {
if(data !== undefined) {
this.name = data.name;
}
}
toSnoowrap(client: ExtendedSnoowrap): RedditUser {
return new RedditUser({name: this.name, id: this.id}, client, false);
}
}

View File

@@ -6,7 +6,7 @@ import {
ManyToOne,
PrimaryColumn,
BeforeInsert,
AfterLoad, JoinColumn
AfterLoad
} from "typeorm";
import {
ActivityDispatch
@@ -22,15 +22,15 @@ import Comment from "snoowrap/dist/objects/Comment";
import {ColumnDurationTransformer} from "./Transformers";
import { RedditUser } from "snoowrap/dist/objects";
import {ActivitySourceTypes, DurationVal, NonDispatchActivitySourceValue, onExistingFoundBehavior} from "../Infrastructure/Atomic";
import {Activity} from "./Activity";
@Entity({name: 'DispatchedAction'})
export class DispatchedEntity extends TimeAwareRandomBaseEntity {
//@ManyToOne(type => Activity, obj => obj.dispatched, {cascade: ['insert'], eager: true, nullable: false})
@ManyToOne(type => Activity, undefined, {cascade: ['insert'], eager: true, nullable: false})
@JoinColumn({name: 'activityId'})
activity!: Activity
@Column()
activityId!: string
@Column()
author!: string
@Column({
type: 'int',
@@ -82,10 +82,11 @@ export class DispatchedEntity extends TimeAwareRandomBaseEntity {
}})
tardyTolerant!: boolean | Duration
constructor(data?: HydratedActivityDispatch) {
constructor(data?: ActivityDispatch & { manager: ManagerEntity }) {
super();
if (data !== undefined) {
this.activity = data.activity;
this.activityId = data.activity.name;
this.author = getActivityAuthorName(data.activity.author);
this.delay = data.delay;
this.createdAt = data.queuedAt;
this.type = data.type;
@@ -150,7 +151,20 @@ export class DispatchedEntity extends TimeAwareRandomBaseEntity {
}
async toActivityDispatch(client: ExtendedSnoowrap): Promise<ActivityDispatch> {
let activity = this.activity.toSnoowrap(client);
const redditThing = parseRedditFullname(this.activityId);
if(redditThing === undefined) {
throw new Error(`Could not parse reddit ID from value '${this.activityId}'`);
}
let activity: Comment | Submission;
if (redditThing?.type === 'comment') {
// @ts-ignore
activity = await client.getComment(redditThing.id);
} else {
// @ts-ignore
activity = await client.getSubmission(redditThing.id);
}
activity.author = new RedditUser({name: this.author}, client, false);
activity.id = redditThing.id;
return {
id: this.id,
queuedAt: this.createdAt,
@@ -162,13 +176,8 @@ export class DispatchedEntity extends TimeAwareRandomBaseEntity {
cancelIfQueued: this.cancelIfQueued,
identifier: this.identifier,
type: this.type,
author: activity.author.name,
author: this.author,
dryRun: this.dryRun
}
}
}
export interface HydratedActivityDispatch extends Omit<ActivityDispatch, 'activity'> {
activity: Activity
manager: ManagerEntity
}
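The rehydration above relies on Reddit "fullname" prefixes (`t1_` = comment, `t3_` = submission; the same prefixes appear later in `ModUserNote.toRaw`). The actual `parseRedditFullname` util is not part of this compare, but here is a rough, hedged sketch of the kind of parsing it performs:

```ts
// Sketch only: split a fullname like 't1_abc123' into its thing type and bare id.
type RedditThingType = 'comment' | 'user' | 'submission' | 'subreddit';

const fullnamePrefixes: Record<string, RedditThingType> = {
    t1: 'comment',
    t2: 'user',
    t3: 'submission',
    t5: 'subreddit',
};

const parseFullname = (val: string): { type: RedditThingType, id: string } | undefined => {
    const match = val.match(/^(t\d)_(\w+)$/i);
    if (match === null) {
        return undefined;
    }
    const type = fullnamePrefixes[match[1].toLowerCase()];
    if (type === undefined) {
        return undefined;
    }
    return { type, id: match[2] };
};

// parseFullname('t3_xyz789') => { type: 'submission', id: 'xyz789' }
```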

View File

@@ -1,7 +1,5 @@
import {Entity, Column, PrimaryColumn, OneToMany, Index, DataSource} from "typeorm";
import {Entity, Column, PrimaryColumn, OneToMany, Index} from "typeorm";
import {Activity} from "./Activity";
import {ExtendedSnoowrap} from "../../Utils/SnoowrapClients";
import {Subreddit as SnoowrapSubreddit} from "snoowrap/dist/objects";
export interface SubredditEntityOptions {
id: string
@@ -27,18 +25,4 @@ export class Subreddit {
this.name = data.name;
}
}
toSnoowrap(client: ExtendedSnoowrap): SnoowrapSubreddit {
return new SnoowrapSubreddit({display_name: this.name, name: this.id}, client, false);
}
static async fromSnoowrap(subreddit: SnoowrapSubreddit, db?: DataSource) {
if(db !== undefined) {
const existing = await db.getRepository(Subreddit).findOneBy({name: subreddit.display_name});
if(existing) {
return existing;
}
}
return new Subreddit({id: await subreddit.name, name: await subreddit.display_name});
}
}

View File

@@ -111,6 +111,19 @@ export interface DurationObject {
export type JoinOperands = 'OR' | 'AND';
export type PollOn = 'unmoderated' | 'modqueue' | 'newSub' | 'newComm';
export const POLLING_UNMODERATED: PollOn = 'unmoderated';
export const POLLING_MODQUEUE: PollOn = 'modqueue';
export const POLLING_SUBMISSIONS: PollOn = 'newSub';
export const POLLING_COMMENTS: PollOn = 'newComm';
export const pollOnTypes: PollOn[] = [POLLING_UNMODERATED, POLLING_MODQUEUE, POLLING_SUBMISSIONS, POLLING_COMMENTS];
export const pollOnTypeMapping: Map<string, PollOn> = new Map([
['unmoderated', POLLING_UNMODERATED],
['modqueue', POLLING_MODQUEUE],
['newsub', POLLING_SUBMISSIONS],
['newcomm', POLLING_COMMENTS],
// be nice if user misspelled
['newcom', POLLING_COMMENTS]
]);
export type ModeratorNames = 'self' | 'automod' | 'automoderator' | string;
export type Invokee = 'system' | 'user';
export type RunState = 'running' | 'paused' | 'stopped';
@@ -395,3 +408,9 @@ export interface RuleResultsTemplateData {
export interface GenericContentTemplateData extends BaseTemplateData, Partial<RuleResultsTemplateData>, Partial<ActionResultsTemplateData> {
item?: (SubmissionTemplateData | CommentTemplateData)
}
export type SubredditPlaceholderType = '{{subreddit}}';
export const subredditPlaceholder: SubredditPlaceholderType = '{{subreddit}}';
export const asSubredditPlaceholder = (val: any): val is SubredditPlaceholderType => {
return typeof val === 'string' && val.toLowerCase() === '{{subreddit}}';
}
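Given `asSubredditPlaceholder` above, the own-subreddit filtering from commit `580a9c8fe6` presumably resolves the placeholder to the subreddit the bot is currently operating in before doing a normal name comparison. A hedged sketch (the helper name and comparison details are illustrative, not CM's actual implementation):

```ts
// Sketch: test a subreddit criteria name that may be either a RegExp or the
// literal '{{subreddit}}' placeholder against a candidate subreddit name.
const matchesSubredditCriteria = (
    criteriaName: RegExp | SubredditPlaceholderType,
    currentSubreddit: string, // subreddit the bot is currently operating in
    candidate: string         // subreddit name taken from the activity being checked
): boolean => {
    if (asSubredditPlaceholder(criteriaName)) {
        return candidate.toLowerCase() === currentSubreddit.toLowerCase();
    }
    return criteriaName.test(candidate);
};
```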

View File

@@ -4,7 +4,7 @@ import {
DurationComparor,
ModeratorNameCriteria,
ModeratorNames, ModActionType,
ModUserNoteLabel, RelativeDateTimeMatch
ModUserNoteLabel, RelativeDateTimeMatch, SubredditPlaceholderType
} from "../Atomic";
import {ActivityType, MaybeActivityType} from "../Reddit";
import {GenericComparison, parseGenericValueComparison} from "../Comparisons";
@@ -57,7 +57,7 @@ export interface SubredditCriteria {
}
export interface StrongSubredditCriteria extends SubredditCriteria {
name?: RegExp
name?: RegExp | SubredditPlaceholderType
}
export const defaultStrongSubredditCriteriaOptions = {

View File

@@ -1,4 +1,5 @@
import {Comment, RedditUser, Submission, Subreddit} from "snoowrap/dist/objects";
import { BannedUser } from "snoowrap/dist/objects/Subreddit";
import { ValueOf } from "ts-essentials";
import {CMError} from "../../Utils/Errors";
@@ -166,3 +167,14 @@ export interface RedditRemovalMessageOptions {
title?: string
lock?: boolean
}
export interface CMBannedUser extends SnoowrapBannedUser {
user: RedditUser
}
export interface SnoowrapBannedUser extends Omit<BannedUser, 'id'> {
days_left: number | null
rel_id?: string
id?: string
}
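These interfaces support the `8080a0b058` banned-user lookup commit. A hedged sketch of the per-name lookup with `AuthorTTL` caching that commit describes (the cache key format and function shape are assumptions; only `cache.get`/`cache.set` with a `ttl` option appear elsewhere in this compare):

```ts
// Sketch: cache an author's ban status in a subreddit for AuthorTTL seconds so repeated
// checks against the same name don't cost extra API calls.
interface SimpleCache {
    get: (key: string) => Promise<unknown>;
    set: (key: string, val: unknown, options?: { ttl: number }) => Promise<unknown>;
}

const isAuthorBanned = async (
    cache: SimpleCache,
    authorTTL: number,
    fetchBannedUser: (name: string) => Promise<SnoowrapBannedUser | undefined>,
    name: string
): Promise<boolean> => {
    const key = `bannedUser-${name.toLowerCase()}`; // assumed key format
    const cached = await cache.get(key);
    if (cached !== undefined && cached !== null) {
        return cached as boolean;
    }
    const banned = (await fetchBannedUser(name)) !== undefined;
    await cache.set(key, banned, { ttl: authorTTL });
    return banned;
};
```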

View File

@@ -1,19 +0,0 @@
import {MigrationInterface, QueryRunner, TableColumn} from "typeorm"
export class delayedReset1667415256831 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
queryRunner.connection.logger.logSchemaBuild('Truncating (removing) existing Dispatched Actions due to internal structural changes');
await queryRunner.clearTable('DispatchedAction');
await queryRunner.changeColumn('DispatchedAction', 'author', new TableColumn({
name: 'author',
type: 'varchar',
length: '150',
isNullable: true
}));
}
public async down(queryRunner: QueryRunner): Promise<void> {
}
}

View File

@@ -372,7 +372,7 @@ export interface PollingOptions extends PollingDefaults {
* * after they have been manually approved from modqueue
*
* */
pollOn: 'unmoderated' | 'modqueue' | 'newSub' | 'newComm'
pollOn: PollOn
}
export interface TTLConfig {

View File

@@ -8,7 +8,7 @@ import {
overwriteMerge,
parseBool, parseExternalUrl, parseUrlContext, parseWikiContext, randomId,
readConfigFile,
removeUndefinedKeys, resolvePathFromEnvWithRelative, toStrongSharingACLConfig
removeUndefinedKeys, resolvePathFromEnvWithRelative, toPollOn, toStrongSharingACLConfig
} from "./util";
import Ajv, {Schema} from 'ajv';
@@ -74,8 +74,8 @@ import {ErrorWithCause} from "pony-cause";
import {RunConfigHydratedData, RunConfigData, RunConfigObject} from "./Run";
import {AuthorRuleConfig} from "./Rule/AuthorRule";
import {
CacheProvider, ConfigFormat, ConfigFragmentParseFunc,
PollOn
CacheProvider, ConfigFormat, ConfigFragmentParseFunc, POLLING_MODQUEUE, POLLING_UNMODERATED,
PollOn, pollOnTypes
} from "./Common/Infrastructure/Atomic";
import {
asFilterOptionsJson,
@@ -452,27 +452,31 @@ export class ConfigBuilder {
export const buildPollingOptions = (values: (string | PollingOptions)[]): PollingOptionsStrong[] => {
let opts: PollingOptionsStrong[] = [];
let rawOpts: PollingOptions;
for (const v of values) {
if (typeof v === 'string') {
opts.push({
pollOn: v as PollOn,
interval: DEFAULT_POLLING_INTERVAL,
limit: DEFAULT_POLLING_LIMIT,
});
rawOpts = {pollOn: v as PollOn}; // maybeee
} else {
const {
pollOn: p,
interval = DEFAULT_POLLING_INTERVAL,
limit = DEFAULT_POLLING_LIMIT,
delayUntil,
} = v;
opts.push({
pollOn: p as PollOn,
interval,
limit,
delayUntil,
});
rawOpts = v;
}
const {
pollOn: p,
interval = DEFAULT_POLLING_INTERVAL,
limit = DEFAULT_POLLING_LIMIT,
delayUntil,
} = rawOpts;
const pVal = toPollOn(p);
if (opts.some(x => x.pollOn === pVal)) {
throw new SimpleError(`Polling source ${pVal} cannot appear more than once in polling options`);
}
opts.push({
pollOn: pVal,
interval,
limit,
delayUntil,
});
}
return opts;
}
@@ -796,7 +800,7 @@ export const parseDefaultBotInstanceFromArgs = (args: any): BotInstanceJsonConfi
heartbeatInterval: heartbeat,
},
polling: {
shared: sharedMod ? ['unmoderated', 'modqueue'] : undefined,
shared: sharedMod ? [POLLING_UNMODERATED, POLLING_MODQUEUE] : undefined,
},
nanny: {
softLimit,
@@ -908,7 +912,7 @@ export const parseDefaultBotInstanceFromEnv = (): BotInstanceJsonConfig => {
heartbeatInterval: process.env.HEARTBEAT !== undefined ? parseInt(process.env.HEARTBEAT) : undefined,
},
polling: {
shared: parseBool(process.env.SHARE_MOD) ? ['unmoderated', 'modqueue'] : undefined,
shared: parseBool(process.env.SHARE_MOD) ? [POLLING_UNMODERATED, POLLING_MODQUEUE] : undefined,
},
nanny: {
softLimit: process.env.SOFT_LIMIT !== undefined ? parseInt(process.env.SOFT_LIMIT) : undefined,
@@ -1525,10 +1529,10 @@ export const buildBotConfig = (data: BotInstanceJsonConfig, opConfig: OperatorCo
botCache.provider.prefix = buildCachePrefix([botCache.provider.prefix, 'bot', (botName || objectHash.sha1(botCreds))]);
}
let realShared = shared === true ? ['unmoderated', 'modqueue', 'newComm', 'newSub'] : shared;
let realShared: PollOn[] = shared === true ? pollOnTypes : shared.map(toPollOn);
if (sharedMod === true) {
realShared.push('unmoderated');
realShared.push('modqueue');
realShared.push(POLLING_UNMODERATED);
realShared.push(POLLING_MODQUEUE);
}
const botLevelStatDefaults = {...statDefaultsFromOp, ...databaseStatisticsDefaults};
@@ -1566,7 +1570,7 @@ export const buildBotConfig = (data: BotInstanceJsonConfig, opConfig: OperatorCo
caching: botCache,
userAgent,
polling: {
shared: [...new Set(realShared)] as PollOn[],
shared: Array.from(new Set(realShared)),
stagger,
limit,
interval,
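`toPollOn` itself lives in `util.ts` and is not included in this compare. Based on `pollOnTypeMapping` and `pollOnTypes` added in `Atomic.ts` above, a plausible sketch of the case-insensitive, misspelling-tolerant parser:

```ts
// Sketch of a toPollOn parser built on the mapping added in Atomic.ts;
// the real util.ts implementation is not shown in this diff.
type PollOn = 'unmoderated' | 'modqueue' | 'newSub' | 'newComm';

const pollOnTypeMapping: Map<string, PollOn> = new Map([
    ['unmoderated', 'unmoderated'],
    ['modqueue', 'modqueue'],
    ['newsub', 'newSub'],
    ['newcomm', 'newComm'],
    ['newcom', 'newComm'], // forgive a common misspelling
]);

const toPollOn = (val: string): PollOn => {
    const parsed = pollOnTypeMapping.get(val.trim().toLowerCase());
    if (parsed === undefined) {
        throw new Error(`'${val}' is not a valid polling source. Valid sources: unmoderated | modqueue | newComm | newSub`);
    }
    return parsed;
};
```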

View File

@@ -311,21 +311,24 @@ export class HistoryRule extends Rule {
let criteriaMet = false;
let failCriteriaResult: string = '';
const criteriaResultsSummary = criteriaResults.map(x => this.generateResultDataFromCriteria(x, true).result).join(this.condition === 'OR' ? ' OR ' : ' AND ');
if (this.condition === 'OR') {
criteriaMet = criteriaResults.some(x => x.triggered);
if(!criteriaMet) {
failCriteriaResult = `${FAIL} No criteria was met`;
failCriteriaResult = `${FAIL} No criteria was met => ${criteriaResultsSummary}`;
}
} else {
criteriaMet = criteriaResults.every(x => x.triggered);
if(!criteriaMet) {
if(criteriaResults.some(x => x.triggered)) {
const met = criteriaResults.filter(x => x.triggered);
failCriteriaResult = `${FAIL} ${met.length} out of ${criteriaResults.length} criteria met but Rule required all be met. Set log level to debug to see individual results`;
failCriteriaResult = `${FAIL} ${met.length} out of ${criteriaResults.length} criteria met but Rule required all be met => ${criteriaResultsSummary}`;
const results = criteriaResults.map(x => this.generateResultDataFromCriteria(x, true));
this.logger.debug(`\r\n ${results.map(x => x.result).join('\r\n')}`);
} else {
failCriteriaResult = `${FAIL} No criteria was met`;
failCriteriaResult = `${FAIL} No criteria was met => ${criteriaResultsSummary}`;
}
}
}
@@ -335,8 +338,8 @@ export class HistoryRule extends Rule {
const refCriteriaResults = criteriaResults.find(x => x.triggered);
const resultData = this.generateResultDataFromCriteria(refCriteriaResults);
this.logger.verbose(`${PASS} ${resultData.result}`);
return Promise.resolve([true, this.getResult(true, resultData)]);
this.logger.verbose(`${PASS} ${criteriaResultsSummary}`);
return Promise.resolve([true, this.getResult(true, {data: resultData.data, result: criteriaResultsSummary})]);
} else {
// log failures for easier debugging
for(const res of criteriaResults) {

View File

@@ -241,6 +241,8 @@ export class MHSRule extends Rule {
res = await this.callMHS(content);
if(res.response.toLowerCase() === 'success') {
await this.resources.cache.set(key, res, {ttl: this.resources.ttl.wikiTTL});
} else if(res.response.toLowerCase().includes('authentication failure')) {
throw new CMError(`MHS Request failed with Authentication failure. You most likely need to generate a new API key.`);
}
return res;
}

View File

@@ -126,28 +126,7 @@ export class RecentActivityRule extends Rule {
async process(item: Submission | Comment): Promise<[boolean, RuleResult]> {
let activities;
// ACID is a bitch
// reddit may not return the activity being checked in the author's recent history due to availability/consistency issues or *something*
// so make sure we add it in if config is checking the same type and it isn't included
// TODO refactor this for SubredditState everywhere branch
let shouldIncludeSelf = true;
const strongWindow = windowConfigToWindowCriteria(this.window);
const {
filterOn: {
post: {
subreddits: {
include = [],
exclude = []
} = {},
} = {},
} = {}
} = strongWindow;
// typeof x === string -- a patch for now...technically this is all it supports but eventually will need to be able to do any SubredditState
if (include.length > 0 && !include.some(x => x.name !== undefined && x.name.toLocaleLowerCase() === item.subreddit.display_name.toLocaleLowerCase())) {
shouldIncludeSelf = false;
} else if (exclude.length > 0 && exclude.some(x => x.name !== undefined && x.name.toLocaleLowerCase() === item.subreddit.display_name.toLocaleLowerCase())) {
shouldIncludeSelf = false;
}
if(strongWindow.fetch === undefined && this.lookAt !== undefined) {
switch(this.lookAt) {
@@ -159,25 +138,10 @@ export class RecentActivityRule extends Rule {
}
}
activities = await this.resources.getAuthorActivities(item.author, strongWindow);
switch (strongWindow.fetch) {
case 'comment':
if (shouldIncludeSelf && item instanceof Comment && !activities.some(x => x.name === item.name)) {
activities.unshift(item);
}
break;
case 'submission':
if (shouldIncludeSelf && item instanceof Submission && !activities.some(x => x.name === item.name)) {
activities.unshift(item);
}
break;
default:
if (shouldIncludeSelf && !activities.some(x => x.name === item.name)) {
activities.unshift(item);
}
break;
}
// ACID is a bitch
// reddit may not return the activity being checked in the author's recent history due to availability/consistency issues or *something*
// so add current activity as a prefetched activity and add it to the returned activities (after it goes through filtering)
activities = await this.resources.getAuthorActivities(item.author, strongWindow, undefined, [item]);
let viableActivity = activities;
// if config does not specify reference then we set the default based on whether the item is a submission or not

View File

@@ -93,8 +93,8 @@ import {EntityRunState} from "../Common/Entities/EntityRunState/EntityRunState";
import {
ActivitySourceValue,
EventRetentionPolicyRange,
Invokee,
PollOn,
Invokee, POLLING_COMMENTS, POLLING_MODQUEUE, POLLING_SUBMISSIONS, POLLING_UNMODERATED,
PollOn, pollOnTypes,
recordOutputTypes,
RunState
} from "../Common/Infrastructure/Atomic";
@@ -635,7 +635,7 @@ export class Manager extends EventEmitter implements RunningStates {
const configBuilder = new ConfigBuilder({logger: this.logger});
const validJson = configBuilder.validateJson(configObj);
const {
polling = [{pollOn: 'unmoderated', limit: DEFAULT_POLLING_LIMIT, interval: DEFAULT_POLLING_INTERVAL}],
polling = [{pollOn: POLLING_SUBMISSIONS, limit: DEFAULT_POLLING_LIMIT, interval: DEFAULT_POLLING_INTERVAL}],
caching,
credentials,
dryRun,
@@ -957,7 +957,7 @@ export class Manager extends EventEmitter implements RunningStates {
await this.resources.setActivityLastSeenDate(item.name);
// if modqueue is running then we know we are checking for new reports every X seconds
if(options.activitySource.identifier === 'modqueue') {
if(options.activitySource.identifier === POLLING_MODQUEUE) {
// if the activity is from modqueue and only has one report then we know that report was just created
if(item.num_reports === 1
// otherwise if it has more than one report AND we have seen it (its only seen if it has already been stored (in below block))
@@ -975,7 +975,7 @@ export class Manager extends EventEmitter implements RunningStates {
let shouldPersistReports = false;
if (existingEntity === null) {
activityEntity = await Activity.fromSnoowrapActivity(activity, {lastKnownStateTimestamp, db: this.resources.database});
activityEntity = Activity.fromSnoowrapActivity(this.managerEntity.subreddit, activity, lastKnownStateTimestamp);
// always persist if activity is not already persisted and any reports exist
if (item.num_reports > 0) {
shouldPersistReports = true;
@@ -1189,7 +1189,7 @@ export class Manager extends EventEmitter implements RunningStates {
// @ts-ignore
const subProxy = await this.client.getSubmission((item as Comment).link_id);
const sub = await this.resources.getActivity(subProxy);
subActivity = await this.activityRepo.save(await Activity.fromSnoowrapActivity(sub, {db: this.resources.database}));
subActivity = await this.activityRepo.save(Activity.fromSnoowrapActivity(this.managerEntity.subreddit, sub));
}
event.activity.submission = subActivity;
@@ -1325,25 +1325,20 @@ export class Manager extends EventEmitter implements RunningStates {
}
}
isPollingShared(streamName: string): boolean {
isPollingShared(streamName: PollOn): boolean {
const pollOption = this.pollOptions.find(x => x.pollOn === streamName);
return pollOption !== undefined && pollOption.limit === DEFAULT_POLLING_LIMIT && pollOption.interval === DEFAULT_POLLING_INTERVAL && this.sharedStreams.includes(streamName as PollOn);
return pollOption !== undefined && pollOption.limit === DEFAULT_POLLING_LIMIT && pollOption.interval === DEFAULT_POLLING_INTERVAL && this.sharedStreams.includes(streamName);
}
async buildPolling() {
const sources: PollOn[] = ['unmoderated', 'modqueue', 'newComm', 'newSub'];
const sources = [...pollOnTypes];
const subName = this.subreddit.display_name;
for (const source of sources) {
if (!sources.includes(source)) {
this.logger.error(`'${source}' is not a valid polling source. Valid sources: unmoderated | modqueue | newComm | newSub`);
continue;
}
const pollOpt = this.pollOptions.find(x => x.pollOn.toLowerCase() === source.toLowerCase());
const pollOpt = this.pollOptions.find(x => x.pollOn === source);
if (pollOpt === undefined) {
if(this.sharedStreamCallbacks.has(source)) {
this.logger.debug(`Removing listener for shared polling on ${source.toUpperCase()} because it no longer exists in config`);
@@ -1366,11 +1361,11 @@ export class Manager extends EventEmitter implements RunningStates {
let modStreamType: string | undefined;
switch (source) {
case 'unmoderated':
case POLLING_UNMODERATED:
if (limit === DEFAULT_POLLING_LIMIT && interval === DEFAULT_POLLING_INTERVAL && this.sharedStreams.includes(source)) {
modStreamType = 'unmoderated';
modStreamType = POLLING_UNMODERATED;
// use default mod stream from resources
stream = this.cacheManager.modStreams.get('unmoderated') as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
stream = this.cacheManager.modStreams.get(POLLING_UNMODERATED) as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
} else {
stream = new UnmoderatedStream(this.client, {
subreddit: this.subreddit.display_name,
@@ -1380,11 +1375,11 @@ export class Manager extends EventEmitter implements RunningStates {
});
}
break;
case 'modqueue':
case POLLING_MODQUEUE:
if (limit === DEFAULT_POLLING_LIMIT && interval === DEFAULT_POLLING_INTERVAL && this.sharedStreams.includes(source)) {
modStreamType = 'modqueue';
modStreamType = POLLING_MODQUEUE;
// use default mod stream from resources
stream = this.cacheManager.modStreams.get('modqueue') as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
stream = this.cacheManager.modStreams.get(POLLING_MODQUEUE) as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
} else {
stream = new ModQueueStream(this.client, {
subreddit: this.subreddit.display_name,
@@ -1394,11 +1389,11 @@ export class Manager extends EventEmitter implements RunningStates {
});
}
break;
case 'newSub':
case POLLING_SUBMISSIONS:
if (limit === DEFAULT_POLLING_LIMIT && interval === DEFAULT_POLLING_INTERVAL && this.sharedStreams.includes(source)) {
modStreamType = 'newSub';
modStreamType = POLLING_SUBMISSIONS;
// use default mod stream from resources
stream = this.cacheManager.modStreams.get('newSub') as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
stream = this.cacheManager.modStreams.get(POLLING_SUBMISSIONS) as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
} else {
stream = new SubmissionStream(this.client, {
subreddit: this.subreddit.display_name,
@@ -1408,11 +1403,11 @@ export class Manager extends EventEmitter implements RunningStates {
});
}
break;
case 'newComm':
case POLLING_COMMENTS:
if (limit === DEFAULT_POLLING_LIMIT && interval === DEFAULT_POLLING_INTERVAL && this.sharedStreams.includes(source)) {
modStreamType = 'newComm';
modStreamType = POLLING_COMMENTS;
// use default mod stream from resources
stream = this.cacheManager.modStreams.get('newComm') as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
stream = this.cacheManager.modStreams.get(POLLING_COMMENTS) as SPoll<Snoowrap.Submission | Snoowrap.Comment>;
} else {
stream = new CommentStream(this.client, {
subreddit: this.subreddit.display_name,
@@ -1422,6 +1417,8 @@ export class Manager extends EventEmitter implements RunningStates {
});
}
break;
default:
throw new CMError(`This shouldn't happen! All polling sources are enumerated in switch. Source value: ${source}`)
}
if (stream === undefined) {
@@ -1514,10 +1511,10 @@ export class Manager extends EventEmitter implements RunningStates {
}
noChecksWarning = (source: PollOn) => (listing: any) => {
if (this.commentChecks.length === 0 && ['modqueue', 'newComm'].some(x => x === source)) {
if (this.commentChecks.length === 0 && [POLLING_MODQUEUE, POLLING_COMMENTS].some(x => x === source)) {
this.logger.warn(`Polling '${source.toUpperCase()}' may return Comments but no comments checks were configured.`);
}
if (this.submissionChecks.length === 0 && ['unmoderated', 'modqueue', 'newSub'].some(x => x === source)) {
if (this.submissionChecks.length === 0 && [POLLING_UNMODERATED, POLLING_MODQUEUE, POLLING_SUBMISSIONS].some(x => x === source)) {
this.logger.warn(`Polling '${source.toUpperCase()}' may return Submissions but no submission checks were configured.`);
}
}
@@ -1670,7 +1667,7 @@ export class Manager extends EventEmitter implements RunningStates {
}
this.startedAt = dayjs();
const modQueuePollOpts = this.pollOptions.find(x => x.pollOn === 'modqueue');
const modQueuePollOpts = this.pollOptions.find(x => x.pollOn === POLLING_MODQUEUE);
if(modQueuePollOpts !== undefined) {
this.modqueueInterval = modQueuePollOpts.interval;
}

View File

@@ -167,7 +167,15 @@ export class ModNote {
if (referenceItem === undefined) {
throw new CMError('Criteria wants to check if mod note references activity but no activity was given.');
}
const isCurrentActivity = this.action.actedOn !== undefined && referenceItem !== undefined && this.action.actedOn.name === referenceItem.name;
let isCurrentActivity = false;
if(referenceItem !== undefined) {
if(this.action.actedOn !== undefined) {
isCurrentActivity = this.action.actedOn.name === referenceItem.name;
}
if(isCurrentActivity === false && this.note !== undefined && this.note.actedOn !== undefined) {
isCurrentActivity = this.note.actedOn.name === referenceItem.name;
}
}
if ((referencesCurrentActivity === true && !isCurrentActivity) || (referencesCurrentActivity === false && isCurrentActivity)) {
return false;
}

View File

@@ -33,9 +33,19 @@ export class ModUserNote {
}
toRaw(): ModUserNoteRaw {
let id = undefined;
if(this.actedOn !== undefined) {
if(this.actedOn instanceof Submission) {
id = `t3_${this.actedOn.id}`;
} else if(this.actedOn instanceof Comment) {
id = `t1_${this.actedOn.id}`;
} else if(this.actedOn instanceof RedditUser) {
id = `t2_${this.actedOn.id}`;
}
}
return {
note: this.note,
reddit_id: this.actedOn !== undefined ? this.actedOn.id : undefined,
reddit_id: id,
label: this.label
}
}

View File

@@ -56,7 +56,7 @@ import {
} from "../util";
import {
ActivityDispatch,
CacheConfig,
CacheConfig, CacheOptions,
Footer,
HistoricalStatsDisplay,
ResourceStats, StrongTTLConfig,
@@ -69,16 +69,7 @@ import {cacheTTLDefaults, createHistoricalDisplayDefaults,} from "../Common/defa
import {ExtendedSnoowrap} from "../Utils/SnoowrapClients";
import dayjs, {Dayjs} from "dayjs";
import ImageData from "../Common/ImageData";
import {
Between, Brackets,
DataSource,
DeleteQueryBuilder,
In,
LessThan,
NotBrackets,
Repository,
SelectQueryBuilder
} from "typeorm";
import {Between, DataSource, DeleteQueryBuilder, LessThan, Repository, SelectQueryBuilder} from "typeorm";
import {CMEvent as ActionedEventEntity, CMEvent} from "../Common/Entities/CMEvent";
import {RuleResultEntity} from "../Common/Entities/RuleResultEntity";
import globrex from 'globrex';
@@ -113,7 +104,7 @@ import {
UserNoteCriteria
} from "../Common/Infrastructure/Filters/FilterCriteria";
import {
ActivitySourceValue,
ActivitySourceValue, asSubredditPlaceholder,
ConfigFragmentParseFunc,
DurationVal,
EventRetentionPolicyRange,
@@ -124,7 +115,7 @@ import {
ModUserNoteLabel,
RelativeDateTimeMatch,
statFrequencies,
StatisticFrequencyOption,
StatisticFrequencyOption, SubredditPlaceholderType,
WikiContext
} from "../Common/Infrastructure/Atomic";
import {
@@ -145,9 +136,9 @@ import {Duration} from "dayjs/plugin/duration";
import {
ActivityType,
AuthorHistorySort,
CachedFetchedActivitiesResult,
CachedFetchedActivitiesResult, CMBannedUser,
FetchedActivitiesResult, MaybeActivityType, RedditUserLike,
SnoowrapActivity,
SnoowrapActivity, SnoowrapBannedUser,
SubredditLike,
SubredditRemovalReason
} from "../Common/Infrastructure/Reddit";
@@ -170,9 +161,8 @@ import {ActionResultEntity} from "../Common/Entities/ActionResultEntity";
import {ActivitySource} from "../Common/ActivitySource";
import {SubredditResourceOptions} from "../Common/Subreddit/SubredditResourceInterfaces";
import {SubredditStats} from "./Stats";
import {CMCache} from "../Common/Cache";
import { Activity } from '../Common/Entities/Activity';
import {FindOptionsWhere} from "typeorm/find-options/FindOptionsWhere";
import {CMCache, createCacheManager} from "../Common/Cache";
import {BannedUser, BanOptions} from "snoowrap/dist/objects/Subreddit";
export const DEFAULT_FOOTER = '\r\n*****\r\nThis action was performed by [a bot.]({{botLink}}) Mention a moderator or [send a modmail]({{modmailLink}}) if you have any ideas, questions, or concerns about this action.';
@@ -198,13 +188,13 @@ export class SubredditResources {
database: DataSource
client: ExtendedSnoowrap
cache: CMCache
memoryCache: CMCache
cacheSettingsHash?: string;
thirdPartyCredentials: ThirdPartyCredentialsJsonConfig;
delayedItems: ActivityDispatch[] = [];
botAccount?: string;
dispatchedActivityRepo: Repository<DispatchedEntity>
activitySourceRepo: Repository<ActivitySourceEntity>
activityRepo: Repository<Activity>
retention?: EventRetentionPolicyRange
managerEntity: ManagerEntity
botEntity: Bot
@@ -241,7 +231,6 @@ export class SubredditResources {
this.database = database;
this.dispatchedActivityRepo = this.database.getRepository(DispatchedEntity);
this.activitySourceRepo = this.database.getRepository(ActivitySourceEntity);
this.activityRepo = this.database.getRepository(Activity);
this.retention = retention;
//this.prefix = prefix;
this.client = client;
@@ -257,6 +246,12 @@ export class SubredditResources {
}
this.cache = cache;
this.cache.setLogger(this.logger);
const memoryCacheOpts: CacheOptions = {
store: 'memory',
max: 10,
ttl: 10
};
this.memoryCache = new CMCache(createCacheManager(memoryCacheOpts), memoryCacheOpts, false, undefined, {}, this.logger);
this.subredditStats = new SubredditStats(database, managerEntity, cache, statFrequency, this.logger);
@@ -417,25 +412,21 @@ export class SubredditResources {
}
},
relations: {
manager: true,
activity: {
submission: true
}
manager: true
}
});
const now = dayjs();
const toRemove = [];
for(const dAct of dispatchedActivities) {
const shouldDispatchAt = dAct.createdAt.add(dAct.delay.asSeconds(), 'seconds');
let tardyHint = '';
if(shouldDispatchAt.isBefore(now)) {
let tardyHint = `Activity ${dAct.activity.id} queued at ${dAct.createdAt.format('YYYY-MM-DD HH:mm:ssZ')} for ${dAct.delay.humanize()} is now LATE`;
let tardyHint = `Activity ${dAct.activityId} queued at ${dAct.createdAt.format('YYYY-MM-DD HH:mm:ssZ')} for ${dAct.delay.humanize()} is now LATE`;
if(dAct.tardyTolerant === true) {
tardyHint += ` but was configured as ALWAYS 'tardy tolerant' so will be dispatched immediately`;
} else if(dAct.tardyTolerant === false) {
tardyHint += ` and was not configured as 'tardy tolerant' so will be dropped`;
this.logger.warn(tardyHint);
toRemove.push(dAct.id);
await this.removeDelayedActivity(dAct.id);
continue;
} else {
// see if it's within tolerance
@@ -443,7 +434,7 @@ export class SubredditResources {
if(latest.isBefore(now)) {
tardyHint += ` and IS NOT within tardy tolerance of ${dAct.tardyTolerant.humanize()} of planned dispatch time so will be dropped`;
this.logger.warn(tardyHint);
toRemove.push(dAct.id);
await this.removeDelayedActivity(dAct.id);
continue;
} else {
tardyHint += ` but is within tardy tolerance of ${dAct.tardyTolerant.humanize()} of planned dispatch time so will be dispatched immediately`
@@ -456,115 +447,27 @@ export class SubredditResources {
try {
this.delayedItems.push(await dAct.toActivityDispatch(this.client))
} catch (e) {
this.logger.warn(new ErrorWithCause(`Unable to add Activity ${dAct.activity.id} from database delayed activities to in-app delayed activities queue`, {cause: e}));
this.logger.warn(new ErrorWithCause(`Unable to add Activity ${dAct.activityId} from database delayed activities to in-app delayed activities queue`, {cause: e}));
}
}
if(toRemove.length > 0) {
await this.removeDelayedActivity(toRemove);
}
}
}
async addDelayedActivity(data: ActivityDispatch) {
// TODO merge this with getActivity or something...
if(asComment(data.activity)) {
const existingSub = await this.activityRepo.findOneBy({_id: data.activity.link_id});
if(existingSub === null) {
const sub = await this.getActivity(new Submission({name: data.activity.link_id}, this.client, false));
await this.activityRepo.save(await Activity.fromSnoowrapActivity(sub, {db: this.database}));
}
}
const dEntity = await this.dispatchedActivityRepo.save(new DispatchedEntity({...data, manager: this.managerEntity, activity: await Activity.fromSnoowrapActivity(data.activity, {db: this.database})}));
const dEntity = await this.dispatchedActivityRepo.save(new DispatchedEntity({...data, manager: this.managerEntity}));
data.id = dEntity.id;
this.delayedItems.push(data);
}
async removeDelayedActivity(val?: string | string[]) {
let dispatched: DispatchedEntity[] = [];
const where: FindOptionsWhere<DispatchedEntity> = {
manager: {
id: this.managerEntity.id
}
};
if(val !== undefined) {
const ids = typeof val === 'string' ? [val] : val;
where.id = In(ids);
}
dispatched = await this.dispatchedActivityRepo.find({
where,
relations: {
manager: true,
activity: {
actionedEvents: true,
submission: {
actionedEvents: true
}
}
}
});
const actualDispatchedIds = dispatched.map(x => x.id);
this.logger.debug(`${actualDispatchedIds.length} marked for deletion`, {leaf: 'Delayed Activities'});
// get potential activities to delete
// but only include activities that don't have any actionedEvents
let activityIdsToDelete = Array.from(dispatched.reduce((acc, curr) => {
if(curr.activity.actionedEvents === null || curr.activity.actionedEvents.length === 0) {
acc.add(curr.activity.id);
}
if(curr.activity.submission !== undefined && curr.activity.submission !== null) {
if(curr.activity.submission.actionedEvents === null || curr.activity.submission.actionedEvents.length === 0) {
acc.add(curr.activity.submission.id);
}
}
return acc;
}, new Set<string>()));
const rawActCount = activityIdsToDelete.length;
let activeActCount = 0;
// if we have any potential activities to delete we now need to get any dispatched actions that reference these activities
// that are NOT the ones we are going to delete
if(activityIdsToDelete.length > 0) {
const activeDispatchedQuery = this.dispatchedActivityRepo.createQueryBuilder('dis')
.leftJoinAndSelect('dis.activity', 'activity')
.leftJoinAndSelect('activity.submission', 'submission')
.where(new NotBrackets((qb) => {
qb.where('dis.id IN (:...currIds)', {currIds: actualDispatchedIds});
}))
.andWhere(new Brackets((qb) => {
qb.where('activity._id IN (:...actMainIds)', {actMainIds: activityIdsToDelete})
qb.orWhere('submission._id IN (:...actSubIds)', {actSubIds: activityIdsToDelete})
}));
//const sql = activeDispatchedQuery.getSql();
const activeDispatched = await activeDispatchedQuery.getMany();
// all activity ids, from the actions to delete, that are being used by dispatched actions that are NOT the ones we are going to delete
const activeDispatchedIds = Array.from(activeDispatched.reduce((acc, curr) => {
acc.add(curr.activity.id);
if(curr.activity.submission !== undefined && curr.activity.submission !== null) {
acc.add(curr.activity.submission.id);
}
return acc;
}, new Set<string>()));
activeActCount = activeDispatchedIds.length;
// filter out any that are still in use
activityIdsToDelete = activityIdsToDelete.filter(x => !activeDispatchedIds.includes(x));
}
this.logger.debug(`Marked ${activityIdsToDelete.length} Activities created, by Delayed, for deletion (${rawActCount} w/o Events | ${activeActCount} used by other Delayed Activities)`, {leaf: 'Delayed Activities'});
if(actualDispatchedIds.length > 0) {
await this.dispatchedActivityRepo.delete(actualDispatchedIds);
if(val === undefined) {
await this.dispatchedActivityRepo.delete({manager: {id: this.managerEntity.id}});
this.delayedItems = [];
} else {
this.logger.warn('No dispatched ids found to delete');
const ids = typeof val === 'string' ? [val] : val;
await this.dispatchedActivityRepo.delete(ids);
this.delayedItems = this.delayedItems.filter(x => !ids.includes(x.id));
}
if(activityIdsToDelete.length > 0) {
await this.activityRepo.delete(activityIdsToDelete);
}
this.delayedItems = this.delayedItems.filter(x => !actualDispatchedIds.includes(x.id));
}
async initStats() {
@@ -966,6 +869,47 @@ export class SubredditResources {
}
}
async getSubredditBannedUser(val: string | RedditUser): Promise<CMBannedUser | undefined> {
const subName = this.subreddit.display_name;
const name = getActivityAuthorName(val);
const hash = `sub-${subName}-banned-${name}`;
if (this.ttl.authorTTL !== false) {
const cachedBanData = (await this.cache.get(hash)) as undefined | null | false | SnoowrapBannedUser;
if (cachedBanData !== undefined && cachedBanData !== null) {
this.logger.debug(`Cache Hit: Subreddit Banned User ${subName} ${name}`);
if(cachedBanData === false) {
return undefined;
}
return {...cachedBanData, user: new RedditUser({name: cachedBanData.name}, this.client, false)};
}
}
let bannedUsers = await this.subreddit.getBannedUsers({name});
let bannedUser: CMBannedUser | undefined;
if(bannedUsers.length > 0) {
const banData = bannedUsers[0] as SnoowrapBannedUser;
bannedUser = {...banData, user: new RedditUser({name: banData.name}, this.client, false)};
}
if (this.ttl.authorTTL !== false) {
// @ts-ignore
await this.cache.set(hash, bannedUsers.length > 0 ? bannedUsers[0] as SnoowrapBannedUser : false, {ttl: this.ttl.authorTTL});
}
return bannedUser;
}
async addUserToSubredditBannedUserCache(data: BanOptions) {
if (this.ttl.authorTTL !== false) {
const subName = this.subreddit.display_name;
const name = getActivityAuthorName(data.name);
const hash = `sub-${subName}-banned-${name}`;
const banData: SnoowrapBannedUser = {date: dayjs().unix(), name: data.name, days_left: data.duration ?? null, note: data.banNote ?? ''};
await this.cache.set(hash, banData, {ttl: this.ttl.authorTTL})
}
}
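// A hedged usage sketch (not part of this diff): how a hypothetical Ban action could
// combine the two helpers above so later checks avoid extra API calls.
//
//   await this.subreddit.banUser(banOptions);                       // snoowrap ban call
//   await resources.addUserToSubredditBannedUserCache(banOptions);  // seed the cache immediately
//   ...
//   const ban = await resources.getSubredditBannedUser(authorName); // later lookups hit the cache
//   if (ban !== undefined) { /* author is currently banned in this subreddit */ }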
async hasSubreddit(name: string) {
if (this.ttl.subredditTTL !== false) {
const hash = `sub-${name}`;
@@ -1135,13 +1079,13 @@ export class SubredditResources {
}
}
async getAuthorActivities(user: RedditUser, options: ActivityWindowCriteria, customListing?: NamedListing): Promise<SnoowrapActivity[]> {
async getAuthorActivities(user: RedditUser, options: ActivityWindowCriteria, customListing?: NamedListing, prefetchedActivities?: SnoowrapActivity[]): Promise<SnoowrapActivity[]> {
const {post} = await this.getAuthorActivitiesWithFilter(user, options, customListing);
const {post} = await this.getAuthorActivitiesWithFilter(user, options, customListing, prefetchedActivities);
return post;
}
async getAuthorActivitiesWithFilter(user: RedditUser, options: ActivityWindowCriteria, customListing?: NamedListing): Promise<FetchedActivitiesResult> {
async getAuthorActivitiesWithFilter(user: RedditUser, options: ActivityWindowCriteria, customListing?: NamedListing, prefetchedActivities?: SnoowrapActivity[]): Promise<FetchedActivitiesResult> {
let listFuncName: string;
let listFunc: ListingFunc;
@@ -1169,21 +1113,24 @@ export class SubredditResources {
...(cloneDeep(options)),
}
return await this.getActivities(user, criteriaWithDefaults, {func: listFunc, name: listFuncName});
return await this.getActivities(user, criteriaWithDefaults, {func: listFunc, name: listFuncName}, prefetchedActivities);
}
async getAuthorComments(user: RedditUser, options: ActivityWindowCriteria): Promise<Comment[]> {
return await this.getAuthorActivities(user, {...options, fetch: 'comment'}) as unknown as Promise<Comment[]>;
async getAuthorComments(user: RedditUser, options: ActivityWindowCriteria, prefetchedActivities?: SnoowrapActivity[]): Promise<Comment[]> {
return await this.getAuthorActivities(user, {...options, fetch: 'comment'}, undefined, prefetchedActivities) as unknown as Promise<Comment[]>;
}
async getAuthorSubmissions(user: RedditUser, options: ActivityWindowCriteria): Promise<Submission[]> {
async getAuthorSubmissions(user: RedditUser, options: ActivityWindowCriteria, prefetchedActivities?: SnoowrapActivity[]): Promise<Submission[]> {
return await this.getAuthorActivities(user, {
...options,
fetch: 'submission'
}) as unknown as Promise<Submission[]>;
}, undefined, prefetchedActivities) as unknown as Promise<Submission[]>;
}
async getActivities(user: RedditUser, options: ActivityWindowCriteria, listingData: NamedListing): Promise<FetchedActivitiesResult> {
async getActivities(user: RedditUser, options: ActivityWindowCriteria, listingData: NamedListing, prefetchedActivities: SnoowrapActivity[] = []): Promise<FetchedActivitiesResult> {
let cacheKey: string | undefined;
let fromCache = false;
try {
@@ -1192,7 +1139,6 @@ export class SubredditResources {
let apiCount = 1;
let preMaxTrigger: undefined | string;
let rawCount: number = 0;
let fromCache = false;
const hashObj = cloneDeep(options);
@@ -1205,13 +1151,23 @@ export class SubredditResources {
const userName = getActivityAuthorName(user);
const hash = objectHash.sha1(hashObj);
const cacheKey = `${userName}-${listingData.name}-${hash}`;
cacheKey = `${userName}-${listingData.name}-${hash}`;
if (this.ttl.authorTTL !== false) {
if (this.useSubredditAuthorCache) {
hashObj.subreddit = this.subreddit;
}
// check for cached request error!
//
// we cache reddit API request errors for 403/404 (suspended/shadowbanned user) in memory so that
// we don't waste API calls repeating a request whose result we already know
const cachedRequestError = await this.memoryCache.get(cacheKey) as undefined | null | Error;
if(cachedRequestError !== undefined && cachedRequestError !== null) {
fromCache = true;
this.logger.debug(`In-memory cache found reddit request error for key ${cacheKey}. Must have been <5 sec ago. Throwing to save API calls!`);
throw cachedRequestError;
}
const cacheVal = await this.cache.get(cacheKey);
if(cacheVal === undefined || cacheVal === null) {
@@ -1318,12 +1274,24 @@ export class SubredditResources {
}
}
let unFilteredItems: SnoowrapActivity[] | undefined;
let preFilteredPrefetchedActivities = [...prefetchedActivities];
if(preFilteredPrefetchedActivities.length > 0) {
switch(options.fetch) {
// TODO this may not work if using a custom listingFunc that does not include fetch type
case 'comment':
preFilteredPrefetchedActivities = preFilteredPrefetchedActivities.filter(x => asComment(x));
break;
case 'submission':
preFilteredPrefetchedActivities = preFilteredPrefetchedActivities.filter(x => asSubmission(x));
break;
}
preFilteredPrefetchedActivities = await this.filterListingWithHistoryOptions(preFilteredPrefetchedActivities, user, options.filterOn?.pre);
}
let unFilteredItems: SnoowrapActivity[] | undefined = undefined;
pre = pre.concat(preFilteredPrefetchedActivities);
const { func: listingFunc } = listingData;
let listing = await listingFunc(getAuthorHistoryAPIOptions(options));
let hitEnd = false;
let offset = chunkSize;
@@ -1333,6 +1301,9 @@ export class SubredditResources {
timeOk = false;
let listSlice = listing.slice(offset - chunkSize);
// filter out any items from the slice that were already included in the prefetched list so prefetched activities aren't counted twice
listSlice = preFilteredPrefetchedActivities.length === 0 ? listSlice : listSlice.filter(x => !preFilteredPrefetchedActivities.some(y => y.name === x.name));
let preListSlice = await this.filterListingWithHistoryOptions(listSlice, user, options.filterOn?.pre);
// it's more likely the time criteria will be hit before the count criteria
@@ -1375,7 +1346,7 @@ export class SubredditResources {
if(satisfiedPreEndtime !== undefined || satisfiedPreCount !== undefined) {
if(unFilteredItems === undefined) {
unFilteredItems = [];
unFilteredItems = [...preFilteredPrefetchedActivities];
}
// window has pre filtering, need to check if fallback max would be hit
if(satisfiedPreEndtime !== undefined) {
@@ -1433,9 +1404,14 @@ export class SubredditResources {
} catch (err: any) {
if(isStatusError(err)) {
switch(err.statusCode) {
case 404:
throw new SimpleError('Reddit returned a 404 for user history. Likely this user is shadowbanned.', {isSerious: false});
case 403:
case 404:
if(!fromCache && cacheKey !== undefined) {
await this.memoryCache.set(cacheKey, err, {ttl: 5});
}
if(err.statusCode === 404) {
throw new SimpleError('Reddit returned a 404 for user history. Likely this user is shadowbanned.', {isSerious: false});
}
throw new MaybeSeriousErrorWithCause('Reddit returned a 403 for user history. Likely this user is suspended.', {cause: err, isSerious: false});
default:
throw err;
@@ -1607,6 +1583,7 @@ export class SubredditResources {
usernotes,
ruleResults,
actionResults,
author: (val) => this.getAuthor(val)
});
}
@@ -1980,8 +1957,14 @@ export class SubredditResources {
if (crit[k] !== undefined) {
switch (k) {
case 'name':
const nameReg = crit[k] as RegExp;
if(!nameReg.test(subreddit.display_name)) {
const nameReg = crit[k] as RegExp | SubredditPlaceholderType;
// the {{subreddit}} placeholder tests true when the given subreddit matches the subreddit the bot is currently processing the activity from
if (asSubredditPlaceholder(nameReg)) {
if (this.subreddit.display_name !== subreddit.display_name) {
log.debug(`Failed: Expected => ${k}:${crit[k]} (${this.subreddit.display_name}) | Found => ${k}:${subreddit.display_name}`)
return false
}
} else if (!nameReg.test(subreddit.display_name)) {
return false;
}
break;
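As a minimal sketch, assuming asSubredditPlaceholder simply matches the literal {{subreddit}} token (its body is not shown in this diff), an operator-facing criteria could look like:

// hypothetical criteria in a subreddit filter; only the `name` key is taken from this diff
const ownSubOnly: SubredditCriteria = { name: '{{subreddit}}' };
// toStrongSubredditState() keeps the placeholder instead of compiling a regex, so the
// name check above compares subreddit.display_name to this.subreddit.display_name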

View File

@@ -46,83 +46,6 @@ import {ActionResultEntity} from "../Common/Entities/ActionResultEntity";
export const BOT_LINK = 'https://www.reddit.com/r/ContextModBot/comments/otz396/introduction_to_contextmodbot';
export interface AuthorTypedActivitiesOptions extends ActivityWindowCriteria {
type?: 'comment' | 'submission',
}
export const isSubreddit = async (subreddit: Subreddit, stateCriteria: SubredditCriteria | StrongSubredditCriteria, logger?: Logger) => {
delete stateCriteria.stateDescription;
if (Object.keys(stateCriteria).length === 0) {
return true;
}
const crit = isStrongSubredditState(stateCriteria) ? stateCriteria : toStrongSubredditState(stateCriteria, {defaultFlags: 'i'});
const log: Logger | undefined = logger !== undefined ? logger.child({leaf: 'Subreddit Check'}, mergeArr) : undefined;
return await (async () => {
for (const k of Object.keys(crit)) {
// @ts-ignore
if (crit[k] !== undefined) {
switch (k) {
case 'name':
const nameReg = crit[k] as RegExp;
if(!nameReg.test(subreddit.display_name)) {
return false;
}
break;
case 'isUserProfile':
const entity = parseRedditEntity(subreddit.display_name);
const entityIsUserProfile = entity.type === 'user';
if(crit[k] !== entityIsUserProfile) {
if(log !== undefined) {
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${entityIsUserProfile}`)
}
return false
}
break;
case 'over18':
case 'over_18':
// handling an edge case where user may have confused Comment/Submission state "over_18" with SubredditState "over18"
// @ts-ignore
if (crit[k] !== subreddit.over18) {
if(log !== undefined) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${subreddit.over18}`)
}
return false
}
break;
default:
// @ts-ignore
if (subreddit[k] !== undefined) {
// @ts-ignore
if (crit[k] !== subreddit[k]) {
if(log !== undefined) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${subreddit[k]}`)
}
return false
}
} else {
if(log !== undefined) {
log.warn(`Tried to test for Subreddit property '${k}' but it did not exist`);
}
}
break;
}
}
}
if(log !== undefined) {
log.debug(`Passed: ${JSON.stringify(stateCriteria)}`);
}
return true;
})() as boolean;
}
const renderContentCommentTruncate = truncateStringToLength(50);
const shortTitleTruncate = truncateStringToLength(15);
@@ -133,6 +56,7 @@ export interface TemplateContext {
ruleResults?: RuleResultEntity[]
actionResults?: ActionResultEntity[]
activity?: SnoowrapActivity
author?: (val: string | RedditUser) => Promise<RedditUser>
[key: string]: any
}
@@ -140,11 +64,25 @@ export const renderContent = async (template: string, data: TemplateContext = {}
const {
usernotes,
ruleResults,
author,
actionResults,
activity,
...restContext
} = data;
let fetchedUser: RedditUser | undefined;
// @ts-ignore
const user = async (): Promise<RedditUser> => {
if(fetchedUser === undefined) {
if(author !== undefined) {
// @ts-ignore
fetchedUser = await author(activity.author);
}
}
// @ts-ignore
return fetchedUser;
}
let view: GenericContentTemplateData = {
botLink: BOT_LINK,
...restContext
@@ -162,6 +100,7 @@ export const renderContent = async (template: string, data: TemplateContext = {}
conditional.spoiler = activity.spoiler;
conditional.op = true;
conditional.upvoteRatio = `${activity.upvote_ratio * 100}%`;
conditional.link_flair_text = activity.link_flair_text;
} else {
conditional.op = activity.is_submitter;
}
@@ -171,10 +110,25 @@ export const renderContent = async (template: string, data: TemplateContext = {}
view.modmailLink = `https://www.reddit.com/message/compose?to=%2Fr%2F${subreddit}&message=${encodeURIComponent(permalink)}`;
const author: any = {
toString: () => getActivityAuthorName(activity.author)
};
if(template.includes('{{item.author.')) {
// @ts-ignore
const auth = await user();
author.age = dayjs.unix(auth.created).fromNow(true);
author.linkKarma = auth.link_karma;
author.commentKarma = auth.comment_karma;
author.totalKarma = auth.comment_karma + auth.link_karma;
author.verified = auth.has_verified_email;
author.flairText = activity.author_flair_text;
}
const templateData: any = {
kind: activity instanceof Submission ? 'submission' : 'comment',
// @ts-ignore
author: getActivityAuthorName(await activity.author),
author,
votes: activity.score,
age: dayjs.duration(dayjs().diff(dayjs.unix(activity.created))).humanize(),
permalink,
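For illustration, a hypothetical content template exercising the author properties populated above (only the {{item.author.*}} names are taken from this diff):

// a template containing '{{item.author.' triggers the lazy author() fetch exactly once
const content = 'u/{{item.author}} is {{item.author.age}} old with {{item.author.totalKarma}} karma and flair "{{item.author.flairText}}"';
// const rendered = await renderContent(content, {activity, author: (val) => resources.getAuthor(val)});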

View File

@@ -12,6 +12,7 @@ import {Logger} from "winston";
import {WebSetting} from "../../Common/WebEntities/WebSetting";
import {ErrorWithCause} from "pony-cause";
import {createCacheManager} from "../../Common/Cache";
import {MysqlDriver} from "typeorm/driver/mysql/MysqlDriver";
export interface CacheManagerStoreOptions {
prefix?: string
@@ -103,7 +104,12 @@ export class DatabaseStorageProvider extends StorageProvider {
}
createSessionStore(options?: TypeormStoreOptions): Store {
return new TypeormStore(options).connect(this.clientSessionRepo)
// https://github.com/freshgiammi-lab/connect-typeorm#implement-the-session-entity
// https://github.com/freshgiammi-lab/connect-typeorm/issues/8
// usage of LIMIT in a subquery is not supported by mariadb/mysql
// limitSubquery: false -- turns off LIMIT usage
const realOptions = this.database.driver instanceof MysqlDriver ? {...options, limitSubquery: false} : options;
return new TypeormStore(realOptions).connect(this.clientSessionRepo)
}
async getSessionSecret(): Promise<string | undefined> {

View File

@@ -1297,7 +1297,7 @@
const durationDayjs = dayjs.duration(x.duration, 'seconds');
const durationDisplay = durationDayjs.humanize();
const cancelLink = `<a href="#" data-id="${x.id}" data-subreddit="${x.subreddit}" class="delayCancel">CANCEL</a>`;
return `<div>A <a href="https://reddit.com${x.permalink}">${x.submissionId !== undefined ? 'Comment' : 'Submission'}</a> by <a href="https://reddit.com/u/${x.author}">${x.author}</a>${isAll ? `, dispatched in <a href="https://reddit.com${x.subreddit}">${x.subreddit}</a> ,` : ''} queued by ${x.source} at ${queuedAtDisplay} for ${durationDisplay} (dispatches ${durationUntilNow.humanize(true)}) -- ${cancelLink}</div>`;
return `<div>A <a href="https://reddit.com${x.permalink}">${x.submissionId !== undefined ? 'Comment' : 'Submission'}</a>${isAll ? ` in <a href="https://reddit.com${x.subreddit}">${x.subreddit}</a> ` : ''} by <a href="https://reddit.com/u/${x.author}">${x.author}</a> queued by ${x.source} at ${queuedAtDisplay} for ${durationDisplay} (dispatches ${durationUntilNow.humanize(true)}) -- ${cancelLink}</div>`;
});
//let sub = resp.name;
if(sub === 'All') {

View File

@@ -1,6 +1,9 @@
import winston from 'winston';
import 'winston-daily-rotate-file';
import dayjs from 'dayjs';
import http from 'http';
import https from 'https';
import CacheableLookup from 'cacheable-lookup';
import utc from 'dayjs/plugin/utc.js';
import advancedFormat from 'dayjs/plugin/advancedFormat';
import tz from 'dayjs/plugin/timezone';
@@ -9,7 +12,6 @@ import relTime from 'dayjs/plugin/relativeTime.js';
import sameafter from 'dayjs/plugin/isSameOrAfter.js';
import samebefore from 'dayjs/plugin/isSameOrBefore.js';
import weekOfYear from 'dayjs/plugin/weekOfYear.js';
import {Manager} from "./Subreddit/Manager";
import {Command, Argument} from 'commander';
import {
@@ -40,6 +42,17 @@ dayjs.extend(tz);
dayjs.extend(advancedFormat);
dayjs.extend(weekOfYear);
const cacheable = new CacheableLookup({
// cache dns entries for 60 seconds
maxTtl: 60,
// fall back to node lookup for 10 minutes in the event of a failure
fallbackDuration: 600
});
// replace node's native request agents, globally, so they use the cached dns lookup
cacheable.install(http.globalAgent);
cacheable.install(https.globalAgent);
const commentReg = parseLinkIdentifier([COMMENT_URL_ID]);
const submissionReg = parseLinkIdentifier([SUBMISSION_URL_ID]);

View File

@@ -46,7 +46,14 @@ import {ErrorWithCause, stackWithCauses} from "pony-cause";
import stringSimilarity from 'string-similarity';
import calculateCosineSimilarity from "./Utils/StringMatching/CosineSimilarity";
import levenSimilarity from "./Utils/StringMatching/levenSimilarity";
import {isRateLimitError, isRequestError, isScopeError, isStatusError, SimpleError} from "./Utils/Errors";
import {
isRateLimitError,
isRequestError,
isScopeError,
isSeriousError,
isStatusError,
SimpleError
} from "./Utils/Errors";
import merge from "deepmerge";
import {RulePremise} from "./Common/Entities/RulePremise";
import {RuleResultEntity as RuleResultEntity} from "./Common/Entities/RuleResultEntity";
@@ -70,19 +77,20 @@ import {
import {
ActivitySourceData,
ActivitySourceTypes,
ActivitySourceValue,
ActivitySourceValue, asSubredditPlaceholder,
ConfigFormat,
DurationVal,
ExternalUrlContext,
ImageHashCacheData,
ModUserNoteLabel,
modUserNoteLabels,
PollOn, pollOnTypeMapping, pollOnTypes,
RedditEntity,
RedditEntityType,
RelativeDateTimeMatch,
statFrequencies,
StatisticFrequency,
StatisticFrequencyOption,
StatisticFrequencyOption, subredditPlaceholder, SubredditPlaceholderType,
UrlContext,
WikiContext
} from "./Common/Infrastructure/Atomic";
@@ -1095,16 +1103,22 @@ export const createRetryHandler = (opts: RetryOptions, logger: Logger) => {
// if it's a request error but not a known "oh probably just a reddit blip" status code, treat it as other, which should usually have a lower retry max
}
// linear backoff
otherRetryCount++;
let prefix = '';
if(isSeriousError(err)) {
// linear backoff
otherRetryCount++;
} else {
prefix = 'NON-SERIOUS ';
}
let msg = redditApiError ? `Error occurred while making a request to Reddit (${otherRetryCount}/${maxOtherRetry} in ${clearRetryCountAfter} minutes) but it was NOT a well-known "reddit blip" error.` : `Non-request error occurred (${otherRetryCount}/${maxOtherRetry} in ${clearRetryCountAfter} minutes).`;
if (maxOtherRetry < otherRetryCount) {
logger.warn(`${msg} Exceeded max allowed.`);
logger.warn(`${prefix}${msg} Exceeded max allowed.`);
return false;
}
if(waitOnRetry) {
const ms = (4 * 1000) * otherRetryCount;
logger.warn(`${msg} Will wait ${formatNumber(ms / 1000)} seconds before retrying`);
logger.warn(`${prefix}${msg} Will wait ${formatNumber(ms / 1000)} seconds before retrying`);
await sleep(ms);
}
return true;
@@ -1556,7 +1570,7 @@ export const testMaybeStringRegex = (test: string, subject: string, defaultFlags
}
export const isStrongSubredditState = (value: SubredditCriteria | StrongSubredditCriteria) => {
return value.name === undefined || value.name instanceof RegExp;
return value.name === undefined || value.name instanceof RegExp || asSubredditPlaceholder(value.name);
}
export const asStrongSubredditState = (value: any): value is StrongSubredditCriteria => {
@@ -1574,21 +1588,26 @@ export const toStrongSubredditState = (s: SubredditCriteria, opts?: StrongSubred
let nameValOriginallyRegex = false;
let nameReg: RegExp | undefined;
let nameReg: RegExp | undefined | SubredditPlaceholderType;
if (nameValRaw !== undefined) {
if (!(nameValRaw instanceof RegExp)) {
let nameVal = nameValRaw.trim();
nameReg = parseStringToRegex(nameVal, defaultFlags);
if (nameReg === undefined) {
// if sub state has `isUserProfile=true` and config did not provide a regex then
// assume the user wants to use the value in "name" to look for a user profile so we prefix created regex with u_
const parsedEntity = parseRedditEntity(nameVal, isUserProfile !== undefined && isUserProfile ? 'user' : 'subreddit');
// technically they could provide "u_Username" as the value for "name" and we will then match on it regardless of isUserProfile
// but like...why would they do that? There shouldn't be any subreddits that start with u_ that aren't user profiles anyway(?)
const regPrefix = parsedEntity.type === 'user' ? 'u_' : '';
nameReg = parseStringToRegex(`/^${regPrefix}${nameVal}$/`, defaultFlags);
if(asSubredditPlaceholder(nameVal)) {
nameReg = subredditPlaceholder;
nameValOriginallyRegex = false;
} else {
nameValOriginallyRegex = true;
nameReg = parseStringToRegex(nameVal, defaultFlags);
if (nameReg === undefined) {
// if sub state has `isUserProfile=true` and config did not provide a regex then
// assume the user wants to use the value in "name" to look for a user profile so we prefix created regex with u_
const parsedEntity = parseRedditEntity(nameVal, isUserProfile !== undefined && isUserProfile ? 'user' : 'subreddit');
// technically they could provide "u_Username" as the value for "name" and we will then match on it regardless of isUserProfile
// but like...why would they do that? There shouldn't be any subreddits that start with u_ that aren't user profiles anyway(?)
const regPrefix = parsedEntity.type === 'user' ? 'u_' : '';
nameReg = parseStringToRegex(`/^${regPrefix}${nameVal}$/`, defaultFlags);
} else {
nameValOriginallyRegex = true;
}
}
} else {
nameValOriginallyRegex = true;
@@ -2855,7 +2874,7 @@ export const generateSnoowrapEntityFromRedditThing = (data: RedditThing, client:
case 'user':
return new RedditUser({id: data.val}, client, false);
case 'subreddit':
return new Subreddit({name: data.val}, client, false);
return new Subreddit({id: data.val}, client, false);
case 'message':
return new PrivateMessage({id: data.val}, client, false)
@@ -3088,3 +3107,12 @@ export const toStrongSharingACLConfig = (data: SharingACLConfig | string[]): Str
exclude: (data.exclude ?? []).map(x => parseStringToRegexOrLiteralSearch(x))
}
}
export const toPollOn = (val: string | PollOn): PollOn => {
const clean = val.toLowerCase().trim();
const pVal = pollOnTypeMapping.get(clean);
if(pVal !== undefined) {
return pVal;
}
throw new SimpleError(`'${val}' is not a valid polling source. Valid sources: ${pollOnTypes.join(' | ')}`);
}
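A quick usage sketch for toPollOn, assuming 'unmoderated' is one of the entries in pollOnTypeMapping (the full mapping is not shown here):

// case and surrounding whitespace are forgiven
const source = toPollOn(' UnModerated ');   // => 'unmoderated' under the assumed mapping
// values not present in the mapping throw a SimpleError listing pollOnTypes
// toPollOn('everything');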