Mirror of https://github.com/FoxxMD/context-mod.git (synced 2026-01-14 16:08:02 -05:00)
Compare commits
28 Commits
- 885e3fa765
- 0b2c0e6451
- 15806b5f1f
- bf42cdf356
- e21acd86db
- 5dca1c9602
- 5274584d92
- 1d386c53a5
- d6e351b195
- ea32dc0b62
- dca57bb19e
- 43919f7f9c
- a176b51148
- 75ac5297df
- 0ef2b99bd6
- 9596a476b5
- 92f52cada5
- a482e852c5
- e9055e5205
- df2c40d9c1
- fc4eeb47fa
- 9fb3eaa611
- 23394ab5c2
- 5417b26417
- b6d638d6c5
- af1dd09e2d
- c42e56c68f
- 561a007850
@@ -100,7 +100,7 @@ Find detailed descriptions of all the Rules, with examples, below:
* [Repeat Activity](/docs/examples/repeatActivity)
* [History](/docs/examples/history)
* [Author](/docs/examples/author)
* Regex
* [Regex](/docs/examples/regex)

### Rule Set
@@ -16,6 +16,7 @@ This directory contains examples of valid, ready-to-go configurations for Context
* [Repeat Activity](/docs/examples/repeatActivity)
* [History](/docs/examples/history)
* [Author](/docs/examples/author)
* [Regex](/docs/examples/regex)
* [Toolbox User Notes](/docs/examples/userNotes)
* [Advanced Concepts](/docs/examples/advancedConcepts)
* [Rule Sets](/docs/examples/advancedConcepts/ruleSets.json5)

20 docs/examples/regex/README.md Normal file
@@ -0,0 +1,20 @@
The **Regex** rule matches on text content from a comment or submission in the same way automod uses regex. The rule, however, provides additional functionality that automod does not:

* Can set the **number** of matches that trigger the rule (`matchThreshold`)

This can then be used in conjunction with a [`window`](https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md) to match against activities from the history of the Author of the Activity being checked (including the Activity being checked):

* Can set the **number of Activities** that meet the `matchThreshold` to trigger the rule (`activityMatchThreshold`)
* Can set the **number of total matches** across all Activities to trigger the rule (`totalMatchThreshold`)
* Can set the **type of Activities** to check (`lookAt`)
* When an Activity is a Submission, can **specify which parts of the Submission to match against**, i.e. title, body, and/or url (`testOn`)
### Examples

* [Trigger if regex matches against the current activity](/docs/examples/regex/matchAnyCurrentActivity.json5)
* [Trigger if regex matches 5 times against the current activity](/docs/examples/regex/matchThresholdCurrentActivity.json5)
* [Trigger if regex matches against any part of a Submission](/docs/examples/regex/matchSubmissionParts.json5)
* [Trigger if regex matches any of Author's last 10 activities](/docs/examples/regex/matchHistoryActivity.json5)
* [Trigger if regex matches at least 3 of Author's last 10 activities](/docs/examples/regex/matchActivityThresholdHistory.json5)
* [Trigger if there are 5 regex matches in the Author's last 10 activities](/docs/examples/regex/matchTotalHistoryActivity.json5)
* [Trigger if there are 5 regex matches in the Author's last 10 comments](/docs/examples/regex/matchSubsetHistoryActivity.json5)
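A single criteria object can also combine several of these options. The sketch below is illustrative only: the rule name, regex pattern, and threshold values are made up for the example and are not one of the repository's example files.

```json5
// goes inside
// "rules": []
{
  // hypothetical rule name
  "name": "promoLinks",
  "kind": "regex",
  "criteria": [
    {
      // hypothetical pattern
      "regex": "/example\\.com/",
      // for Submissions, test the title, body, and url
      "testOn": ["title", "body", "url"],
      // trigger when more than 2 of the checked activities contain a match
      "activityMatchThreshold": "> 2",
      // check the current activity plus the author's last 15 activities
      "window": 15,
    },
  ]
}
```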
20 docs/examples/regex/matchActivityThresholdHistory.json5 Normal file
@@ -0,0 +1,20 @@
// goes inside
// "rules": []
{
  "name": "swear",
  "kind": "regex",
  "criteria": [
    // triggers if more than 3 activities in the last 10 match the regex
    {
      "regex": "/fuck|shit|damn/",
      // this differs from "totalMatchThreshold"
      //
      // activityMatchThreshold => # of activities from window must match regex
      // totalMatchThreshold => # of matches across all activities from window must match regex
      "activityMatchThreshold": "> 3",
      // if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
      // learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
      "window": 10,
    },
  ]
}
14 docs/examples/regex/matchAnyCurrentActivity.json5 Normal file
@@ -0,0 +1,14 @@
// goes inside
// "rules": []
{
  "name": "swear",
  "kind": "regex",
  "criteria": [
    // triggers if current activity has more than 0 matches
    {
      "regex": "/fuck|shit|damn/",
      // if "matchThreshold" is not specified it defaults to this -- default behavior is to trigger if there are any matches
      // "matchThreshold": "> 0"
    },
  ]
}
15 docs/examples/regex/matchHistoryActivity.json5 Normal file
@@ -0,0 +1,15 @@
// goes inside
// "rules": []
{
  "name": "swear",
  "kind": "regex",
  "criteria": [
    // triggers if any activity in the last 10 (including current activity) matches the regex
    {
      "regex": "/fuck|shit|damn/",
      // if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
      // learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
      "window": 10,
    },
  ]
}
19 docs/examples/regex/matchSubmissionParts.json5 Normal file
@@ -0,0 +1,19 @@
// goes inside
// "rules": []
{
  "name": "swear",
  "kind": "regex",
  "criteria": [
    {
      // triggers if the current activity has more than 0 matches
      // if the activity is a submission then matches against title, body, and url
      // if "testOn" is not provided then `title, body` are the defaults
      "regex": "/fuck|shit|damn/",
      "testOn": [
        "title",
        "body",
        "url"
      ]
    },
  ]
}
23 docs/examples/regex/matchSubsetHistoryActivity.json5 Normal file
@@ -0,0 +1,23 @@
// goes inside
// "rules": []
{
  "name": "swear",
  "kind": "regex",
  "criteria": [
    // triggers if there are more than 5 regex matches in the last 10 activities (comments only)
    {
      "regex": "/fuck|shit|damn/",
      // this differs from "activityMatchThreshold"
      //
      // activityMatchThreshold => # of activities from window must match regex
      // totalMatchThreshold => # of matches across all activities from window must match regex
      "totalMatchThreshold": "> 5",
      // if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
      // learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
      "window": 10,
      // determines which activities from window to consider
      // defaults to "all" (submissions and comments)
      "lookAt": "comments",
    },
  ]
}
13 docs/examples/regex/matchThresholdCurrentActivity.json5 Normal file
@@ -0,0 +1,13 @@
// goes inside
// "rules": []
{
  "name": "swear",
  "kind": "regex",
  "criteria": [
    {
      "regex": "/fuck|shit|damn/",
      // triggers if current activity has greater than 5 matches
      "matchThreshold": "> 5"
    },
  ]
}
21 docs/examples/regex/matchTotalHistoryActivity.json5 Normal file
@@ -0,0 +1,21 @@
// goes inside
// "rules": []
{
  "name": "swear",
  "kind": "regex",
  "criteria": [
    // triggers if there are more than 5 regex matches in the last 10 activities (comments or submissions)
    {
      // triggers if there are more than 5 *total matches* across the last 10 activities
      "regex": "/fuck|shit|damn/",
      // this differs from "activityMatchThreshold"
      //
      // activityMatchThreshold => # of activities from window must match regex
      // totalMatchThreshold => # of matches across all activities from window must match regex
      "totalMatchThreshold": "> 5",
      // if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
      // learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
      "window": 10,
    },
  ]
}
@@ -2,23 +2,33 @@ import {ActionJson, ActionConfig} from "./index";
|
||||
import Action from "./index";
|
||||
import Snoowrap, {Comment, Submission} from "snoowrap";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {ActionProcessResult} from "../Common/interfaces";
|
||||
|
||||
export class ApproveAction extends Action {
|
||||
getKind() {
|
||||
return 'Approve';
|
||||
}
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
//snoowrap typing issue, thinks comments can't be locked
|
||||
// @ts-ignore
|
||||
if (item.approved) {
|
||||
this.logger.warn('Item is already approved');
|
||||
return {
|
||||
dryRun,
|
||||
success: false,
|
||||
result: 'Item is already approved'
|
||||
}
|
||||
}
|
||||
if (!dryRun) {
|
||||
// @ts-ignore
|
||||
await item.approve();
|
||||
}
|
||||
return {
|
||||
dryRun,
|
||||
success: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import Action from "./index";
|
||||
import Snoowrap, {Comment, Submission} from "snoowrap";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {renderContent} from "../Utils/SnoowrapUtils";
|
||||
import {Footer} from "../Common/interfaces";
|
||||
import {ActionProcessResult, Footer} from "../Common/interfaces";
|
||||
|
||||
export class BanAction extends Action {
|
||||
|
||||
@@ -33,7 +33,7 @@ export class BanAction extends Action {
|
||||
return 'Ban';
|
||||
}
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
const content = this.message === undefined ? undefined : await this.resources.getContent(this.message, item.subreddit);
|
||||
const renderedBody = content === undefined ? undefined : await renderContent(content, item, ruleResults, this.resources.userNotes);
|
||||
@@ -58,6 +58,11 @@ export class BanAction extends Action {
|
||||
duration: this.duration
|
||||
});
|
||||
}
|
||||
return {
|
||||
dryRun,
|
||||
success: true,
|
||||
result: `Banned ${item.author.name} ${durText}${this.reason !== undefined ? ` (${this.reason})` : ''}`
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ import Action, {ActionJson, ActionOptions} from "./index";
|
||||
import {Comment} from "snoowrap";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import {renderContent} from "../Utils/SnoowrapUtils";
|
||||
import {Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
|
||||
import {ActionProcessResult, Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
|
||||
import {RuleResult} from "../Rule";
|
||||
|
||||
export class CommentAction extends Action {
|
||||
@@ -32,7 +32,7 @@ export class CommentAction extends Action {
|
||||
return 'Comment';
|
||||
}
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
const content = await this.resources.getContent(this.content, item.subreddit);
|
||||
const body = await renderContent(content, item, ruleResults, this.resources.userNotes);
|
||||
@@ -44,7 +44,11 @@ export class CommentAction extends Action {
|
||||
|
||||
if(item.archived) {
|
||||
this.logger.warn('Cannot comment because Item is archived');
|
||||
return;
|
||||
return {
|
||||
dryRun,
|
||||
success: false,
|
||||
result: 'Cannot comment because Item is archived'
|
||||
};
|
||||
}
|
||||
let reply: Comment;
|
||||
if(!dryRun) {
|
||||
@@ -62,6 +66,10 @@ export class CommentAction extends Action {
|
||||
// @ts-ignore
|
||||
await reply.distinguish({sticky: this.sticky});
|
||||
}
|
||||
return {
|
||||
dryRun,
|
||||
success: true,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,24 +2,34 @@ import {ActionJson, ActionConfig} from "./index";
|
||||
import Action from "./index";
|
||||
import Snoowrap, {Comment, Submission} from "snoowrap";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {ActionProcessResult} from "../Common/interfaces";
|
||||
|
||||
export class LockAction extends Action {
|
||||
getKind() {
|
||||
return 'Lock';
|
||||
}
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
//snoowrap typing issue, thinks comments can't be locked
|
||||
// @ts-ignore
|
||||
if (item.locked) {
|
||||
this.logger.warn('Item is already locked');
|
||||
return {
|
||||
dryRun,
|
||||
success: false,
|
||||
result: 'Item is already locked'
|
||||
};
|
||||
}
|
||||
if (!dryRun) {
|
||||
//snoowrap typing issue, thinks comments can't be locked
|
||||
// @ts-ignore
|
||||
await item.lock();
|
||||
}
|
||||
return {
|
||||
dryRun,
|
||||
success: true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,9 +2,9 @@ import Action, {ActionJson, ActionOptions} from "./index";
|
||||
import {Comment, ComposeMessageParams} from "snoowrap";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import {renderContent} from "../Utils/SnoowrapUtils";
|
||||
import {Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
|
||||
import {ActionProcessResult, Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {boolToString} from "../util";
|
||||
import {asSubmission, boolToString, isSubmission} from "../util";
|
||||
|
||||
export class MessageAction extends Action {
|
||||
content: string;
|
||||
@@ -34,7 +34,7 @@ export class MessageAction extends Action {
|
||||
return 'Message';
|
||||
}
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
const content = await this.resources.getContent(this.content);
|
||||
const body = await renderContent(content, item, ruleResults, this.resources.userNotes);
|
||||
@@ -50,7 +50,7 @@ export class MessageAction extends Action {
|
||||
text: renderedContent,
|
||||
// @ts-ignore
|
||||
fromSubreddit: this.asSubreddit ? await item.subreddit.fetch() : undefined,
|
||||
subject: this.title || `Concerning your ${item instanceof Submission ? 'Submission' : 'Comment'}`,
|
||||
subject: this.title || `Concerning your ${isSubmission(item) ? 'Submission' : 'Comment'}`,
|
||||
};
|
||||
|
||||
const msgPreview = `\r\n
|
||||
@@ -64,6 +64,10 @@ export class MessageAction extends Action {
|
||||
if (!dryRun) {
|
||||
await this.client.composeMessage(msgOpts);
|
||||
}
|
||||
return {
|
||||
dryRun,
|
||||
success: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,24 +3,33 @@ import Action from "./index";
|
||||
import Snoowrap, {Comment, Submission} from "snoowrap";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {activityIsRemoved} from "../Utils/SnoowrapUtils";
|
||||
import {ActionProcessResult} from "../Common/interfaces";
|
||||
|
||||
export class RemoveAction extends Action {
|
||||
getKind() {
|
||||
return 'Remove';
|
||||
}
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
// issue with snoowrap typings, doesn't think prop exists on Submission
|
||||
// @ts-ignore
|
||||
if (activityIsRemoved(item)) {
|
||||
this.logger.warn('Item is already removed');
|
||||
return;
|
||||
return {
|
||||
dryRun,
|
||||
success: false,
|
||||
result: 'Item is already removed',
|
||||
}
|
||||
}
|
||||
if (!dryRun) {
|
||||
// @ts-ignore
|
||||
await item.remove();
|
||||
}
|
||||
|
||||
return {
|
||||
dryRun,
|
||||
success: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ import Snoowrap, {Comment, Submission} from "snoowrap";
|
||||
import {truncateStringToLength} from "../util";
|
||||
import {renderContent} from "../Utils/SnoowrapUtils";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {RichContent} from "../Common/interfaces";
|
||||
import {ActionProcessResult, RichContent} from "../Common/interfaces";
|
||||
|
||||
// https://www.reddit.com/dev/api/oauth#POST_api_report
|
||||
// denotes 100 characters maximum
|
||||
@@ -23,7 +23,7 @@ export class ReportAction extends Action {
|
||||
return 'Report';
|
||||
}
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
const content = await this.resources.getContent(this.content, item.subreddit);
|
||||
const renderedContent = await renderContent(content, item, ruleResults, this.resources.userNotes);
|
||||
@@ -33,6 +33,11 @@ export class ReportAction extends Action {
|
||||
// @ts-ignore
|
||||
await item.report({reason: truncatedContent});
|
||||
}
|
||||
|
||||
return {
|
||||
dryRun,
|
||||
success: true
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@ import {SubmissionActionConfig} from "./index";
|
||||
import Action, {ActionJson, ActionOptions} from "../index";
|
||||
import Snoowrap, {Comment, Submission} from "snoowrap";
|
||||
import {RuleResult} from "../../Rule";
|
||||
import {ActionProcessResult} from "../../Common/interfaces";
|
||||
|
||||
export class FlairAction extends Action {
|
||||
text: string;
|
||||
@@ -20,7 +21,8 @@ export class FlairAction extends Action {
|
||||
return 'Flair';
|
||||
}
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[]): Promise<void> {
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
if (item instanceof Submission) {
|
||||
if(!this.dryRun) {
|
||||
// @ts-ignore
|
||||
@@ -28,6 +30,15 @@ export class FlairAction extends Action {
|
||||
}
|
||||
} else {
|
||||
this.logger.warn('Cannot flair Comment');
|
||||
return {
|
||||
dryRun,
|
||||
success: false,
|
||||
result: 'Cannot flair Comment',
|
||||
}
|
||||
}
|
||||
return {
|
||||
dryRun,
|
||||
success: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ import {renderContent} from "../Utils/SnoowrapUtils";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {UserNote, UserNoteJson} from "../Subreddit/UserNotes";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import {ActionProcessResult} from "../Common/interfaces";
|
||||
|
||||
|
||||
export class UserNoteAction extends Action {
|
||||
@@ -24,7 +25,7 @@ export class UserNoteAction extends Action {
|
||||
return 'User Note';
|
||||
}
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
const content = await this.resources.getContent(this.content, item.subreddit);
|
||||
const renderedContent = await renderContent(content, item, ruleResults, this.resources.userNotes);
|
||||
@@ -35,7 +36,11 @@ export class UserNoteAction extends Action {
|
||||
const existingNote = notes.find((x) => x.link.includes(item.id));
|
||||
if (existingNote) {
|
||||
this.logger.info(`Will not add note because one already exists for this Activity (${existingNote.time.local().format()}) and allowDuplicate=false`);
|
||||
return;
|
||||
return {
|
||||
dryRun,
|
||||
success: false,
|
||||
result: `Will not add note because one already exists for this Activity (${existingNote.time.local().format()}) and allowDuplicate=false`
|
||||
};
|
||||
}
|
||||
}
|
||||
if (!dryRun) {
|
||||
@@ -43,6 +48,10 @@ export class UserNoteAction extends Action {
|
||||
} else if (!await this.resources.userNotes.warningExists(this.type)) {
|
||||
this.logger.warn(`UserNote type '${this.type}' does not exist. If you meant to use this please add it through Toolbox first.`);
|
||||
}
|
||||
return {
|
||||
success: true,
|
||||
dryRun
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,9 +2,10 @@ import Snoowrap, {Comment, Submission} from "snoowrap";
|
||||
import {Logger} from "winston";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {SubredditResources} from "../Subreddit/SubredditResources";
|
||||
import {ChecksActivityState, TypedActivityStates} from "../Common/interfaces";
|
||||
import {ActionProcessResult, ActionResult, ChecksActivityState, TypedActivityStates} from "../Common/interfaces";
|
||||
import Author, {AuthorOptions} from "../Author/Author";
|
||||
import {mergeArr} from "../util";
|
||||
import LoggedError from "../Utils/LoggedError";
|
||||
|
||||
export abstract class Action {
|
||||
name?: string;
|
||||
@@ -53,47 +54,61 @@ export abstract class Action {
|
||||
return this.name === this.getKind() ? this.getKind() : `${this.getKind()} - ${this.name}`;
|
||||
}
|
||||
|
||||
async handle(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
|
||||
async handle(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
let actionRun = false;
|
||||
const itemPass = await this.resources.testItemCriteria(item, this.itemIs);
|
||||
if (!itemPass) {
|
||||
this.logger.verbose(`Activity did not pass 'itemIs' test, Action not run`);
|
||||
return;
|
||||
}
|
||||
const authorRun = async () => {
|
||||
|
||||
let actRes: ActionResult = {
|
||||
kind: this.getKind(),
|
||||
name: this.getActionUniqueName(),
|
||||
run: false,
|
||||
dryRun,
|
||||
success: false,
|
||||
};
|
||||
try {
|
||||
const itemPass = await this.resources.testItemCriteria(item, this.itemIs);
|
||||
if (!itemPass) {
|
||||
this.logger.verbose(`Activity did not pass 'itemIs' test, Action not run`);
|
||||
actRes.runReason = `Activity did not pass 'itemIs' test, Action not run`;
|
||||
return actRes;
|
||||
}
|
||||
if (this.authorIs.include !== undefined && this.authorIs.include.length > 0) {
|
||||
for (const auth of this.authorIs.include) {
|
||||
if (await this.resources.testAuthorCriteria(item, auth)) {
|
||||
await this.process(item, ruleResults, runtimeDryrun);
|
||||
return true;
|
||||
actRes.run = true;
|
||||
const results = await this.process(item, ruleResults, runtimeDryrun);
|
||||
return {...actRes, ...results};
|
||||
}
|
||||
}
|
||||
this.logger.verbose('Inclusive author criteria not matched, Action not run');
|
||||
return false;
|
||||
}
|
||||
if (!actionRun && this.authorIs.exclude !== undefined && this.authorIs.exclude.length > 0) {
|
||||
actRes.runReason = 'Inclusive author criteria not matched';
|
||||
return actRes;
|
||||
} else if (this.authorIs.exclude !== undefined && this.authorIs.exclude.length > 0) {
|
||||
for (const auth of this.authorIs.exclude) {
|
||||
if (await this.resources.testAuthorCriteria(item, auth, false)) {
|
||||
await this.process(item, ruleResults, runtimeDryrun);
|
||||
return true;
|
||||
actRes.run = true;
|
||||
const results = await this.process(item, ruleResults, runtimeDryrun);
|
||||
return {...actRes, ...results};
|
||||
}
|
||||
}
|
||||
this.logger.verbose('Exclusive author criteria not matched, Action not run');
|
||||
return false;
|
||||
actRes.runReason = 'Exclusive author criteria not matched';
|
||||
return actRes;
|
||||
}
|
||||
return null;
|
||||
|
||||
actRes.run = true;
|
||||
const results = await this.process(item, ruleResults, runtimeDryrun);
|
||||
return {...actRes, ...results};
|
||||
} catch (err) {
|
||||
if(!(err instanceof LoggedError)) {
|
||||
this.logger.error(`Encountered error while running`, err);
|
||||
}
|
||||
actRes.success = false;
|
||||
actRes.result = err.message;
|
||||
return actRes;
|
||||
}
|
||||
const authorRunResults = await authorRun();
|
||||
if (null === authorRunResults) {
|
||||
await this.process(item, ruleResults, runtimeDryrun);
|
||||
} else if (!authorRunResults) {
|
||||
return;
|
||||
}
|
||||
this.logger.verbose(`${dryRun ? 'DRYRUN - ' : ''}Done`);
|
||||
}
|
||||
|
||||
abstract process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryun?: boolean): Promise<void>;
|
||||
abstract process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryun?: boolean): Promise<ActionProcessResult>;
|
||||
}
|
||||
|
||||
export interface ActionOptions extends ActionConfig {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import {Check, CheckOptions, userResultCacheDefault, UserResultCacheOptions} from "./index";
|
||||
import {CommentState} from "../Common/interfaces";
|
||||
import {CommentState, UserResultCache} from "../Common/interfaces";
|
||||
import {Submission, Comment} from "snoowrap/dist/objects";
|
||||
import {RuleResult} from "../Rule";
|
||||
|
||||
export interface CommentCheckOptions extends CheckOptions {
|
||||
cacheUserResult?: UserResultCacheOptions;
|
||||
@@ -9,20 +10,12 @@ export interface CommentCheckOptions extends CheckOptions {
|
||||
export class CommentCheck extends Check {
|
||||
itemIs: CommentState[];
|
||||
|
||||
cacheUserResult: Required<UserResultCacheOptions>;
|
||||
|
||||
constructor(options: CommentCheckOptions) {
|
||||
super(options);
|
||||
const {
|
||||
itemIs = [],
|
||||
cacheUserResult = {},
|
||||
} = options;
|
||||
|
||||
this.cacheUserResult = {
|
||||
...userResultCacheDefault,
|
||||
...cacheUserResult
|
||||
}
|
||||
|
||||
this.itemIs = itemIs;
|
||||
this.logSummary();
|
||||
}
|
||||
@@ -31,7 +24,7 @@ export class CommentCheck extends Check {
|
||||
super.logSummary('comment');
|
||||
}
|
||||
|
||||
async getCacheResult(item: Submission | Comment): Promise<boolean | undefined> {
|
||||
async getCacheResult(item: Submission | Comment): Promise<UserResultCache | undefined> {
|
||||
if (this.cacheUserResult.enable) {
|
||||
return await this.resources.getCommentCheckCacheResult(item as Comment, {
|
||||
name: this.name,
|
||||
@@ -42,13 +35,22 @@ export class CommentCheck extends Check {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
async setCacheResult(item: Submission | Comment, result: boolean): Promise<void> {
|
||||
async setCacheResult(item: Submission | Comment, result: UserResultCache): Promise<void> {
|
||||
if (this.cacheUserResult.enable) {
|
||||
const {result: outcome, ruleResults} = result;
|
||||
|
||||
const res: UserResultCache = {
|
||||
result: outcome,
|
||||
// don't need to cache rule results if check was not triggered
|
||||
// since we only use rule results for actions
|
||||
ruleResults: outcome ? ruleResults : []
|
||||
};
|
||||
|
||||
await this.resources.setCommentCheckCacheResult(item as Comment, {
|
||||
name: this.name,
|
||||
authorIs: this.authorIs,
|
||||
itemIs: this.itemIs
|
||||
}, result, this.cacheUserResult.ttl)
|
||||
}, res, this.cacheUserResult.ttl)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import {Check, CheckOptions} from "./index";
|
||||
import {SubmissionState} from "../Common/interfaces";
|
||||
import {SubmissionState, UserResultCache} from "../Common/interfaces";
|
||||
import {Submission, Comment} from "snoowrap/dist/objects";
|
||||
import {RuleResult} from "../Rule";
|
||||
|
||||
export class SubmissionCheck extends Check {
|
||||
itemIs: SubmissionState[];
|
||||
@@ -15,11 +16,4 @@ export class SubmissionCheck extends Check {
|
||||
logSummary() {
|
||||
super.logSummary('submission');
|
||||
}
|
||||
|
||||
async getCacheResult(item: Submission | Comment) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
async setCacheResult(item: Submission | Comment, result: boolean) {
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,12 +16,13 @@ import {
|
||||
truncateStringToLength
|
||||
} from "../util";
|
||||
import {
|
||||
ActionResult,
|
||||
ChecksActivityState,
|
||||
CommentState,
|
||||
JoinCondition,
|
||||
JoinOperands,
|
||||
SubmissionState,
|
||||
TypedActivityStates
|
||||
TypedActivityStates, UserResultCache
|
||||
} from "../Common/interfaces";
|
||||
import * as RuleSchema from '../Schema/Rule.json';
|
||||
import * as RuleSetSchema from '../Schema/RuleSet.json';
|
||||
@@ -45,6 +46,7 @@ export abstract class Check implements ICheck {
|
||||
include: AuthorCriteria[],
|
||||
exclude: AuthorCriteria[]
|
||||
};
|
||||
cacheUserResult: Required<UserResultCacheOptions>;
|
||||
dryRun?: boolean;
|
||||
notifyOnTrigger: boolean;
|
||||
resources: SubredditResources;
|
||||
@@ -62,6 +64,7 @@ export abstract class Check implements ICheck {
|
||||
actions = [],
|
||||
notifyOnTrigger = false,
|
||||
subredditName,
|
||||
cacheUserResult = {},
|
||||
itemIs = [],
|
||||
authorIs: {
|
||||
include = [],
|
||||
@@ -88,6 +91,10 @@ export abstract class Check implements ICheck {
|
||||
exclude: exclude.map(x => new Author(x)),
|
||||
include: include.map(x => new Author(x)),
|
||||
}
|
||||
this.cacheUserResult = {
|
||||
...userResultCacheDefault,
|
||||
...cacheUserResult
|
||||
}
|
||||
this.dryRun = dryRun;
|
||||
for (const r of rules) {
|
||||
if (r instanceof Rule || r instanceof RuleSet) {
|
||||
@@ -170,10 +177,14 @@ export abstract class Check implements ICheck {
|
||||
}
|
||||
}
|
||||
|
||||
abstract getCacheResult(item: Submission | Comment) : Promise<boolean | undefined>;
|
||||
abstract setCacheResult(item: Submission | Comment, result: boolean): void;
|
||||
async getCacheResult(item: Submission | Comment) : Promise<UserResultCache | undefined> {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
async runRules(item: Submission | Comment, existingResults: RuleResult[] = []): Promise<[boolean, RuleResult[]]> {
|
||||
async setCacheResult(item: Submission | Comment, result: UserResultCache): Promise<void> {
|
||||
}
|
||||
|
||||
async runRules(item: Submission | Comment, existingResults: RuleResult[] = []): Promise<[boolean, RuleResult[], boolean?]> {
|
||||
try {
|
||||
let allRuleResults: RuleResult[] = [];
|
||||
let allResults: (RuleResult | RuleSetResult)[] = [];
|
||||
@@ -182,7 +193,7 @@ export abstract class Check implements ICheck {
|
||||
const cacheResult = await this.getCacheResult(item);
|
||||
if(cacheResult !== undefined) {
|
||||
this.logger.verbose(`Skipping rules run because result was found in cache, Check Triggered Result: ${cacheResult}`);
|
||||
return [cacheResult, allRuleResults];
|
||||
return [cacheResult.result, cacheResult.ruleResults, true];
|
||||
}
|
||||
|
||||
const itemPass = await this.resources.testItemCriteria(item, this.itemIs);
|
||||
@@ -264,23 +275,27 @@ export abstract class Check implements ICheck {
|
||||
}
|
||||
}
|
||||
|
||||
async runActions(item: Submission | Comment, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<Action[]> {
|
||||
async runActions(item: Submission | Comment, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionResult[]> {
|
||||
const dr = runtimeDryrun || this.dryRun;
|
||||
this.logger.debug(`${dr ? 'DRYRUN - ' : ''}Running Actions`);
|
||||
const runActions: Action[] = [];
|
||||
const runActions: ActionResult[] = [];
|
||||
for (const a of this.actions) {
|
||||
if(!a.enabled) {
|
||||
runActions.push({
|
||||
kind: a.getKind(),
|
||||
name: a.getActionUniqueName(),
|
||||
run: false,
|
||||
success: false,
|
||||
runReason: 'Not enabled',
|
||||
dryRun: (a.dryRun || dr) || false,
|
||||
});
|
||||
this.logger.info(`Action ${a.getActionUniqueName()} not run because it is not enabled.`);
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
await a.handle(item, ruleResults, runtimeDryrun);
|
||||
runActions.push(a);
|
||||
} catch (err) {
|
||||
this.logger.error(`Action ${a.getActionUniqueName()} encountered an error while running`, err);
|
||||
}
|
||||
const res = await a.handle(item, ruleResults, runtimeDryrun);
|
||||
runActions.push(res);
|
||||
}
|
||||
this.logger.info(`${dr ? 'DRYRUN - ' : ''}Ran Actions: ${runActions.map(x => x.getActionUniqueName()).join(' | ')}`);
|
||||
this.logger.info(`${dr ? 'DRYRUN - ' : ''}Ran Actions: ${runActions.map(x => x.name).join(' | ')}`);
|
||||
return runActions;
|
||||
}
|
||||
}
|
||||
@@ -337,6 +352,7 @@ export interface CheckOptions extends ICheck {
|
||||
notifyOnTrigger?: boolean
|
||||
resources: SubredditResources
|
||||
client: Snoowrap
|
||||
cacheUserResult?: UserResultCacheOptions;
|
||||
}
|
||||
|
||||
export interface CheckJson extends ICheck {
|
||||
@@ -371,6 +387,8 @@ export interface CheckJson extends ICheck {
|
||||
* @default false
|
||||
* */
|
||||
notifyOnTrigger?: boolean,
|
||||
|
||||
cacheUserResult?: UserResultCacheOptions;
|
||||
}
|
||||
|
||||
export interface SubmissionCheckJson extends CheckJson {
|
||||
@@ -388,6 +406,9 @@ export interface SubmissionCheckJson extends CheckJson {
|
||||
* 3. The rule results are not likely to change while cache is valid
|
||||
* */
|
||||
export interface UserResultCacheOptions {
|
||||
/**
|
||||
* @default false
|
||||
* */
|
||||
enable?: boolean,
|
||||
/**
|
||||
* The amount of time, in seconds, to cache this result
|
||||
@@ -396,17 +417,23 @@ export interface UserResultCacheOptions {
     * @examples [60]
     * */
    ttl?: number,
    /**
     * In the event the cache returns a triggered result, should the actions for the check also be run?
     *
     * @default true
     * */
    runActions?: boolean
}

export const userResultCacheDefault: Required<UserResultCacheOptions> = {
    enable: false,
    ttl: 60,
    runActions: true,
}

export interface CommentCheckJson extends CheckJson {
    kind: 'comment'
    itemIs?: CommentState[]
    cacheUserResult?: UserResultCacheOptions
}

export type CheckStructuredJson = SubmissionCheckStructuredJson | CommentCheckStructuredJson;
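As a sketch, the `cacheUserResult` options above might appear on a comment check like this. The check name and the surrounding `rules`/`actions` placeholders are illustrative; only the shape of `cacheUserResult` comes from the interface above.

```json5
// illustrative comment check using cacheUserResult -- values are examples
{
  "name": "repeatOffender",   // hypothetical check name
  "kind": "comment",
  "cacheUserResult": {
    "enable": true,     // defaults to false
    "ttl": 60,          // cache this author's result for 60 seconds
    "runActions": true  // run actions again when a cached triggered result is returned
  },
  "rules": [ /* ... */ ],
  "actions": [ /* ... */ ]
}
```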
@@ -3,6 +3,7 @@ import {Cache} from 'cache-manager';
|
||||
import {MESSAGE} from 'triple-beam';
|
||||
import Poll from "snoostorm/out/util/Poll";
|
||||
import Snoowrap from "snoowrap";
|
||||
import {RuleResult} from "../Rule";
|
||||
|
||||
/**
|
||||
* An ISO 8601 Duration
|
||||
@@ -381,48 +382,79 @@ export interface PollingOptions extends PollingDefaults {
|
||||
|
||||
export interface TTLConfig {
|
||||
/**
|
||||
* Amount of time, in seconds, author activities (Comments/Submission) should be cached
|
||||
* Amount of time, in seconds, author activity history (Comments/Submission) should be cached
|
||||
*
|
||||
* * If `0` or `true` will cache indefinitely (not recommended)
|
||||
* * If `false` will not cache
|
||||
*
|
||||
* * ENV => `AUTHOR_TTL`
|
||||
* * ARG => `--authorTTL <sec>`
|
||||
* @examples [60]
|
||||
* @default 60
|
||||
* */
|
||||
authorTTL?: number;
|
||||
authorTTL?: number | boolean;
|
||||
/**
|
||||
* Amount of time, in seconds, wiki content pages should be cached
|
||||
*
|
||||
* * If `0` or `true` will cache indefinitely (not recommended)
|
||||
* * If `false` will not cache
|
||||
*
|
||||
* @examples [300]
|
||||
* @default 300
|
||||
* */
|
||||
wikiTTL?: number;
|
||||
|
||||
wikiTTL?: number | boolean;
|
||||
/**
|
||||
* Amount of time, in milliseconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached
|
||||
* Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached
|
||||
*
|
||||
* * If `0` or `true` will cache indefinitely (not recommended)
|
||||
* * If `false` will not cache
|
||||
*
|
||||
* @examples [300]
|
||||
* @default 300
|
||||
* */
|
||||
userNotesTTL?: number;
|
||||
userNotesTTL?: number | boolean;
|
||||
/**
|
||||
* Amount of time, in seconds, a submission should be cached
|
||||
*
|
||||
* * If `0` or `true` will cache indefinitely (not recommended)
|
||||
* * If `false` will not cache
|
||||
*
|
||||
* @examples [60]
|
||||
* @default 60
|
||||
* */
|
||||
submissionTTL?: number;
|
||||
submissionTTL?: number | boolean;
|
||||
/**
|
||||
* Amount of time, in seconds, a comment should be cached
|
||||
*
|
||||
* * If `0` or `true` will cache indefinitely (not recommended)
|
||||
* * If `false` will not cache
|
||||
*
|
||||
* @examples [60]
|
||||
* @default 60
|
||||
* */
|
||||
    commentTTL?: number;
    commentTTL?: number | boolean;
    /**
     * Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)
     *
     * This is especially useful when polling high-volume comments and your checks rely on author/item filters
     *
     * * If `0` or `true` will cache indefinitely (not recommended)
     * * If `false` will not cache
     *
     * @examples [60]
     * @default 60
     * */
    filterCriteriaTTL?: number;
    filterCriteriaTTL?: number | boolean;
}

export interface SubredditCacheConfig extends TTLConfig {
export interface CacheConfig extends TTLConfig {
    /**
     * The cache provider and, optionally, a custom configuration for that provider
     *
     * If not present or `null` provider will be `memory`.
     *
     * To specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`
     * */
    provider?: CacheProvider | CacheOptions
}
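Taken together, the TTL properties above accept either a number of seconds or a boolean (`0`/`true` caches indefinitely, `false` disables caching). A sketch of a `caching` block using these properties follows; the specific values and the choice of `redis` are assumptions for the example, not defaults.

```json5
// illustrative caching block -- property names come from TTLConfig/CacheConfig above,
// the values are assumptions for the example
"caching": {
  "authorTTL": 60,        // cache author activity history for 60 seconds
  "wikiTTL": 300,         // cache wiki content pages for 5 minutes
  "userNotesTTL": true,   // `true` (or `0`) caches indefinitely -- not recommended
  "commentTTL": false,    // `false` disables caching for comments
  "filterCriteriaTTL": 60,
  "provider": "redis"     // a provider string uses that provider's default configuration
}
```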
@@ -503,7 +535,7 @@ export interface ManagerOptions {
|
||||
/**
|
||||
* Per-subreddit config for caching TTL values. If set to `false` caching is disabled.
|
||||
* */
|
||||
caching?: SubredditCacheConfig
|
||||
caching?: CacheConfig
|
||||
|
||||
/**
|
||||
* Use this option to override the `dryRun` setting for all `Checks`
|
||||
@@ -692,12 +724,12 @@ export type CacheProvider = 'memory' | 'redis' | 'none';
|
||||
// provider: CacheOptions
|
||||
// }
|
||||
export type StrongCache = {
|
||||
authorTTL: number,
|
||||
userNotesTTL: number,
|
||||
wikiTTL: number,
|
||||
submissionTTL: number,
|
||||
commentTTL: number,
|
||||
filterCriteriaTTL: number,
|
||||
authorTTL: number | boolean,
|
||||
userNotesTTL: number | boolean,
|
||||
wikiTTL: number | boolean,
|
||||
submissionTTL: number | boolean,
|
||||
commentTTL: number | boolean,
|
||||
filterCriteriaTTL: number | boolean,
|
||||
provider: CacheOptions
|
||||
}
|
||||
|
||||
@@ -757,6 +789,8 @@ export interface CacheOptions {
|
||||
* @examples [500]
|
||||
* */
|
||||
max?: number
|
||||
|
||||
[key:string]: any
|
||||
}
|
||||
|
||||
export type NotificationProvider = 'discord';
|
||||
@@ -1053,60 +1087,11 @@ export interface BotInstanceJsonConfig {
|
||||
}
|
||||
|
||||
/**
|
||||
* Settings to configure the default caching behavior for each suberddit
|
||||
* Settings to configure the default caching behavior for this bot
|
||||
*
|
||||
* Every setting not specified will default to what is specified by the global operator caching config
|
||||
* */
|
||||
caching?: {
|
||||
/**
|
||||
* Amount of time, in seconds, author activity history (Comments/Submission) should be cached
|
||||
*
|
||||
* * ENV => `AUTHOR_TTL`
|
||||
* * ARG => `--authorTTL <sec>`
|
||||
* @examples [60]
|
||||
* @default 60
|
||||
* */
|
||||
authorTTL?: number;
|
||||
/**
|
||||
* Amount of time, in seconds, wiki content pages should be cached
|
||||
* @examples [300]
|
||||
* @default 300
|
||||
* */
|
||||
wikiTTL?: number;
|
||||
/**
|
||||
* Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached
|
||||
* @examples [300]
|
||||
* @default 300
|
||||
* */
|
||||
userNotesTTL?: number;
|
||||
/**
|
||||
* Amount of time, in seconds, a submission should be cached
|
||||
* @examples [60]
|
||||
* @default 60
|
||||
* */
|
||||
submissionTTL?: number;
|
||||
/**
|
||||
* Amount of time, in seconds, a comment should be cached
|
||||
* @examples [60]
|
||||
* @default 60
|
||||
* */
|
||||
commentTTL?: number;
|
||||
/**
|
||||
* Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)
|
||||
*
|
||||
* This is especially useful if when polling high-volume comments and your checks rely on author/item filters
|
||||
*
|
||||
* @examples [60]
|
||||
* @default 60
|
||||
* */
|
||||
filterCriteriaTTL?: number;
|
||||
/**
|
||||
* The cache provider and, optionally, a custom configuration for that provider
|
||||
*
|
||||
* If not present or `null` provider will be `memory`.
|
||||
*
|
||||
* To specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`
|
||||
* */
|
||||
provider?: CacheProvider | CacheOptions
|
||||
}
|
||||
caching?: CacheConfig
|
||||
/**
|
||||
* Settings related to managing heavy API usage.
|
||||
* */
|
||||
@@ -1214,6 +1199,13 @@ export interface OperatorJsonConfig {
|
||||
path?: string,
|
||||
},
|
||||
|
||||
/**
|
||||
* Settings to configure the default caching behavior globally
|
||||
*
|
||||
* These settings will be used by each bot, and subreddit, that does not specify their own
|
||||
* */
|
||||
caching?: CacheConfig
|
||||
|
||||
bots?: BotInstanceJsonConfig[]
|
||||
|
||||
/**
|
||||
@@ -1230,23 +1222,30 @@ export interface OperatorJsonConfig {
|
||||
* @examples [8085]
|
||||
* */
|
||||
port?: number,
|
||||
|
||||
/**
|
||||
* Caching provider to use for session and invite data
|
||||
*
|
||||
* If none is provided the top-level caching provider is used
|
||||
* */
|
||||
caching?: 'memory' | 'redis' | CacheOptions
|
||||
/**
|
||||
* Settings to configure the behavior of user sessions -- the session is what the web interface uses to identify logged in users.
|
||||
* */
|
||||
session?: {
|
||||
/**
|
||||
* The cache provider to use.
|
||||
* Number of seconds a session should be valid for.
|
||||
*
|
||||
* The default should be sufficient for almost all use cases
|
||||
* Default is 1 day
|
||||
*
|
||||
* @default "memory"
|
||||
* @examples ["memory"]
|
||||
* @default 86400
|
||||
* @examples [86400]
|
||||
* */
|
||||
provider?: 'memory' | 'redis' | CacheOptions,
|
||||
maxAge?: number
|
||||
/**
|
||||
* The secret value used to encrypt session data
|
||||
*
|
||||
* If provider is persistent (redis) specifying a value here will ensure sessions are valid between application restarts
|
||||
* If provider is persistent (`redis`) specifying a value here will ensure sessions are valid between application restarts
|
||||
*
|
||||
* When not present or `null` a random string is generated on application start
|
||||
*
|
||||
@@ -1254,6 +1253,21 @@
     * */
    secret?: string,
}

/**
 * Settings related to oauth flow invites
 * */
invites?: {
    /**
     * Number of seconds an invite should be valid for
     *
     * If `0` or not specified (default) invites do not expire
     *
     * @default 0
     * @examples [0]
     * */
    maxAge?: number
}
/**
 * The default log level to filter to in the web interface
 *
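A sketch of the `web` portion of an operator config using the session and invite settings documented above; the values are illustrative, with `port` and the `maxAge` values mirroring the documented defaults.

```json5
// illustrative operator `web` block -- values are examples only
"web": {
  "port": 8085,
  "caching": "memory",       // session/invite cache; falls back to the top-level provider if omitted
  "session": {
    "maxAge": 86400,         // sessions valid for 1 day (seconds)
    "secret": "replace-me"   // set explicitly when using a persistent (redis) provider
  },
  "invites": {
    "maxAge": 0              // 0 = invites do not expire
  }
}
```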
@@ -1364,12 +1378,17 @@ export interface OperatorConfig extends OperatorJsonConfig {
|
||||
level: LogLevel,
|
||||
path?: string,
|
||||
},
|
||||
caching: StrongCache,
|
||||
web: {
|
||||
port: number,
|
||||
caching: CacheOptions,
|
||||
session: {
|
||||
provider: CacheOptions,
|
||||
maxAge: number,
|
||||
secret: string,
|
||||
}
|
||||
},
|
||||
invites: {
|
||||
maxAge: number
|
||||
},
|
||||
logLevel?: LogLevel,
|
||||
maxLogs: number,
|
||||
clients: BotConnection[]
|
||||
@@ -1410,3 +1429,37 @@ export interface LogInfo {
|
||||
labels?: string[]
|
||||
bot?: string
|
||||
}
|
||||
|
||||
export interface ActionResult {
|
||||
kind: string,
|
||||
name: string,
|
||||
run: boolean,
|
||||
runReason?: string,
|
||||
dryRun: boolean,
|
||||
success: boolean,
|
||||
result?: string,
|
||||
}
|
||||
|
||||
export interface ActionProcessResult {
|
||||
success: boolean,
|
||||
dryRun: boolean,
|
||||
result?: string
|
||||
}
|
||||
|
||||
export interface ActionedEvent {
|
||||
activity: {
|
||||
peek: string
|
||||
link: string
|
||||
}
|
||||
author: string
|
||||
timestamp: number
|
||||
check: string
|
||||
ruleSummary: string,
|
||||
ruleResults: RuleResult[]
|
||||
actionResults: ActionResult[]
|
||||
}
|
||||
|
||||
export interface UserResultCache {
|
||||
result: boolean,
|
||||
ruleResults: RuleResult[]
|
||||
}
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
import {Logger} from "winston";
|
||||
import {
|
||||
buildCacheOptionsFromProvider,
|
||||
buildCacheOptionsFromProvider, buildCachePrefix,
|
||||
createAjvFactory,
|
||||
mergeArr,
|
||||
normalizeName,
|
||||
overwriteMerge,
|
||||
parseBool, randomId,
|
||||
readJson,
|
||||
readConfigFile,
|
||||
removeUndefinedKeys
|
||||
} from "./util";
|
||||
import {CommentCheck} from "./Check/CommentCheck";
|
||||
@@ -43,6 +43,7 @@ import {operatorConfig} from "./Utils/CommandConfig";
|
||||
import merge from 'deepmerge';
|
||||
import * as process from "process";
|
||||
import {cacheOptDefaults, cacheTTLDefaults} from "./Common/defaults";
|
||||
import objectHash from "object-hash";
|
||||
|
||||
export interface ConfigBuilderOptions {
|
||||
logger: Logger,
|
||||
@@ -291,10 +292,6 @@ export const parseDefaultBotInstanceFromArgs = (args: any): BotInstanceJsonConfi
|
||||
polling: {
|
||||
sharedMod,
|
||||
},
|
||||
caching: {
|
||||
provider: caching,
|
||||
authorTTL
|
||||
},
|
||||
nanny: {
|
||||
softLimit,
|
||||
hardLimit
|
||||
@@ -316,6 +313,8 @@ export const parseOpConfigFromArgs = (args: any): OperatorJsonConfig => {
|
||||
sessionSecret,
|
||||
web,
|
||||
mode,
|
||||
caching,
|
||||
authorTTL,
|
||||
} = args || {};
|
||||
|
||||
const data = {
|
||||
@@ -328,6 +327,10 @@ export const parseOpConfigFromArgs = (args: any): OperatorJsonConfig => {
|
||||
level: logLevel,
|
||||
path: logDir === true ? `${process.cwd()}/logs` : undefined,
|
||||
},
|
||||
caching: {
|
||||
provider: caching,
|
||||
authorTTL
|
||||
},
|
||||
web: {
|
||||
enabled: web,
|
||||
port,
|
||||
@@ -387,13 +390,6 @@ export const parseDefaultBotInstanceFromEnv = (): BotInstanceJsonConfig => {
|
||||
polling: {
|
||||
sharedMod: parseBool(process.env.SHARE_MOD),
|
||||
},
|
||||
caching: {
|
||||
provider: {
|
||||
// @ts-ignore
|
||||
store: process.env.CACHING as (CacheProvider | undefined)
|
||||
},
|
||||
authorTTL: process.env.AUTHOR_TTL !== undefined ? parseInt(process.env.AUTHOR_TTL) : undefined
|
||||
},
|
||||
nanny: {
|
||||
softLimit: process.env.SOFT_LIMIT !== undefined ? parseInt(process.env.SOFT_LIMIT) : undefined,
|
||||
hardLimit: process.env.HARD_LIMIT !== undefined ? parseInt(process.env.HARD_LIMIT) : undefined
|
||||
@@ -414,6 +410,13 @@ export const parseOpConfigFromEnv = (): OperatorJsonConfig => {
|
||||
level: process.env.LOG_LEVEL,
|
||||
path: process.env.LOG_DIR === 'true' ? `${process.cwd()}/logs` : undefined,
|
||||
},
|
||||
caching: {
|
||||
provider: {
|
||||
// @ts-ignore
|
||||
store: process.env.CACHING as (CacheProvider | undefined)
|
||||
},
|
||||
authorTTL: process.env.AUTHOR_TTL !== undefined ? parseInt(process.env.AUTHOR_TTL) : undefined
|
||||
},
|
||||
web: {
|
||||
port: process.env.PORT !== undefined ? parseInt(process.env.PORT) : undefined,
|
||||
session: {
|
||||
@@ -473,7 +476,7 @@ export const parseOperatorConfigFromSources = async (args: any): Promise<Operato
|
||||
if (operatorConfig !== undefined) {
|
||||
let rawConfig;
|
||||
try {
|
||||
rawConfig = await readJson(operatorConfig, {log: initLogger});
|
||||
rawConfig = await readConfigFile(operatorConfig, {log: initLogger}) as object;
|
||||
} catch (err) {
|
||||
initLogger.error('Cannot continue app startup because operator config file was not parseable.');
|
||||
err.logged = true;
|
||||
@@ -493,10 +496,18 @@ export const parseOperatorConfigFromSources = async (args: any): Promise<Operato
|
||||
const defaultBotInstanceFromEnv = parseDefaultBotInstanceFromEnv();
|
||||
const {bots: botInstancesFromFile = [], ...restConfigFile} = configFromFile;
|
||||
|
||||
const defaultBotInstance = merge.all([defaultBotInstanceFromEnv, defaultBotInstanceFromArgs], {
|
||||
const mergedConfig = merge.all([opConfigFromEnv, restConfigFile, opConfigFromArgs], {
|
||||
arrayMerge: overwriteMerge,
|
||||
});
|
||||
|
||||
const defaultBotInstance = merge.all([defaultBotInstanceFromEnv, defaultBotInstanceFromArgs], {
|
||||
arrayMerge: overwriteMerge,
|
||||
}) as BotInstanceJsonConfig;
|
||||
|
||||
if(configFromFile.caching !== undefined) {
|
||||
defaultBotInstance.caching = configFromFile.caching;
|
||||
}
|
||||
|
||||
let botInstances = [];
|
||||
if(botInstancesFromFile.length === 0) {
|
||||
botInstances = [defaultBotInstance];
|
||||
@@ -504,10 +515,6 @@ export const parseOperatorConfigFromSources = async (args: any): Promise<Operato
|
||||
botInstances = botInstancesFromFile.map(x => merge.all([defaultBotInstance, x], {arrayMerge: overwriteMerge}));
|
||||
}
|
||||
|
||||
const mergedConfig = merge.all([opConfigFromEnv, restConfigFile, opConfigFromArgs], {
|
||||
arrayMerge: overwriteMerge,
|
||||
});
|
||||
|
||||
return removeUndefinedKeys({...mergedConfig, bots: botInstances}) as OperatorJsonConfig;
|
||||
}
|
||||
|
||||
@@ -522,12 +529,17 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
|
||||
level = 'verbose',
|
||||
path,
|
||||
} = {},
|
||||
caching: opCache,
|
||||
web: {
|
||||
port = 8085,
|
||||
maxLogs = 200,
|
||||
caching: webCaching = {},
|
||||
session: {
|
||||
secret = randomId(),
|
||||
provider: sessionProvider = { store: 'memory' },
|
||||
maxAge: sessionMaxAge = 86400,
|
||||
} = {},
|
||||
invites: {
|
||||
maxAge: inviteMaxAge = 0,
|
||||
} = {},
|
||||
clients,
|
||||
credentials: webCredentials,
|
||||
@@ -541,8 +553,44 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
|
||||
bots = [],
|
||||
} = data;
|
||||
|
||||
let cache: StrongCache;
|
||||
let defaultProvider: CacheOptions;
|
||||
|
||||
if(opCache === undefined) {
|
||||
defaultProvider = {
|
||||
store: 'memory',
|
||||
...cacheOptDefaults
|
||||
};
|
||||
cache = {
|
||||
...cacheTTLDefaults,
|
||||
provider: defaultProvider
|
||||
};
|
||||
|
||||
} else {
|
||||
const {provider, ...restConfig} = opCache;
|
||||
if(typeof provider === 'string') {
|
||||
defaultProvider = {
|
||||
store: provider as CacheProvider,
|
||||
...cacheOptDefaults
|
||||
};
|
||||
} else {
|
||||
const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
|
||||
defaultProvider = {
|
||||
store,
|
||||
...cacheOptDefaults,
|
||||
...rest,
|
||||
};
|
||||
}
|
||||
cache = {
|
||||
...cacheTTLDefaults,
|
||||
...restConfig,
|
||||
provider: defaultProvider,
|
||||
}
|
||||
}
|
||||
|
||||
let hydratedBots: BotInstanceConfig[] = bots.map(x => {
|
||||
const {
|
||||
name: botName,
|
||||
polling: {
|
||||
sharedMod = false,
|
||||
limit = 100,
|
||||
@@ -572,10 +620,10 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
|
||||
} = x;
|
||||
|
||||
|
||||
let cache: StrongCache;
|
||||
let botCache: StrongCache;
|
||||
|
||||
if(caching === undefined) {
|
||||
cache = {
|
||||
botCache = {
|
||||
...cacheTTLDefaults,
|
||||
provider: {
|
||||
store: 'memory',
|
||||
@@ -585,7 +633,7 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
|
||||
} else {
|
||||
const {provider, ...restConfig} = caching;
|
||||
if (typeof provider === 'string') {
|
||||
cache = {
|
||||
botCache = {
|
||||
...cacheTTLDefaults,
|
||||
...restConfig,
|
||||
provider: {
|
||||
@@ -595,7 +643,7 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
|
||||
}
|
||||
} else {
|
||||
const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
|
||||
cache = {
|
||||
botCache = {
|
||||
...cacheTTLDefaults,
|
||||
...restConfig,
|
||||
provider: {
|
||||
@@ -607,7 +655,18 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
|
||||
}
|
||||
}
|
||||
|
||||
const botCreds = {
|
||||
clientId: (ci as string),
|
||||
clientSecret: (cs as string),
|
||||
...restCred,
|
||||
};
|
||||
if (botCache.provider.prefix === undefined || botCache.provider.prefix === defaultProvider.prefix) {
|
||||
// need to provide unique prefix to bot
|
||||
botCache.provider.prefix = buildCachePrefix([botCache.provider.prefix, 'bot', (botName || objectHash.sha1(botCreds))]);
|
||||
}
|
||||
|
||||
return {
|
||||
name: botName,
|
||||
snoowrap,
|
||||
subreddits: {
|
||||
names,
|
||||
@@ -616,12 +675,8 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
|
||||
heartbeatInterval,
|
||||
dryRun,
|
||||
},
|
||||
credentials: {
|
||||
clientId: (ci as string),
|
||||
clientSecret: (cs as string),
|
||||
...restCred,
|
||||
},
|
||||
caching: cache,
|
||||
credentials: botCreds,
|
||||
caching: botCache,
|
||||
polling: {
|
||||
sharedMod,
|
||||
limit,
|
||||
@@ -637,6 +692,7 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
const defaultOperators = typeof name === 'string' ? [name] : name;
|
||||
|
||||
const config: OperatorConfig = {
|
||||
@@ -649,19 +705,19 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
|
||||
level,
|
||||
path
|
||||
},
|
||||
caching: cache,
|
||||
web: {
|
||||
port,
|
||||
caching: {
|
||||
...defaultProvider,
|
||||
...webCaching
|
||||
},
|
||||
invites: {
|
||||
maxAge: inviteMaxAge,
|
||||
},
|
||||
session: {
|
||||
secret,
|
||||
provider: typeof sessionProvider === 'string' ? {
|
||||
...buildCacheOptionsFromProvider({
|
||||
ttl: 86400000,
|
||||
store: sessionProvider,
|
||||
})
|
||||
} : {
|
||||
...buildCacheOptionsFromProvider(sessionProvider),
|
||||
ttl: 86400000,
|
||||
},
|
||||
maxAge: sessionMaxAge,
|
||||
},
|
||||
maxLogs,
|
||||
clients: clients === undefined ? [{host: 'localhost:8095', secret: apiSecret}] : clients,
|
||||
|
||||
@@ -5,9 +5,10 @@ import Submission from "snoowrap/dist/objects/Submission";
import {getAttributionIdentifier} from "../Utils/SnoowrapUtils";
import dayjs from "dayjs";
import {
asSubmission,
comparisonTextOp,
FAIL,
formatNumber,
formatNumber, getActivitySubredditName, isSubmission,
parseGenericValueOrPercentComparison,
parseSubredditName,
PASS
@@ -190,9 +191,9 @@ export class AttributionRule extends Rule {
let activities = thresholdOn === 'submissions' ? await this.resources.getAuthorSubmissions(item.author, {window: window}) : await this.resources.getAuthorActivities(item.author, {window: window});
activities = activities.filter(act => {
if (include.length > 0) {
return include.some(x => x === act.subreddit.display_name.toLowerCase());
return include.some(x => x === getActivitySubredditName(act).toLowerCase());
} else if (exclude.length > 0) {
return !exclude.some(x => x === act.subreddit.display_name.toLowerCase())
return !exclude.some(x => x === getActivitySubredditName(act).toLowerCase())
}
return true;
});
@@ -219,7 +220,7 @@ export class AttributionRule extends Rule {

const realDomains: DomainInfo[] = domains.map(x => {
if(x === SUBMISSION_DOMAIN) {
if(!(item instanceof Submission)) {
if(!(asSubmission(item))) {
throw new SimpleError('Cannot run Attribution Rule with the domain SELF:AGG on a Comment');
}
return getAttributionIdentifier(item, consolidateMediaDomains);
@@ -228,7 +229,7 @@ export class AttributionRule extends Rule {
});
const realDomainIdents = realDomains.map(x => x.aliases).flat(1).map(x => x.toLowerCase());

const submissions: Submission[] = thresholdOn === 'submissions' ? activities as Submission[] : activities.filter(x => x instanceof Submission) as Submission[];
const submissions: Submission[] = thresholdOn === 'submissions' ? activities as Submission[] : activities.filter(x => isSubmission(x)) as Submission[];
const aggregatedSubmissions = submissions.reduce((acc: Map<string, DomainAgg>, sub) => {
const domainInfo = getAttributionIdentifier(sub, consolidateMediaDomains)
@@ -5,9 +5,10 @@ import Submission from "snoowrap/dist/objects/Submission";
import {getAuthorActivities} from "../Utils/SnoowrapUtils";
import dayjs from "dayjs";
import {
asSubmission,
comparisonTextOp,
FAIL,
formatNumber,
formatNumber, getActivitySubredditName, isSubmission,
parseGenericValueOrPercentComparison, parseSubredditName,
PASS,
percentFromString
@@ -112,9 +113,9 @@ export class HistoryRule extends Rule {
let activities = await this.resources.getAuthorActivities(item.author, {window: window});
activities = activities.filter(act => {
if (this.include.length > 0) {
return this.include.some(x => x === act.subreddit.display_name.toLowerCase());
return this.include.some(x => x === getActivitySubredditName(act).toLowerCase());
} else if (this.exclude.length > 0) {
return !this.exclude.some(x => x === act.subreddit.display_name.toLowerCase())
return !this.exclude.some(x => x === getActivitySubredditName(act).toLowerCase())
}
return true;
});
@@ -125,7 +126,7 @@ export class HistoryRule extends Rule {

const activityTotal = activities.length;
const {submissionTotal, commentTotal, opTotal} = activities.reduce((acc, act) => {
if(act instanceof Submission) {
if(asSubmission(act)) {
return {...acc, submissionTotal: acc.submissionTotal + 1};
}
let a = {...acc, commentTotal: acc.commentTotal + 1};
@@ -169,7 +170,7 @@ export class HistoryRule extends Rule {
const firstActivity = activities[0];
const lastActivity = activities[activities.length - 1];

const activityTotalWindow = dayjs.duration(dayjs(firstActivity.created_utc * 1000).diff(dayjs(lastActivity.created_utc * 1000)));
const activityTotalWindow = activities.length === 0 ? dayjs.duration(0, 's') : dayjs.duration(dayjs(firstActivity.created_utc * 1000).diff(dayjs(lastActivity.created_utc * 1000)));

criteriaResults.push({
criteria,
@@ -244,7 +245,7 @@ export class HistoryRule extends Rule {
submissionPercent: formatNumber((submissionTotal/activityTotal)*100),
opPercent: formatNumber((opTotal/commentTotal)*100),
criteria,
window: typeof window === 'number' ? `${activityTotal} Items` : activityTotalWindow.humanize(true),
window: typeof window === 'number' || activityTotal === 0 ? `${activityTotal} Items` : activityTotalWindow.humanize(true),
triggered,
submissionTrigger,
commentTrigger,
@@ -2,8 +2,8 @@ import {Rule, RuleJSONConfig, RuleOptions, RulePremise, RuleResult} from "./inde
import {Comment, VoteableContent} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {
activityWindowText,
comparisonTextOp, FAIL, formatNumber,
activityWindowText, asSubmission,
comparisonTextOp, FAIL, formatNumber, getActivitySubredditName, isSubmission,
parseGenericValueOrPercentComparison, parseSubredditName,
parseUsableLinkIdentifier,
PASS
@@ -67,14 +67,14 @@ export class RecentActivityRule extends Rule {

let viableActivity = activities;
if (this.useSubmissionAsReference) {
if (!(item instanceof Submission)) {
if (!asSubmission(item)) {
this.logger.warn('Cannot use post as reference because triggered item is not a Submission');
} else if (item.is_self) {
this.logger.warn('Cannot use post as reference because triggered Submission is not a link type');
} else {
const usableUrl = parseLink(await item.url);
viableActivity = viableActivity.filter((x) => {
if (!(x instanceof Submission)) {
if (!asSubmission(x)) {
return false;
}
if (x.url === undefined) {
@@ -85,7 +85,7 @@ export class RecentActivityRule extends Rule {
}
}
const groupedActivity = viableActivity.reduce((grouped, activity) => {
const s = activity.subreddit.display_name.toLowerCase();
const s = getActivitySubredditName(activity).toLowerCase();
grouped[s] = (grouped[s] || []).concat(activity);
return grouped;
}, {} as Record<string, (Submission | Comment)[]>);
@@ -96,17 +96,21 @@ export class RecentActivityRule extends Rule {
for (const triggerSet of this.thresholds) {
let currCount = 0;
const presentSubs = [];
const {threshold = '>= 1', subreddits = []} = triggerSet;
let combinedKarma = 0;
const {threshold = '>= 1', subreddits = [], karma: karmaThreshold} = triggerSet;
for (const sub of subreddits.map(x => parseSubredditName(x))) {
const isub = sub.toLowerCase();
const {[isub]: tSub = []} = groupedActivity;
if (tSub.length > 0) {
currCount += tSub.length;
presentSubs.push(sub);
for(const a of tSub) {
combinedKarma += a.score;
}
}
}
const {operator, value, isPercent} = parseGenericValueOrPercentComparison(threshold);
let sum = {subsWithActivity: presentSubs, subreddits, count: currCount, threshold, triggered: false, testValue: currCount.toString()};
let sum = {subsWithActivity: presentSubs, combinedKarma, karmaThreshold, subreddits, count: currCount, threshold, triggered: false, testValue: currCount.toString()};
if (isPercent) {
sum.testValue = `${formatNumber((currCount / viableActivity.length) * 100)}%`;
if (comparisonTextOp(currCount / viableActivity.length, operator, value / 100)) {
@@ -117,6 +121,15 @@ export class RecentActivityRule extends Rule {
sum.triggered = true;
totalTriggeredOn = sum;
}
// if we would trigger on threshold need to also test for karma
if(totalTriggeredOn !== undefined && karmaThreshold !== undefined) {
const {operator: opKarma, value: valueKarma} = parseGenericValueOrPercentComparison(karmaThreshold);
if(!comparisonTextOp(combinedKarma, opKarma, valueKarma)) {
sum.triggered = false;
totalTriggeredOn = undefined;
}
}

summaries.push(sum);
// if either trigger condition is hit end the iteration early
if (totalTriggeredOn !== undefined) {
@@ -150,10 +163,12 @@ export class RecentActivityRule extends Rule {
subreddits = [],
subsWithActivity = [],
threshold,
triggered
triggered,
combinedKarma,
karmaThreshold,
} = summary;
const relevantSubs = subsWithActivity.length === 0 ? subreddits : subsWithActivity;
const totalSummary = `${testValue} activities over ${relevantSubs.length} subreddits ${triggered ? 'met' : 'did not meet'} threshold of ${threshold}`;
const totalSummary = `${testValue} activities over ${relevantSubs.length} subreddits${karmaThreshold !== undefined ? ` with ${combinedKarma} combined karma` : ''} ${triggered ? 'met' : 'did not meet'} threshold of ${threshold}${karmaThreshold !== undefined ? ` and ${karmaThreshold} combined karma` : ''}`;
return {
result: totalSummary,
data: {
@@ -163,7 +178,8 @@ export class RecentActivityRule extends Rule {
subCount: relevantSubs.length,
totalCount: count,
threshold,
testValue
testValue,
karmaThreshold,
}
};
}
@@ -191,6 +207,21 @@ export interface SubThreshold extends SubredditCriteria {
* @examples [">= 1"]
* */
threshold?: string

/**
* Test the **combined karma** from Activities found in the specified subreddits
*
* Value is a string containing a comparison operator and a number of **combined karma** to compare against
*
* If specified then both `threshold` and `karma` must be met for this `SubThreshold` to be satisfied
*
* The syntax is `(< OR > OR <= OR >=) <number>`
*
* * EX `> 50` => greater than 50 combined karma for all found Activities in specified subreddits
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
* */
karma?: string
}

interface RecentActivityConfig extends ActivityWindow, ReferenceSubmission {
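To illustrate the new `karma` option added to `SubThreshold` above: a set now only counts as triggered when both the activity `threshold` and the `karma` comparison pass. Below is a minimal JSON5 sketch of how this might look in a Recent Activity rule config; `subreddits`, `threshold`, and `karma` come from the interface above, while the `kind` value and surrounding structure are assumptions for illustration.

```json5
{
  kind: 'recentActivity',  // assumed rule identifier
  window: 100,             // check the author's last 100 activities
  thresholds: [
    {
      subreddits: ['mealtimevideos', 'askscience'],
      threshold: '>= 3',   // at least 3 activities found in these subreddits...
      karma: '> 50',       // ...and more than 50 combined karma across those activities
    },
  ],
}
```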
@@ -2,7 +2,8 @@ import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
import {Comment} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {
comparisonTextOp, FAIL, isExternalUrlSubmission, parseGenericValueComparison,
asSubmission,
comparisonTextOp, FAIL, isExternalUrlSubmission, isSubmission, parseGenericValueComparison,
parseGenericValueOrPercentComparison, parseRegex,
PASS
} from "../util";
@@ -323,7 +324,7 @@ export class RegexRule extends Rule {
let m: string[] = [];
// determine what content we are testing
let contents: string[] = [];
if (a instanceof Submission) {
if (asSubmission(a)) {
for (const l of testOn) {
switch (l) {
case 'title':
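The `testOn` handling shown here is what lets the Regex rule target specific parts of a Submission. A hedged JSON5 sketch of a rule using it follows; the `testOn` values come from the switch above and from the Regex rule documentation, while the `kind` and `regex` property names, the `matchThreshold` format, and the pattern itself are assumptions for illustration only.

```json5
{
  kind: 'regex',                              // assumed rule identifier
  regex: 'https?:\\/\\/discord\\.gg\\/\\w+',  // illustrative pattern only
  testOn: ['title', 'body'],                  // which parts of a Submission to test
  matchThreshold: '>= 1',                     // assumed comparison string, matching the style of other thresholds
}
```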
@@ -1,8 +1,8 @@
import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
import {Comment} from "snoowrap";
import {
activityWindowText,
comparisonTextOp, FAIL, isExternalUrlSubmission, isRedditMedia,
activityWindowText, asSubmission,
comparisonTextOp, FAIL, getActivitySubredditName, isExternalUrlSubmission, isRedditMedia,
parseGenericValueComparison, parseSubredditName,
parseUsableLinkIdentifier as linkParser, PASS
} from "../util";
@@ -25,7 +25,7 @@ interface RepeatActivityReducer {

const getActivityIdentifier = (activity: (Submission | Comment), length = 200) => {
let identifier: string;
if (activity instanceof Submission) {
if (asSubmission(activity)) {
if (activity.is_self) {
identifier = `${activity.title}${activity.selftext.slice(0, length)}`;
} else if(isRedditMedia(activity)) {
@@ -96,15 +96,15 @@ export class RepeatActivityRule extends Rule {

async process(item: Submission|Comment): Promise<[boolean, RuleResult]> {
let referenceUrl;
if(item instanceof Submission && this.useSubmissionAsReference) {
if(asSubmission(item) && this.useSubmissionAsReference) {
referenceUrl = await item.url;
}

let filterFunc = (x: any) => true;
if(this.include.length > 0) {
filterFunc = (x: Submission|Comment) => this.include.includes(x.subreddit.display_name.toLowerCase());
filterFunc = (x: Submission|Comment) => this.include.includes(getActivitySubredditName(x).toLowerCase());
} else if(this.exclude.length > 0) {
filterFunc = (x: Submission|Comment) => !this.exclude.includes(x.subreddit.display_name.toLowerCase());
filterFunc = (x: Submission|Comment) => !this.exclude.includes(getActivitySubredditName(x).toLowerCase());
}

let activities: (Submission | Comment)[] = [];
@@ -223,7 +223,7 @@ export class RepeatActivityRule extends Rule {
};
for (let set of value) {
const test = comparisonTextOp(set.length, operator, thresholdValue);
const md = set.map((x: (Comment | Submission)) => `[${x instanceof Submission ? x.title : getActivityIdentifier(x, 50)}](https://reddit.com${x.permalink}) in ${x.subreddit_name_prefixed} on ${dayjs(x.created_utc * 1000).utc().format()}`);
const md = set.map((x: (Comment | Submission)) => `[${asSubmission(x) ? x.title : getActivityIdentifier(x, 50)}](https://reddit.com${x.permalink}) in ${x.subreddit_name_prefixed} on ${dayjs(x.created_utc * 1000).utc().format()}`);

summaryData.sets.push(set);
summaryData.largestTrigger = Math.max(summaryData.largestTrigger, set.length);
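The filtering above shows how the Repeat Activity rule narrows the author history it inspects: `useSubmissionAsReference` keys the comparison to the submission being checked, and `include` takes precedence over `exclude` when both are present. A hedged JSON5 sketch follows; `useSubmissionAsReference`, `include`, and `exclude` come from the code above, while the `kind` value and `threshold` format are assumptions for illustration only.

```json5
{
  kind: 'repeatActivity',          // assumed rule identifier
  threshold: '>= 3',               // assumed: how many repeats of the same content trigger the rule
  useSubmissionAsReference: true,  // only count repeats of the link currently being checked
  exclude: ['modsupport'],         // ignore the author's activity in these subreddits
  window: 100,
}
```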
@@ -686,7 +686,96 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"CacheConfig": {
|
||||
"properties": {
|
||||
"authorTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, author activity history (Comments/Submission) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache\n\n* ENV => `AUTHOR_TTL`\n* ARG => `--authorTTL <sec>`",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"commentTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, a comment should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"filterCriteriaTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)\n\nThis is especially useful if when polling high-volume comments and your checks rely on author/item filters\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/CacheOptions"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"memory",
|
||||
"none",
|
||||
"redis"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"description": "The cache provider and, optionally, a custom configuration for that provider\n\nIf not present or `null` provider will be `memory`.\n\nTo specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`"
|
||||
},
|
||||
"submissionTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, a submission should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"userNotesTTL": {
|
||||
"default": 300,
|
||||
"description": "Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
300
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"wikiTTL": {
|
||||
"default": 300,
|
||||
"description": "Amount of time, in seconds, wiki content pages should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
300
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"CacheOptions": {
|
||||
"additionalProperties": {
|
||||
},
|
||||
"description": "Configure granular settings for a cache provider with this object",
|
||||
"properties": {
|
||||
"auth_pass": {
|
||||
@@ -2338,6 +2427,11 @@
|
||||
"description": "At least one count property must be present. If both are present then either can trigger the rule",
|
||||
"minProperties": 1,
|
||||
"properties": {
|
||||
"karma": {
|
||||
"description": "Test the **combined karma** from Activities found in the specified subreddits\n\nValue is a string containing a comparison operator and a number of **combined karma** to compare against\n\nIf specified then both `threshold` and `karma` must be met for this `SubThreshold` to be satisfied\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 50` => greater than 50 combined karma for all found Activities in specified subreddits",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"subreddits": {
|
||||
"description": "A list of Subreddits (by name, case-insensitive) to look for.\n\nEX [\"mealtimevideos\",\"askscience\"]",
|
||||
"examples": [
|
||||
@@ -2439,6 +2533,10 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"cacheUserResult": {
|
||||
"$ref": "#/definitions/UserResultCacheOptions",
|
||||
"description": "Cache the result of this check based on the comment author and the submission id\n\nThis is useful in this type of scenario:\n\n1. This check is configured to run on comments for specific submissions with high volume activity\n2. The rules being run are not dependent on the content of the comment\n3. The rule results are not likely to change while cache is valid"
|
||||
},
|
||||
"condition": {
|
||||
"default": "AND",
|
||||
"description": "Under what condition should a set of run `Rule` objects be considered \"successful\"?\n\nIf `OR` then **any** triggered `Rule` object results in success.\n\nIf `AND` then **all** `Rule` objects must be triggered to result in success.",
|
||||
@@ -2599,74 +2697,6 @@
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"SubredditCacheConfig": {
|
||||
"properties": {
|
||||
"authorTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, author activities (Comments/Submission) should be cached",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"commentTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, a comment should be cached",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"filterCriteriaTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)\n\nThis is especially useful if when polling high-volume comments and your checks rely on author/item filters",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/CacheOptions"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"memory",
|
||||
"none",
|
||||
"redis"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"submissionTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, a submission should be cached",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"userNotesTTL": {
|
||||
"default": 300,
|
||||
"description": "Amount of time, in milliseconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached",
|
||||
"examples": [
|
||||
300
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"wikiTTL": {
|
||||
"default": 300,
|
||||
"description": "Amount of time, in seconds, wiki content pages should be cached",
|
||||
"examples": [
|
||||
300
|
||||
],
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"UserNoteActionJson": {
|
||||
"description": "Add a Toolbox User Note to the Author of this Activity",
|
||||
"properties": {
|
||||
@@ -2810,6 +2840,12 @@
|
||||
"description": "Cache the result of this check based on the comment author and the submission id\n\nThis is useful in this type of scenario:\n\n1. This check is configured to run on comments for specific submissions with high volume activity\n2. The rules being run are not dependent on the content of the comment\n3. The rule results are not likely to change while cache is valid",
|
||||
"properties": {
|
||||
"enable": {
|
||||
"default": false,
|
||||
"type": "boolean"
|
||||
},
|
||||
"runActions": {
|
||||
"default": true,
|
||||
"description": "In the event the cache returns a triggered result should the actions for the check also be run?",
|
||||
"type": "boolean"
|
||||
},
|
||||
"ttl": {
|
||||
@@ -2826,7 +2862,7 @@
|
||||
},
|
||||
"properties": {
|
||||
"caching": {
|
||||
"$ref": "#/definitions/SubredditCacheConfig",
|
||||
"$ref": "#/definitions/CacheConfig",
|
||||
"description": "Per-subreddit config for caching TTL values. If set to `false` caching is disabled."
|
||||
},
|
||||
"checks": {
|
||||
|
||||
@@ -23,74 +23,8 @@
|
||||
"description": "The configuration for an **individual reddit account** ContextMod will run as a bot.\n\nMultiple bot configs may be specified (one per reddit account).\n\n**NOTE:** If `bots` is not specified in a `FILE` then a default `bot` is generated using `ENV/ARG` values IE `CLIENT_ID`, etc...but if `bots` IS specified the default is not generated.",
|
||||
"properties": {
|
||||
"caching": {
|
||||
"description": "Settings to configure the default caching behavior for each suberddit",
|
||||
"properties": {
|
||||
"authorTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, author activity history (Comments/Submission) should be cached\n\n* ENV => `AUTHOR_TTL`\n* ARG => `--authorTTL <sec>`",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"commentTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, a comment should be cached",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"filterCriteriaTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)\n\nThis is especially useful if when polling high-volume comments and your checks rely on author/item filters",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/CacheOptions"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"memory",
|
||||
"none",
|
||||
"redis"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"description": "The cache provider and, optionally, a custom configuration for that provider\n\nIf not present or `null` provider will be `memory`.\n\nTo specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`"
|
||||
},
|
||||
"submissionTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, a submission should be cached",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"userNotesTTL": {
|
||||
"default": 300,
|
||||
"description": "Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached",
|
||||
"examples": [
|
||||
300
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"wikiTTL": {
|
||||
"default": 300,
|
||||
"description": "Amount of time, in seconds, wiki content pages should be cached",
|
||||
"examples": [
|
||||
300
|
||||
],
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
"$ref": "#/definitions/CacheConfig",
|
||||
"description": "Settings to configure the default caching behavior for this bot\n\nEvery setting not specified will default to what is specified by the global operator caching config"
|
||||
},
|
||||
"credentials": {
|
||||
"$ref": "#/definitions/RedditCredentials",
|
||||
@@ -242,7 +176,96 @@
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"CacheConfig": {
|
||||
"properties": {
|
||||
"authorTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, author activity history (Comments/Submission) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache\n\n* ENV => `AUTHOR_TTL`\n* ARG => `--authorTTL <sec>`",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"commentTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, a comment should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"filterCriteriaTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)\n\nThis is especially useful if when polling high-volume comments and your checks rely on author/item filters\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/CacheOptions"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"memory",
|
||||
"none",
|
||||
"redis"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"description": "The cache provider and, optionally, a custom configuration for that provider\n\nIf not present or `null` provider will be `memory`.\n\nTo specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`"
|
||||
},
|
||||
"submissionTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, a submission should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"userNotesTTL": {
|
||||
"default": 300,
|
||||
"description": "Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
300
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"wikiTTL": {
|
||||
"default": 300,
|
||||
"description": "Amount of time, in seconds, wiki content pages should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
300
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"CacheOptions": {
|
||||
"additionalProperties": {
|
||||
},
|
||||
"description": "Configure granular settings for a cache provider with this object",
|
||||
"properties": {
|
||||
"auth_pass": {
|
||||
@@ -527,6 +550,10 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"caching": {
|
||||
"$ref": "#/definitions/CacheConfig",
|
||||
"description": "Settings to configure the default caching behavior globally\n\nThese settings will be used by each bot, and subreddit, that does not specify their own"
|
||||
},
|
||||
"logging": {
|
||||
"description": "Settings to configure global logging defaults",
|
||||
"properties": {
|
||||
@@ -605,6 +632,21 @@
|
||||
"web": {
|
||||
"description": "Settings for the web interface",
|
||||
"properties": {
|
||||
"caching": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/CacheOptions"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"memory",
|
||||
"redis"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"description": "Caching provider to use for session and invite data\n\nIf none is provided the top-level caching provider is used"
|
||||
},
|
||||
"clients": {
|
||||
"description": "A list of CM Servers this Client should connect to.\n\nIf not specified a default `BotConnection` for this instance is generated",
|
||||
"examples": [
|
||||
@@ -631,6 +673,20 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"invites": {
|
||||
"description": "Settings related to oauth flow invites",
|
||||
"properties": {
|
||||
"maxAge": {
|
||||
"default": 0,
|
||||
"description": "Number of seconds an invite should be valid for\n\n If `0` or not specified (default) invites do not expire",
|
||||
"examples": [
|
||||
0
|
||||
],
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"logLevel": {
|
||||
"description": "The default log level to filter to in the web interface\n\nIf not specified or `null` will be same as global `logLevel`",
|
||||
"enum": [
|
||||
@@ -674,27 +730,16 @@
|
||||
"session": {
|
||||
"description": "Settings to configure the behavior of user sessions -- the session is what the web interface uses to identify logged in users.",
|
||||
"properties": {
|
||||
"provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/CacheOptions"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"memory",
|
||||
"redis"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"default": "memory",
|
||||
"description": "The cache provider to use.\n\nThe default should be sufficient for almost all use cases",
|
||||
"maxAge": {
|
||||
"default": 86400,
|
||||
"description": "Number of seconds a session should be valid for.\n\nDefault is 1 day",
|
||||
"examples": [
|
||||
"memory"
|
||||
]
|
||||
86400
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"secret": {
|
||||
"description": "The secret value used to encrypt session data\n\nIf provider is persistent (redis) specifying a value here will ensure sessions are valid between application restarts\n\nWhen not present or `null` a random string is generated on application start",
|
||||
"description": "The secret value used to encrypt session data\n\nIf provider is persistent (`redis`) specifying a value here will ensure sessions are valid between application restarts\n\nWhen not present or `null` a random string is generated on application start",
|
||||
"examples": [
|
||||
"definitelyARandomString"
|
||||
],
|
||||
|
||||
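Pulling the web-related schema entries in this diff together, here is an illustrative JSON5 sketch of the `web` block of an operator config. The property names and defaults (the `caching` provider for session and invite data, `invites.maxAge`, `session.secret`/`maxAge`, and `clients`) come from the schema descriptions above; the concrete values are assumptions for illustration only.

```json5
{
  web: {
    port: 8085,          // illustrative port
    caching: 'redis',    // provider used for session and invite data; defaults to the top-level provider
    invites: {
      maxAge: 3600,      // invites expire after an hour; 0 (the default) means they never expire
    },
    session: {
      secret: 'definitelyARandomString', // keeps sessions valid across restarts when the provider is redis
      maxAge: 86400,                     // 1 day, the default
    },
    clients: [
      {host: 'localhost:8095', secret: 'localSecret'}, // secret value is illustrative
    ],
  },
}
```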
@@ -1195,6 +1195,11 @@
"description": "At least one count property must be present. If both are present then either can trigger the rule",
"minProperties": 1,
"properties": {
"karma": {
"description": "Test the **combined karma** from Activities found in the specified subreddits\n\nValue is a string containing a comparison operator and a number of **combined karma** to compare against\n\nIf specified then both `threshold` and `karma` must be met for this `SubThreshold` to be satisfied\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 50` => greater than 50 combined karma for all found Activities in specified subreddits",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"subreddits": {
"description": "A list of Subreddits (by name, case-insensitive) to look for.\n\nEX [\"mealtimevideos\",\"askscience\"]",
"examples": [

@@ -1172,6 +1172,11 @@
"description": "At least one count property must be present. If both are present then either can trigger the rule",
"minProperties": 1,
"properties": {
"karma": {
"description": "Test the **combined karma** from Activities found in the specified subreddits\n\nValue is a string containing a comparison operator and a number of **combined karma** to compare against\n\nIf specified then both `threshold` and `karma` must be met for this `SubThreshold` to be satisfied\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 50` => greater than 50 combined karma for all found Activities in specified subreddits",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"subreddits": {
"description": "A list of Subreddits (by name, case-insensitive) to look for.\n\nEX [\"mealtimevideos\",\"askscience\"]",
"examples": [
@@ -6,13 +6,15 @@ import {
cacheStats,
createRetryHandler,
determineNewResults, formatNumber,
mergeArr, parseFromJsonOrYamlToObject, pollingInfo, sleep, totalFromMapStats,
mergeArr, parseFromJsonOrYamlToObject, pollingInfo, resultsSummary, sleep, totalFromMapStats, triggeredIndicator,
} from "../util";
import {Poll} from "snoostorm";
import pEvent from "p-event";
import {RuleResult} from "../Rule";
import {ConfigBuilder, buildPollingOptions} from "../ConfigBuilder";
import {
ActionedEvent,
ActionResult,
DEFAULT_POLLING_INTERVAL,
DEFAULT_POLLING_LIMIT, Invokee,
ManagerOptions, ManagerStateChangeOption, PAUSED,
@@ -36,6 +38,7 @@ import {queue, QueueObject} from 'async';
import {JSONConfig} from "../JsonConfig";
import {CheckStructuredJson} from "../Check";
import NotificationManager from "../Notification/NotificationManager";
import action from "../Web/Server/routes/authenticated/user/action";

export interface RunningState {
state: RunState,
@@ -82,6 +85,7 @@ export interface ManagerStats {
actionsRunTotal: number
actionsRunSinceStart: number,
actionsRunSinceStartTotal: number
actionedEvents: number
cache: {
provider: string,
currentKeyCount: number,
@@ -165,6 +169,7 @@ export class Manager {
rulesUniqueRollingAvg: number = 0;
actionsRun: Map<string, number> = new Map();
actionsRunSinceStart: Map<string, number> = new Map();
actionedEvents: ActionedEvent[] = [];

getStats = async (): Promise<ManagerStats> => {
const data: any = {
@@ -188,6 +193,7 @@ export class Manager {
actionsRunTotal: totalFromMapStats(this.actionsRun),
actionsRunSinceStart: this.actionsRunSinceStart,
actionsRunSinceStartTotal: totalFromMapStats(this.actionsRunSinceStart),
actionedEvents: this.actionedEvents.length,
cache: {
provider: 'none',
currentKeyCount: 0,
@@ -540,10 +546,22 @@ export class Manager {
let checksRun = 0;
let actionsRun = 0;
let totalRulesRun = 0;
let runActions: Action[] = [];
let runActions: ActionResult[] = [];
let actionedEvent: ActionedEvent = {
activity: {
peek: ePeek,
link: item.permalink
},
author: item.author.name,
timestamp: Date.now(),
check: '',
ruleSummary: '',
ruleResults: [],
actionResults: [],
}
let triggered = false;

try {
let triggered = false;
for (const check of checks) {
if (checkNames.length > 0 && !checkNames.map(x => x.toLowerCase()).some(x => x === check.name.toLowerCase())) {
this.logger.warn(`Check ${check.name} not in array of requested checks to run, skipping...`);
@@ -555,14 +573,22 @@ export class Manager {
}
checksRun++;
triggered = false;
let isFromCache = false;
let currentResults: RuleResult[] = [];
try {
const [checkTriggered, checkResults] = await check.runRules(item, allRuleResults);
await check.setCacheResult(item, checkTriggered);
const [checkTriggered, checkResults, fromCache = false] = await check.runRules(item, allRuleResults);
isFromCache = fromCache;
if(!fromCache) {
await check.setCacheResult(item, {result: checkTriggered, ruleResults: checkResults});
}
currentResults = checkResults;
totalRulesRun += checkResults.length;
allRuleResults = allRuleResults.concat(determineNewResults(allRuleResults, checkResults));
triggered = checkTriggered;
if(triggered && fromCache && !check.cacheUserResult.runActions) {
this.logger.info('Check was triggered but cache result options specified NOT to run actions...counting as check NOT triggered');
triggered = false;
}
} catch (e) {
if (e.logged !== true) {
this.logger.warn(`Running rules for Check ${check.name} failed due to uncaught exception`, e);
@@ -570,13 +596,20 @@ export class Manager {
}

if (triggered) {
actionedEvent.check = check.name;
actionedEvent.ruleResults = currentResults;
if(isFromCache) {
actionedEvent.ruleSummary = `Check result was found in cache: ${triggeredIndicator(true)}`;
} else {
actionedEvent.ruleSummary = resultsSummary(currentResults, check.condition);
}
this.checksTriggered.set(check.name, (this.checksTriggered.get(check.name) || 0) + 1);
this.checksTriggeredSinceStart.set(check.name, (this.checksTriggeredSinceStart.get(check.name) || 0) + 1);
runActions = await check.runActions(item, currentResults.filter(x => x.triggered), dryRun);
actionsRun = runActions.length;

if(check.notifyOnTrigger) {
const ar = runActions.map(x => x.getActionUniqueName()).join(', ');
const ar = runActions.map(x => x.name).join(', ');
this.notificationManager.handle('eventActioned', 'Check Triggered', `Check "${check.name}" was triggered on Event: \n\n ${ePeek} \n\n with the following actions run: ${ar}`);
}
break;
@@ -606,9 +639,15 @@ export class Manager {
this.rulesTriggeredSinceStartTotal += triggeredRulesTotal;

for (const a of runActions) {
const name = a.getActionUniqueName();
const name = a.name;
this.actionsRun.set(name, (this.actionsRun.get(name) || 0) + 1);
this.actionsRunSinceStart.set(name, (this.actionsRunSinceStart.get(name) || 0) + 1)
this.actionsRunSinceStart.set(name, (this.actionsRunSinceStart.get(name) || 0) + 1);
}
actionedEvent.actionResults = runActions;
if(triggered) {
this.actionedEvents.unshift(actionedEvent);
// save last 25 triggered events
this.actionedEvents = this.actionedEvents.slice(0, 25);
}

this.logger.verbose(`Run Stats: Checks ${checksRun} | Rules => Total: ${totalRulesRun} Unique: ${allRuleResults.length} Cached: ${totalRulesRun - allRuleResults.length} Rolling Avg: ~${formatNumber(this.rulesUniqueRollingAvg)}/s | Actions ${actionsRun}`);
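The cache handling above is what consumes the `cacheUserResult` check option described earlier in this diff (`enable`, `runActions`, `ttl`): a check's result can be reused for later comments by the same author on the same submission, and a cached triggered result only runs the check's actions when `runActions` allows it. A hedged JSON5 sketch of a comment check using it follows; the option names come from the schema in this diff, while the check `name`, the `kind` value, and the `ttl` unit are assumptions for illustration only.

```json5
{
  name: 'High volume submission comments',  // illustrative check name
  kind: 'comment',                          // assumed: the option is described for comment checks
  cacheUserResult: {
    enable: true,      // defaults to false
    ttl: 60,           // assumed to be seconds, consistent with the other TTL settings
    runActions: false, // a cached triggered result will not run this check's actions
  },
  rules: [ /* rules whose outcome does not depend on the comment's content */ ],
  actions: [ /* actions to run when the check triggers */ ],
}
```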
@@ -12,9 +12,10 @@ import Subreddit from 'snoowrap/dist/objects/Subreddit';
|
||||
import winston, {Logger} from "winston";
|
||||
import fetch from 'node-fetch';
|
||||
import {
|
||||
buildCacheOptionsFromProvider,
|
||||
asSubmission,
|
||||
buildCacheOptionsFromProvider, buildCachePrefix,
|
||||
cacheStats, createCacheManager,
|
||||
formatNumber,
|
||||
formatNumber, getActivityAuthorName,
|
||||
mergeArr,
|
||||
parseExternalUrl,
|
||||
parseWikiContext
|
||||
@@ -23,8 +24,8 @@ import LoggedError from "../Utils/LoggedError";
|
||||
import {
|
||||
BotInstanceConfig,
|
||||
CacheOptions, CommentState,
|
||||
Footer, OperatorConfig, ResourceStats, SubmissionState,
|
||||
SubredditCacheConfig, TTLConfig, TypedActivityStates
|
||||
Footer, OperatorConfig, ResourceStats, StrongCache, SubmissionState,
|
||||
CacheConfig, TTLConfig, TypedActivityStates, UserResultCache
|
||||
} from "../Common/interfaces";
|
||||
import UserNotes from "./UserNotes";
|
||||
import Mustache from "mustache";
|
||||
@@ -34,11 +35,12 @@ import {SPoll} from "./Streams";
|
||||
import {Cache} from 'cache-manager';
|
||||
import {Submission, Comment} from "snoowrap/dist/objects";
|
||||
import {cacheTTLDefaults} from "../Common/defaults";
|
||||
import {check} from "tcp-port-used";
|
||||
|
||||
export const DEFAULT_FOOTER = '\r\n*****\r\nThis action was performed by [a bot.]({{botLink}}) Mention a moderator or [send a modmail]({{modmailLink}}) if you any ideas, questions, or concerns about this action.';
|
||||
|
||||
export interface SubredditResourceConfig extends Footer {
|
||||
caching?: SubredditCacheConfig,
|
||||
caching?: CacheConfig,
|
||||
subreddit: Subreddit,
|
||||
logger: Logger;
|
||||
client: Snoowrap
|
||||
@@ -52,19 +54,20 @@ interface SubredditResourceOptions extends Footer {
|
||||
subreddit: Subreddit,
|
||||
logger: Logger;
|
||||
client: Snoowrap;
|
||||
prefix? :string;
|
||||
}
|
||||
|
||||
export interface SubredditResourceSetOptions extends SubredditCacheConfig, Footer {
|
||||
export interface SubredditResourceSetOptions extends CacheConfig, Footer {
|
||||
}
|
||||
|
||||
export class SubredditResources {
|
||||
//enabled!: boolean;
|
||||
protected useSubredditAuthorCache!: boolean;
|
||||
protected authorTTL: number = cacheTTLDefaults.authorTTL;
|
||||
protected wikiTTL: number = cacheTTLDefaults.wikiTTL;
|
||||
protected submissionTTL: number = cacheTTLDefaults.submissionTTL;
|
||||
protected commentTTL: number = cacheTTLDefaults.commentTTL;
|
||||
protected filterCriteriaTTL: number = cacheTTLDefaults.filterCriteriaTTL;
|
||||
protected authorTTL: number | false = cacheTTLDefaults.authorTTL;
|
||||
protected wikiTTL: number | false = cacheTTLDefaults.wikiTTL;
|
||||
protected submissionTTL: number | false = cacheTTLDefaults.submissionTTL;
|
||||
protected commentTTL: number | false = cacheTTLDefaults.commentTTL;
|
||||
protected filterCriteriaTTL: number | false = cacheTTLDefaults.filterCriteriaTTL;
|
||||
name: string;
|
||||
protected logger: Logger;
|
||||
userNotes: UserNotes;
|
||||
@@ -75,6 +78,7 @@ export class SubredditResources {
|
||||
cacheType: string
|
||||
cacheSettingsHash?: string;
|
||||
pruneInterval?: any;
|
||||
prefix?: string
|
||||
|
||||
stats: { cache: ResourceStats };
|
||||
|
||||
@@ -87,8 +91,11 @@ export class SubredditResources {
|
||||
authorTTL,
|
||||
wikiTTL,
|
||||
filterCriteriaTTL,
|
||||
submissionTTL,
|
||||
commentTTL,
|
||||
},
|
||||
cache,
|
||||
prefix,
|
||||
cacheType,
|
||||
cacheSettingsHash,
|
||||
client,
|
||||
@@ -96,11 +103,14 @@ export class SubredditResources {
|
||||
|
||||
this.cacheSettingsHash = cacheSettingsHash;
|
||||
this.cache = cache;
|
||||
this.prefix = prefix;
|
||||
this.client = client;
|
||||
this.cacheType = cacheType;
|
||||
this.authorTTL = authorTTL;
|
||||
this.wikiTTL = wikiTTL;
|
||||
this.filterCriteriaTTL = filterCriteriaTTL;
|
||||
this.authorTTL = authorTTL === true ? 0 : authorTTL;
|
||||
this.submissionTTL = submissionTTL === true ? 0 : submissionTTL;
|
||||
this.commentTTL = commentTTL === true ? 0 : commentTTL;
|
||||
this.wikiTTL = wikiTTL === true ? 0 : wikiTTL;
|
||||
this.filterCriteriaTTL = filterCriteriaTTL === true ? 0 : filterCriteriaTTL;
|
||||
this.subreddit = subreddit;
|
||||
this.name = name;
|
||||
if (logger === undefined) {
|
||||
@@ -122,7 +132,7 @@ export class SubredditResources {
|
||||
this.userNotes = new UserNotes(userNotesTTL, this.subreddit, this.logger, this.cache, cacheUseCB)
|
||||
|
||||
if(this.cacheType === 'memory' && this.cacheSettingsHash !== 'default') {
|
||||
const min = Math.min(...([wikiTTL, authorTTL, userNotesTTL].filter(x => x !== 0)));
|
||||
const min = Math.min(...([this.wikiTTL, this.authorTTL, this.submissionTTL, this.commentTTL, this.filterCriteriaTTL].filter(x => typeof x === 'number' && x !== 0) as number[]));
|
||||
if(min > 0) {
|
||||
// set default prune interval
|
||||
this.pruneInterval = setInterval(() => {
|
||||
@@ -137,6 +147,9 @@ export class SubredditResources {
|
||||
|
||||
async getCacheKeyCount() {
|
||||
if (this.cache.store.keys !== undefined) {
|
||||
if(this.cacheType === 'redis') {
|
||||
return (await this.cache.store.keys(`${this.prefix}*`)).length;
|
||||
}
|
||||
return (await this.cache.store.keys()).length;
|
||||
}
|
||||
return 0;
|
||||
@@ -203,13 +216,13 @@ export class SubredditResources {
|
||||
async getActivity(item: Submission | Comment) {
|
||||
try {
|
||||
let hash = '';
|
||||
if (item instanceof Submission && this.submissionTTL > 0) {
|
||||
if (this.submissionTTL !== false && asSubmission(item)) {
|
||||
hash = `sub-${item.name}`;
|
||||
await this.stats.cache.submission.identifierRequestCount.set(hash, (await this.stats.cache.submission.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
|
||||
this.stats.cache.submission.requestTimestamps.push(Date.now());
|
||||
this.stats.cache.submission.requests++;
|
||||
const cachedSubmission = await this.cache.get(hash);
|
||||
if (cachedSubmission !== undefined) {
|
||||
if (cachedSubmission !== undefined && cachedSubmission !== null) {
|
||||
this.logger.debug(`Cache Hit: Submission ${item.name}`);
|
||||
return cachedSubmission;
|
||||
}
|
||||
@@ -218,13 +231,13 @@ export class SubredditResources {
|
||||
this.stats.cache.submission.miss++;
|
||||
await this.cache.set(hash, submission, {ttl: this.submissionTTL});
|
||||
return submission;
|
||||
} else if (this.commentTTL > 0) {
|
||||
} else if (this.commentTTL !== false) {
|
||||
hash = `comm-${item.name}`;
|
||||
await this.stats.cache.comment.identifierRequestCount.set(hash, (await this.stats.cache.comment.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
|
||||
this.stats.cache.comment.requestTimestamps.push(Date.now());
|
||||
this.stats.cache.comment.requests++;
|
||||
const cachedComment = await this.cache.get(hash);
|
||||
if (cachedComment !== undefined) {
|
||||
if (cachedComment !== undefined && cachedComment !== null) {
|
||||
this.logger.debug(`Cache Hit: Comment ${item.name}`);
|
||||
return cachedComment;
|
||||
}
|
||||
@@ -244,29 +257,37 @@ export class SubredditResources {
|
||||
}
|
||||
|
||||
async getAuthorActivities(user: RedditUser, options: AuthorTypedActivitiesOptions): Promise<Array<Submission | Comment>> {
|
||||
if (this.authorTTL > 0) {
|
||||
const userName = user.name;
|
||||
const hashObj: any = {...options, userName};
|
||||
const userName = getActivityAuthorName(user);
|
||||
if (this.authorTTL !== false) {
|
||||
const hashObj: any = options;
|
||||
if (this.useSubredditAuthorCache) {
|
||||
hashObj.subreddit = this.name;
|
||||
hashObj.subreddit = this.subreddit;
|
||||
}
|
||||
const hash = objectHash.sha1({...options, userName});
|
||||
const hash = `authorActivities-${userName}-${options.type || 'overview'}-${objectHash.sha1(hashObj)}`;
|
||||
|
||||
this.stats.cache.author.requests++;
|
||||
await this.stats.cache.author.identifierRequestCount.set(user.name, (await this.stats.cache.author.identifierRequestCount.wrap(user.name, () => 0) as number) + 1);
|
||||
await this.stats.cache.author.identifierRequestCount.set(userName, (await this.stats.cache.author.identifierRequestCount.wrap(userName, () => 0) as number) + 1);
|
||||
this.stats.cache.author.requestTimestamps.push(Date.now());
|
||||
let miss = false;
|
||||
const cacheVal = await this.cache.wrap(hash, async () => {
|
||||
miss = true;
|
||||
if(typeof user === 'string') {
|
||||
// @ts-ignore
|
||||
user = await this.client.getUser(userName);
|
||||
}
|
||||
return await getAuthorActivities(user, options);
|
||||
}, {ttl: this.authorTTL});
|
||||
if (!miss) {
|
||||
this.logger.debug(`Cache Hit: ${userName} (${options.type || 'overview'})`);
|
||||
this.logger.debug(`Cache Hit: ${userName} (Hash ${hash})`);
|
||||
} else {
|
||||
this.stats.cache.author.miss++;
|
||||
}
|
||||
return cacheVal as Array<Submission | Comment>;
|
||||
}
|
||||
if(typeof user === 'string') {
|
||||
// @ts-ignore
|
||||
user = await this.client.getUser(userName);
|
||||
}
|
||||
return await getAuthorActivities(user, options);
|
||||
}
|
||||
|
||||
@@ -298,14 +319,14 @@ export class SubredditResources {
|
||||
}
|
||||
|
||||
// try to get cached value first
|
||||
let hash = `${subreddit.display_name}-${cacheKey}`;
|
||||
if (this.wikiTTL > 0) {
|
||||
let hash = `${subreddit.display_name}-content-${cacheKey}`;
|
||||
if (this.wikiTTL !== false) {
|
||||
await this.stats.cache.content.identifierRequestCount.set(cacheKey, (await this.stats.cache.content.identifierRequestCount.wrap(cacheKey, () => 0) as number) + 1);
|
||||
this.stats.cache.content.requestTimestamps.push(Date.now());
|
||||
this.stats.cache.content.requests++;
|
||||
const cachedContent = await this.cache.get(hash);
|
||||
if (cachedContent !== undefined) {
|
||||
this.logger.debug(`Cache Hit: ${cacheKey}`);
|
||||
if (cachedContent !== undefined && cachedContent !== null) {
|
||||
this.logger.debug(`Content Cache Hit: ${cacheKey}`);
|
||||
return cachedContent as string;
|
||||
} else {
|
||||
this.stats.cache.content.miss++;
|
||||
@@ -349,7 +370,7 @@ export class SubredditResources {
|
||||
}
|
||||
}
|
||||
|
||||
if (this.wikiTTL > 0) {
|
||||
if (this.wikiTTL !== false) {
|
||||
this.cache.set(hash, wikiContent, {ttl: this.wikiTTL});
|
||||
}
|
||||
|
||||
@@ -357,9 +378,15 @@ export class SubredditResources {
|
||||
}
|
||||
|
||||
async testAuthorCriteria(item: (Comment | Submission), authorOpts: AuthorCriteria, include = true) {
|
||||
if (this.filterCriteriaTTL > 0) {
|
||||
const hashObj = {itemId: item.id, ...authorOpts, include};
|
||||
const hash = `authorCrit-${objectHash.sha1(hashObj)}`;
|
||||
if (this.filterCriteriaTTL !== false) {
|
||||
// in the criteria check we only actually use the `item` to get the author flair
|
||||
// which will be the same for the entire subreddit
|
||||
//
|
||||
// so we can create a hash only using subreddit-author-criteria
|
||||
// and ignore the actual item
|
||||
const hashObj = {...authorOpts, include};
|
||||
const userName = getActivityAuthorName(item.author);
|
||||
const hash = `authorCrit-${this.subreddit.display_name}-${userName}-${objectHash.sha1(hashObj)}`;
|
||||
await this.stats.cache.authorCrit.identifierRequestCount.set(hash, (await this.stats.cache.authorCrit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
|
||||
this.stats.cache.authorCrit.requestTimestamps.push(Date.now());
|
||||
this.stats.cache.authorCrit.requests++;
|
||||
@@ -367,9 +394,9 @@ export class SubredditResources {
|
||||
const cachedAuthorTest = await this.cache.wrap(hash, async () => {
|
||||
miss = true;
|
||||
return await testAuthorCriteria(item, authorOpts, include, this.userNotes);
|
||||
}, {ttl: this.authorTTL});
|
||||
}, {ttl: this.filterCriteriaTTL});
|
||||
if (!miss) {
|
||||
this.logger.debug(`Cache Hit: Author Check on ${item.id}`);
|
||||
this.logger.debug(`Cache Hit: Author Check on ${userName} (Hash ${hash})`);
|
||||
} else {
|
||||
this.stats.cache.authorCrit.miss++;
|
||||
}
|
||||
@@ -380,7 +407,7 @@ export class SubredditResources {
|
||||
}
|
||||
|
||||
async testItemCriteria(i: (Comment | Submission), s: TypedActivityStates) {
|
||||
if (this.filterCriteriaTTL > 0) {
|
||||
if (this.filterCriteriaTTL !== false) {
|
||||
let item = i;
|
||||
let states = s;
|
||||
// optimize for submission only checks on comment item
|
||||
@@ -392,19 +419,18 @@ export class SubredditResources {
|
||||
states = (states[0] as CommentState).submissionState as SubmissionState[];
|
||||
}
|
||||
try {
|
||||
const hashObj = {itemId: item.name, ...states};
|
||||
const hash = `itemCrit-${objectHash.sha1(hashObj)}`;
|
||||
const hash = `itemCrit-${item.name}-${objectHash.sha1(states)}`;
|
||||
await this.stats.cache.itemCrit.identifierRequestCount.set(hash, (await this.stats.cache.itemCrit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
|
||||
this.stats.cache.itemCrit.requestTimestamps.push(Date.now());
|
||||
this.stats.cache.itemCrit.requests++;
|
||||
const cachedItem = await this.cache.get(hash);
|
||||
if (cachedItem !== undefined) {
|
||||
this.logger.debug(`Cache Hit: Item Check on ${item.name}`);
|
||||
if (cachedItem !== undefined && cachedItem !== null) {
|
||||
this.logger.debug(`Cache Hit: Item Check on ${item.name} (Hash ${hash})`);
|
||||
return cachedItem as boolean;
|
||||
}
|
||||
const itemResult = await this.isItem(item, states, this.logger);
|
||||
this.stats.cache.itemCrit.miss++;
|
||||
const res = await this.cache.set(hash, itemResult, {ttl: this.filterCriteriaTTL});
|
||||
await this.cache.set(hash, itemResult, {ttl: this.filterCriteriaTTL});
|
||||
return itemResult;
|
||||
} catch (err) {
|
||||
if (err.logged !== true) {
|
||||
@@ -514,36 +540,26 @@ export class SubredditResources {
|
||||
return false
|
||||
}
|
||||
|
||||
async getCommentCheckCacheResult(item: Comment, checkConfig: object): Promise<boolean | undefined> {
|
||||
const criteria = {
|
||||
author: item.author.name,
|
||||
submission: item.link_id,
|
||||
...checkConfig
|
||||
}
|
||||
const hash = objectHash.sha1(criteria);
|
||||
async getCommentCheckCacheResult(item: Comment, checkConfig: object): Promise<UserResultCache | undefined> {
|
||||
const userName = getActivityAuthorName(item.author);
|
||||
const hash = `commentUserResult-${userName}-${item.link_id}-${objectHash.sha1(checkConfig)}`;
|
||||
this.stats.cache.commentCheck.requests++;
|
||||
const result = await this.cache.get(hash) as boolean | undefined;
|
||||
let result = await this.cache.get(hash) as UserResultCache | undefined | null;
|
||||
if(result === null) {
|
||||
result = undefined;
|
||||
}
|
||||
if(result === undefined) {
|
||||
this.stats.cache.commentCheck.miss++;
|
||||
}
|
||||
this.logger.debug(`Cache Hit: Comment Check for ${item.author.name} in Submission ${item.link_id}`);
|
||||
this.logger.debug(`Cache Hit: Comment Check for ${userName} in Submission ${item.link_id} (Hash ${hash})`);
|
||||
return result;
|
||||
}
|
||||
|
||||
async setCommentCheckCacheResult(item: Comment, checkConfig: object, result: boolean, ttl: number) {
|
||||
const criteria = {
|
||||
author: item.author.name,
|
||||
submission: item.link_id,
|
||||
...checkConfig
|
||||
}
|
||||
const hash = objectHash.sha1(criteria);
|
||||
// don't set if result is already cached
|
||||
if(undefined !== await this.cache.get(hash)) {
|
||||
this.logger.debug(`Check result already cached for User ${item.author.name} on Submission ${item.link_id}`);
|
||||
} else {
|
||||
await this.cache.set(hash, result, { ttl });
|
||||
this.logger.debug(`Cached check result '${result}' for User ${item.author.name} on Submission ${item.link_id} for ${ttl} seconds`);
|
||||
}
|
||||
async setCommentCheckCacheResult(item: Comment, checkConfig: object, result: UserResultCache, ttl: number) {
|
||||
const userName = getActivityAuthorName(item.author);
|
||||
const hash = `commentUserResult-${userName}-${item.link_id}-${objectHash.sha1(checkConfig)}`
|
||||
await this.cache.set(hash, result, { ttl });
|
||||
this.logger.debug(`Cached check result '${result.result}' for User ${userName} on Submission ${item.link_id} for ${ttl} seconds (Hash ${hash})`);
|
||||
}
|
||||
async generateFooter(item: Submission | Comment, actionFooter?: false | string) {

@@ -566,6 +582,7 @@ export class BotResourcesManager {
enabled: boolean = true;
modStreams: Map<string, SPoll<Snoowrap.Submission | Snoowrap.Comment>> = new Map();
defaultCache: Cache;
defaultCacheConfig: StrongCache
cacheType: string = 'none';
cacheHash: string;
ttlDefaults: Required<TTLConfig>;
@@ -582,16 +599,20 @@ export class BotResourcesManager {
filterCriteriaTTL,
provider,
},
name,
credentials,
caching,
} = config;
caching.provider.prefix = buildCachePrefix([caching.provider.prefix, 'SHARED']);
this.cacheHash = objectHash.sha1(caching);
this.defaultCacheConfig = caching;
this.ttlDefaults = {authorTTL, userNotesTTL, wikiTTL, commentTTL, submissionTTL, filterCriteriaTTL};

const options = provider;
this.cacheType = options.store;
this.defaultCache = createCacheManager(options);
if (this.cacheType === 'memory') {
const min = Math.min(...([this.ttlDefaults.wikiTTL, this.ttlDefaults.authorTTL, this.ttlDefaults.userNotesTTL].filter(x => x !== 0)));
const min = Math.min(...([this.ttlDefaults.wikiTTL, this.ttlDefaults.authorTTL, this.ttlDefaults.userNotesTTL].filter(x => typeof x === 'number' && x !== 0) as number[]));
if (min > 0) {
// set default prune interval
this.pruneInterval = setInterval(() => {
@@ -622,11 +643,12 @@ export class BotResourcesManager {
cacheType: this.cacheType,
cacheSettingsHash: hash,
ttl: this.ttlDefaults,
prefix: this.defaultCacheConfig.provider.prefix,
...init,
};

if(caching !== undefined) {
const {provider = 'memory', ...rest} = caching;
const {provider = this.defaultCacheConfig.provider, ...rest} = caching;
let cacheConfig = {
provider: buildCacheOptionsFromProvider(provider),
ttl: {
@@ -638,10 +660,14 @@ export class BotResourcesManager {
// only need to create a private cache if its settings are actually different than the default
if(hash !== this.cacheHash) {
const {provider: trueProvider, ...trueRest} = cacheConfig;
const defaultPrefix = trueProvider.prefix;
const subPrefix = defaultPrefix === this.defaultCacheConfig.provider.prefix ? buildCachePrefix([(defaultPrefix !== undefined ? defaultPrefix.replace('SHARED', '') : defaultPrefix), subName]) : trueProvider.prefix;
trueProvider.prefix = subPrefix;
opts = {
cache: createCacheManager(trueProvider),
cacheType: trueProvider.store,
cacheSettingsHash: hash,
prefix: subPrefix,
...init,
...trueRest,
};

@@ -1,6 +1,13 @@
import dayjs, {Dayjs} from "dayjs";
import {Comment, RedditUser, WikiPage} from "snoowrap";
import {COMMENT_URL_ID, deflateUserNotes, inflateUserNotes, parseLinkIdentifier, SUBMISSION_URL_ID} from "../util";
import {
COMMENT_URL_ID,
deflateUserNotes, getActivityAuthorName,
inflateUserNotes,
isScopeError,
parseLinkIdentifier,
SUBMISSION_URL_ID
} from "../util";
import Subreddit from "snoowrap/dist/objects/Subreddit";
import {Logger} from "winston";
import LoggedError from "../Utils/LoggedError";
@@ -48,7 +55,7 @@ export interface RawNote {
export type UserNotesConstants = Pick<any, "users" | "warnings">;

export class UserNotes {
notesTTL: number;
notesTTL: number | false;
subreddit: Subreddit;
wiki: WikiPage;
moderators?: RedditUser[];
@@ -63,8 +70,8 @@ export class UserNotes {
debounceCB: any;
batchCount: number = 0;

constructor(ttl: number, subreddit: Subreddit, logger: Logger, cache: Cache, cacheCB: Function) {
this.notesTTL = ttl;
constructor(ttl: number | boolean, subreddit: Subreddit, logger: Logger, cache: Cache, cacheCB: Function) {
this.notesTTL = ttl === true ? 0 : ttl;
this.subreddit = subreddit;
this.logger = logger;
this.wiki = subreddit.getWikiPage('usernotes');
@@ -74,10 +81,11 @@ export class UserNotes {
}

async getUserNotes(user: RedditUser): Promise<UserNote[]> {
const userName = getActivityAuthorName(user);
let notes: UserNote[] | undefined = [];

if (this.users !== undefined) {
notes = this.users.get(user.name);
notes = this.users.get(userName);
if (notes !== undefined) {
this.logger.debug('Returned cached notes');
return notes;
@@ -85,7 +93,7 @@
}

const payload = await this.retrieveData();
const rawNotes = payload.blob[user.name];
const rawNotes = payload.blob[userName];
if (rawNotes !== undefined) {
if (this.moderators === undefined) {
this.moderators = await this.subreddit.getModerators();
@@ -94,7 +102,7 @@
// sort in ascending order by time
notes.sort((a, b) => a.time.isBefore(b.time) ? -1 : 1);
if (this.notesTTL > 0 && this.cache !== undefined) {
this.users.set(user.name, notes);
this.users.set(userName, notes);
}
return notes;
} else {
@@ -105,6 +113,7 @@
async addUserNote(item: (Submission|Comment), type: string | number, text: string = ''): Promise<UserNote>
{
const payload = await this.retrieveData();
const userName = getActivityAuthorName(item.author);

// idgaf
// @ts-ignore
@@ -120,16 +129,16 @@
}
const newNote = new UserNote(dayjs(), text, mod, type, `https://reddit.com${item.permalink}`);

if(payload.blob[item.author.name] === undefined) {
payload.blob[item.author.name] = {ns: []};
if(payload.blob[userName] === undefined) {
payload.blob[userName] = {ns: []};
}
payload.blob[item.author.name].ns.push(newNote.toRaw(payload.constants));
payload.blob[userName].ns.push(newNote.toRaw(payload.constants));

await this.saveData(payload);
if(this.notesTTL > 0) {
const currNotes = this.users.get(item.author.name) || [];
const currNotes = this.users.get(userName) || [];
currNotes.push(newNote);
this.users.set(item.author.name, currNotes);
this.users.set(userName, currNotes);
}
return newNote;
}
@@ -144,7 +153,7 @@
let cacheMiss;
if (this.notesTTL > 0) {
const cachedPayload = await this.cache.get(this.identifier);
if (cachedPayload !== undefined) {
if (cachedPayload !== undefined && cachedPayload !== null) {
this.cacheCB(false);
return cachedPayload as unknown as RawUserNotesPayload;
}
@@ -153,14 +162,15 @@
}

try {
if(cacheMiss && this.debounceCB !== undefined) {
// timeout is still delayed. it's our wiki data and we want it now! cm cacheworth 877 cache now
this.logger.debug(`Detected missed cache on usernotes retrieval while batch (${this.batchCount}) save is in progress, executing save immediately before retrieving new notes...`);
clearTimeout(this.saveDebounce);
await this.debounceCB();
this.debounceCB = undefined;
this.saveDebounce = undefined;
}
// DISABLED for now because I think it's causing issues
// if(cacheMiss && this.debounceCB !== undefined) {
//     // timeout is still delayed. it's our wiki data and we want it now! cm cacheworth 877 cache now
//     this.logger.debug(`Detected missed cache on usernotes retrieval while batch (${this.batchCount}) save is in progress, executing save immediately before retrieving new notes...`);
//     clearTimeout(this.saveDebounce);
//     await this.debounceCB();
//     this.debounceCB = undefined;
//     this.saveDebounce = undefined;
// }
// @ts-ignore
this.wiki = await this.subreddit.getWikiPage('usernotes').fetch();
const wikiContent = this.wiki.content_md;
@@ -169,7 +179,7 @@

userNotes.blob = inflateUserNotes(userNotes.blob);

if (this.notesTTL > 0) {
if (this.notesTTL !== false) {
await this.cache.set(`${this.subreddit.display_name}-usernotes`, userNotes, {ttl: this.notesTTL});
this.users = new Map();
}
@@ -187,30 +197,36 @@
const blob = deflateUserNotes(payload.blob);
const wikiPayload = {text: JSON.stringify({...payload, blob}), reason: 'ContextBot edited usernotes'};
try {
if (this.notesTTL > 0) {
if (this.notesTTL !== false) {
// DISABLED for now because if it fails it throws an uncaught rejection
// and I need to figure out how to handle this other than just logging (want to interrupt action flow too?)
//
// debounce usernote save by 5 seconds -- effectively batch usernote saves
//
// so that if we are processing a ton of checks that write user notes we aren't calling to save the wiki page on every call
// since we also have everything in cache (most likely...)
//
// TODO might want to increase timeout to 10 seconds
if(this.saveDebounce !== undefined) {
clearTimeout(this.saveDebounce);
}
this.debounceCB = (async function () {
const p = wikiPayload;
// @ts-ignore
const self = this as UserNotes;
// @ts-ignore
self.wiki = await self.subreddit.getWikiPage('usernotes').edit(p);
self.logger.debug(`Batch saved ${self.batchCount} usernotes`);
self.debounceCB = undefined;
self.saveDebounce = undefined;
self.batchCount = 0;
}).bind(this);
this.saveDebounce = setTimeout(this.debounceCB,5000);
this.batchCount++;
this.logger.debug(`Saving Usernotes has been debounced for 5 seconds (${this.batchCount} batched)`)
// if(this.saveDebounce !== undefined) {
//     clearTimeout(this.saveDebounce);
// }
// this.debounceCB = (async function () {
//     const p = wikiPayload;
//     // @ts-ignore
//     const self = this as UserNotes;
//     // @ts-ignore
//     self.wiki = await self.subreddit.getWikiPage('usernotes').edit(p);
//     self.logger.debug(`Batch saved ${self.batchCount} usernotes`);
//     self.debounceCB = undefined;
//     self.saveDebounce = undefined;
//     self.batchCount = 0;
// }).bind(this);
// this.saveDebounce = setTimeout(this.debounceCB,5000);
// this.batchCount++;
// this.logger.debug(`Saving Usernotes has been debounced for 5 seconds (${this.batchCount} batched)`)

// @ts-ignore
await this.subreddit.getWikiPage('usernotes').edit(wikiPayload);
await this.cache.set(this.identifier, payload, {ttl: this.notesTTL});
this.users = new Map();
} else {
@@ -220,7 +236,13 @@

return payload as RawUserNotesPayload;
} catch (err) {
const msg = `Could not edit usernotes. Make sure at least one moderator has used toolbox and usernotes before and that this account has editing permissions`;
let msg = 'Could not edit usernotes.';
// Make sure at least one moderator has used toolbox and usernotes before and that this account has editing permissions`;
if(isScopeError(err)) {
msg = `${msg} The bot account did not have sufficient OAUTH scope to perform this action. You must re-authenticate the bot and ensure it has 'wikiedit' permissions.`
} else {
msg = `${msg} Make sure at least one moderator has used toolbox, created a usernote, and that this account has editing permissions for the wiki page.`;
}
this.logger.error(msg, err);
throw new LoggedError(msg);
}

@@ -504,7 +504,8 @@ export const itemContentPeek = async (item: (Comment | Submission), peekLength =
peek = `${truncatePeek(item.title)} by ${author} https://reddit.com${item.permalink}`;

} else if (item instanceof Comment) {
content = truncatePeek(item.body);
// replace newlines with spaces to make peek more compact
content = truncatePeek(item.body.replaceAll('\n', ' '));
peek = `${truncatePeek(content)} by ${author} in https://reddit.com${item.permalink}`;
}

@@ -8,12 +8,13 @@ import passport from 'passport';
import {Strategy as CustomStrategy} from 'passport-custom';
import {OperatorConfig, BotConnection, LogInfo} from "../../Common/interfaces";
import {
buildCachePrefix,
createCacheManager, filterLogBySubreddit,
formatLogLineToHtml,
intersect, isLogLineMinLevel,
LogEntry, parseFromJsonOrYamlToObject, parseInstanceLogInfoName, parseInstanceLogName,
parseSubredditLogName, permissions,
randomId, sleep
randomId, sleep, triggeredIndicator
} from "../../util";
import {Cache} from "cache-manager";
import session, {Session, SessionData} from "express-session";
@@ -42,6 +43,7 @@ import {booleanMiddle} from "../Common/middleware";
import {BotInstance, CMInstance} from "../interfaces";
import { URL } from "url";
import {MESSAGE} from "triple-beam";
import Autolinker from "autolinker";

const emitter = new EventEmitter();

@@ -119,9 +121,16 @@ const webClient = async (options: OperatorConfig) => {
},
web: {
port,
caching,
caching: {
prefix
},
invites: {
maxAge: invitesMaxAge,
},
session: {
provider,
secret,
maxAge: sessionMaxAge,
},
maxLogs,
clients,
@@ -134,8 +143,6 @@ const webClient = async (options: OperatorConfig) => {
},
} = options;

const connectedUsers: ConnectUserObj = {};

const webOps = operators.map(x => x.toLowerCase());

const logger = getLogger({defaultLabel: 'Web', ...options.logging}, 'Web');
@@ -159,11 +166,15 @@ const webClient = async (options: OperatorConfig) => {
throw new SimpleError(`Specified port for web interface (${port}) is in use or not available. Cannot start web server.`);
}

if (provider.store === 'none') {
logger.warn(`Cannot use 'none' for session store or else no one can use the interface...falling back to 'memory'`);
provider.store = 'memory';
if (caching.store === 'none') {
logger.warn(`Cannot use 'none' for web caching or else no one can use the interface...falling back to 'memory'`);
caching.store = 'memory';
}
//const webCache = createCacheManager(provider) as Cache;
//const webCachePrefix = buildCachePrefix([prefix, 'web']);
const webCache = createCacheManager({...caching, prefix: buildCachePrefix([prefix, 'web'])}) as Cache;

//const previousSessions = await webCache.get
const connectedUsers: ConnectUserObj = {};

//<editor-fold desc=Session and Auth>
/*
@@ -217,9 +228,9 @@ const webClient = async (options: OperatorConfig) => {

const sessionObj = session({
cookie: {
maxAge: provider.ttl,
maxAge: sessionMaxAge * 1000,
},
store: new CacheManagerStore(createCacheManager(provider) as Cache),
store: new CacheManagerStore(webCache, {prefix: 'sess:'}),
resave: false,
saveUninitialized: false,
secret,
@@ -279,7 +290,7 @@ const webClient = async (options: OperatorConfig) => {
return res.render('error', {error: errContent});
}
// @ts-ignore
const invite = invites.get(req.session.inviteId) as inviteData;
const invite = await webCache.get(`invite:${req.session.inviteId}`) as InviteData;
const client = await Snoowrap.fromAuthCode({
userAgent: `web:contextBot:web`,
clientId: invite.clientId,
@@ -290,7 +301,7 @@ const webClient = async (options: OperatorConfig) => {
// @ts-ignore
const user = await client.getMe();
// @ts-ignore
invites.delete(req.session.inviteId);
await webCache.del(`invite:${req.session.inviteId}`);
let data: any = {
accessToken: client.accessToken,
refreshToken: client.refreshToken,
@@ -346,7 +357,7 @@ const webClient = async (options: OperatorConfig) => {
});

let token = randomId();
interface inviteData {
interface InviteData {
permissions: string[],
subreddit?: string,
instance?: string,
@@ -355,7 +366,6 @@ const webClient = async (options: OperatorConfig) => {
redirectUri: string
creator: string
}
const invites: Map<string, inviteData> = new Map();

const helperAuthed = async (req: express.Request, res: express.Response, next: Function) => {

@@ -386,14 +396,14 @@ const webClient = async (options: OperatorConfig) => {
});
});

app.getAsync('/auth/invite', (req, res) => {
app.getAsync('/auth/invite', async (req, res) => {
const {invite: inviteId} = req.query;

if(inviteId === undefined) {
return res.render('error', {error: '`invite` param is missing from URL'});
}
const invite = invites.get(inviteId as string);
if(invite === undefined) {
const invite = await webCache.get(`invite:${inviteId}`) as InviteData | undefined | null;
if(invite === undefined || invite === null) {
return res.render('error', {error: 'Invite with the given id does not exist'});
}

@@ -429,7 +439,7 @@ const webClient = async (options: OperatorConfig) => {
}

const inviteId = code || randomId();
invites.set(inviteId, {
await webCache.set(`invite:${inviteId}`, {
permissions,
clientId: (ci || clientId).trim(),
clientSecret: (ce || clientSecret).trim(),
@@ -437,7 +447,7 @@ const webClient = async (options: OperatorConfig) => {
instance,
subreddit,
creator: (req.user as Express.User).name,
});
}, {ttl: invitesMaxAge * 1000});
return res.send(inviteId);
});

@@ -446,8 +456,8 @@ const webClient = async (options: OperatorConfig) => {
if(inviteId === undefined) {
return res.render('error', {error: '`invite` param is missing from URL'});
}
const invite = invites.get(inviteId as string);
if(invite === undefined) {
const invite = await webCache.get(`invite:${inviteId}`) as InviteData | undefined | null;
if(invite === undefined || invite === null) {
return res.render('error', {error: 'Invite with the given id does not exist'});
}

@@ -643,9 +653,9 @@ const webClient = async (options: OperatorConfig) => {
req.session.level = 'verbose';
req.session.sort = 'descending';
req.session.save();
// @ts-ignore
connectedUsers[req.session.id] = {};
}
// @ts-ignore
connectedUsers[req.session.id] = {};
next();
}

@@ -822,6 +832,73 @@ const webClient = async (options: OperatorConfig) => {
return res.send(resp);
});

app.getAsync('/events', [ensureAuthenticatedApi, defaultSession, instanceWithPermissions, botWithPermissions, createUserToken], async (req: express.Request, res: express.Response) => {
const {subreddit} = req.query as any;
const resp = await got.get(`${(req.instance as CMInstance).normalUrl}/events`, {
headers: {
'Authorization': `Bearer ${req.token}`,
},
searchParams: {
subreddit,
bot: req.bot?.botName
}
}).json() as [any];

return res.render('events', {
data: resp.map((x) => {
const {timestamp, activity: {peek, link}, ruleResults = [], actionResults = [], ...rest} = x;
const time = dayjs(timestamp).local().format();
const formattedPeek = Autolinker.link(peek, {
email: false,
phone: false,
mention: false,
hashtag: false,
stripPrefix: false,
sanitizeHtml: true,
});
const formattedRuleResults = ruleResults.map((y: any) => {
const {triggered, result, ...restY} = y;
let t = 'Not Triggered';
if(triggered === null) {
t = 'Skipped';
} else if(triggered === true) {
t = 'Triggered';
}
return {
...restY,
triggered: t,
result: result || '-'
};
});
const formattedActionResults = actionResults.map((y: any) => {
const {run, runReason, success, result, dryRun, ...restA} = y;
let res = '';
if(!run) {
res = `Not Run - ${runReason === undefined ? '(No Reason)' : runReason}`;
} else {
res = `Success: ${triggeredIndicator(success)}${result !== undefined ? ` - ${result}` : ''}`;
}
return {
...restA,
dryRun: dryRun ? ' (DRYRUN)' : '',
result: res
};
});
return {
...rest,
timestamp: time,
activity: {
link,
peek: formattedPeek,
},
ruleResults: formattedRuleResults,
actionResults: formattedActionResults
}
}),
title: `${subreddit} Actioned Events`
});
});
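For context, the handler above only relies on a handful of fields from each actioned event returned by the server. A rough sketch of that shape, inferred purely from the destructuring in the route and the `events.ejs` view added later in this diff; every value here is invented:

```typescript
// Hypothetical example event, not real output
const exampleActionedEvent = {
    timestamp: 1628000000000,
    check: 'exampleCheck',
    ruleSummary: 'example rule summary',
    activity: {
        peek: 'comment text... by someUser in https://reddit.com/r/example/...',
        link: '/r/example/comments/abc123/_/def456',
    },
    ruleResults: [
        {name: 'exampleRule', triggered: true, result: 'matched 2 times'},
    ],
    actionResults: [
        {name: 'remove', run: true, success: true, dryRun: false, result: 'removed'},
    ],
};
```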

app.getAsync('/logs/settings/update',[ensureAuthenticated], async (req: express.Request, res: express.Response) => {
const e = req.query;
for (const [setting, val] of Object.entries(req.query)) {

@@ -33,7 +33,8 @@ const managerStats: ManagerStats = {
rulesRunSinceStartTotal: 0,
rulesRunTotal: 0,
rulesTriggeredSinceStartTotal: 0,
rulesTriggeredTotal: 0
rulesTriggeredTotal: 0,
actionedEvents: 0,
};
const botStats: BotStats = {
apiAvg: '-',

@@ -31,3 +31,22 @@ export const botRoute = (required = true) => async (req: Request, res: Response,
}
return next();
}

export const subredditRoute = (required = true) => async (req: Request, res: Response, next: Function) => {

const bot = req.serverBot;

const {subreddit} = req.query as any;
const {name: userName, realManagers = [], isOperator} = req.user as Express.User;
if (!isOperator && !realManagers.includes(subreddit)) {
return res.status(400).send('Cannot access route for subreddit you do not manage or is not run by the bot')
}
const manager = bot.subManagers.find(x => x.displayLabel === subreddit);
if (manager === undefined) {
return res.status(400).send('Cannot access route for subreddit you do not manage or is not run by the bot')
}

req.manager = manager;

next();
}

@@ -1,31 +1,31 @@
import {Request, Response} from 'express';
import {authUserCheck, botRoute} from "../../../middleware";
import {authUserCheck, botRoute, subredditRoute} from "../../../middleware";
import Submission from "snoowrap/dist/objects/Submission";
import winston from 'winston';
import {COMMENT_URL_ID, parseLinkIdentifier, SUBMISSION_URL_ID} from "../../../../../util";
import {booleanMiddle} from "../../../../Common/middleware";
import {Manager} from "../../../../../Subreddit/Manager";

const commentReg = parseLinkIdentifier([COMMENT_URL_ID]);
const submissionReg = parseLinkIdentifier([SUBMISSION_URL_ID]);

const config = async (req: Request, res: Response) => {
const bot = req.serverBot;

const {subreddit} = req.query as any;
const {name: userName, realManagers = [], isOperator} = req.user as Express.User;
if (!isOperator && !realManagers.includes(subreddit)) {
return res.status(400).send('Cannot retrieve config for subreddit you do not manage or is not run by the bot')
}
const manager = bot.subManagers.find(x => x.displayLabel === subreddit);
if (manager === undefined) {
return res.status(400).send('Cannot retrieve config for subreddit you do not manage or is not run by the bot')
}
const manager = req.manager as Manager;

// @ts-ignore
const wiki = await manager.subreddit.getWikiPage(manager.wikiLocation).fetch();
return res.send(wiki.content_md);
};
export const configRoute = [authUserCheck(), botRoute(), config];
export const configRoute = [authUserCheck(), botRoute(), subredditRoute(), config];

const actionedEvents = async (req: Request, res: Response) => {

const manager = req.manager as Manager;

return res.json(manager.actionedEvents);
};
export const actionedEventsRoute = [authUserCheck(), botRoute(), subredditRoute(), actionedEvents];

const action = async (req: Request, res: Response) => {
const bot = req.serverBot;

@@ -221,7 +221,8 @@ const status = () => {
}, cumRaw);
const cacheReq = subManagerData.reduce((acc, curr) => acc + curr.stats.cache.totalRequests, 0);
const cacheMiss = subManagerData.reduce((acc, curr) => acc + curr.stats.cache.totalMiss, 0);
const aManagerWithDefaultResources = bot.subManagers.find(x => x.resources !== undefined && x.resources.cacheSettingsHash === 'default');
const sharedSub = subManagerData.find(x => x.stats.cache.isShared);
const sharedCount = sharedSub !== undefined ? sharedSub.stats.cache.currentKeyCount : 0;
let allManagerData: any = {
name: 'All',
status: bot.running ? 'RUNNING' : 'NOT RUNNING',
@@ -243,7 +244,7 @@ const status = () => {
stats: {
...rest,
cache: {
currentKeyCount: aManagerWithDefaultResources !== undefined ? await aManagerWithDefaultResources.resources.getCacheKeyCount() : 'N/A',
currentKeyCount: sharedCount + subManagerData.reduce((acc, curr) => curr.stats.cache.isShared ? acc : acc + curr.stats.cache.currentKeyCount,0),
isShared: false,
totalRequests: cacheReq,
totalMiss: cacheMiss,

@@ -22,7 +22,7 @@ import SimpleError from "../../Utils/SimpleError";
import {heartbeat} from "./routes/authenticated/applicationRoutes";
import logs from "./routes/authenticated/user/logs";
import status from './routes/authenticated/user/status';
import {actionRoute, configRoute} from "./routes/authenticated/user";
import {actionedEventsRoute, actionRoute, configRoute} from "./routes/authenticated/user";
import action from "./routes/authenticated/user/action";
import {authUserCheck, botRoute} from "./middleware";
import {opStats} from "../Common/util";
@@ -189,6 +189,8 @@ const rcbServer = async function (options: OperatorConfig) {

server.getAsync('/config', ...configRoute);

server.getAsync('/events', ...actionedEventsRoute);

server.getAsync('/action', ...action);

server.getAsync('/check', ...actionRoute);

src/Web/assets/views/events.ejs (new file, 100 lines)
@@ -0,0 +1,100 @@
<html>
<head>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/tailwindcss/2.0.3/tailwind.min.css"
integrity="sha512-wl80ucxCRpLkfaCnbM88y4AxnutbGk327762eM9E/rRTvY/ZGAHWMZrYUq66VQBYMIYDFpDdJAOGSLyIPHZ2IQ=="
crossorigin="anonymous"/>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/tailwindcss/2.0.3/tailwind-dark.min.css"
integrity="sha512-WvyKyiVHgInX5UQt67447ExtRRZG/8GUijaq1MpqTNYp8wY4/EJOG5bI80sRp/5crDy4Z6bBUydZI2OFV3Vbtg=="
crossorigin="anonymous"/>
<script src="https://code.iconify.design/1/1.0.4/iconify.min.js"></script>
<link rel="stylesheet" href="/public/themeToggle.css">
<link rel="stylesheet" href="/public/app.css">
<title><%= title %></title>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width,initial-scale=1.0">
<!--icons from https://heroicons.com -->
<style>
.peek a {
display: none;
}
</style>
</head>
<body style="user-select: none;" class="">
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
<%- include('partials/title') %>
<div class="container mx-auto">
<div class="grid">
<div class="bg-white dark:bg-gray-500 dark:text-white px-3 py-6 space-y-3">
<% if(data.length === 0) { %>
No events have been actioned yet!
<% } %>
<% data.forEach(function (eRes){ %>
<div class="shadow-lg">
<div class="space-x-4 px-4 p-2 leading-2 font-semibold bg-gray-300 dark:bg-gray-700 dark:text-white">
<div class="flex items-center justify-between">
<div>
<span class="peek"><%- eRes.activity.peek %></span><a target="_blank" href="https://reddit.com<%= eRes.activity.link%>">(Link)</a>
</div>
<div class="flex items-center flex-end">
<%= eRes.timestamp %>
</div>
</div>
</div>
<div class="p-4 pl-6 pt-3 space-y-2">
<div><span class="font-semibold">Check:</span> <%= eRes.check %><span class="px-3">➔</span><%= eRes.ruleSummary %></div>
<div>
<span class="font-semibold">Rules:</span>
<ul class="list-inside list-disc">
<% eRes.ruleResults.forEach(function (ruleResult) { %>
<li><%= ruleResult.name %> - <%= ruleResult.triggered%> - <%= ruleResult.result %></li>
<% }) %>
</ul>
</div>
<div><span class="font-semibold">Actions</span>
<ul class="list-inside list-disc">
<% eRes.actionResults.forEach(function (aRes) { %>
<li><%= aRes.name %><%= aRes.dryRun %> - <%= aRes.result %></li>
<% }) %>
</ul>
</div>
</div>
</div>
<% }) %>
</div>
</div>
</div>
<%- include('partials/footer') %>
</div>
<script>
document.querySelectorAll('.theme').forEach(el => {
el.addEventListener('click', e => {
e.preventDefault();
if (e.target.id === 'dark') {
document.body.classList.add('dark');
localStorage.setItem('ms-dark', 'yes');
} else {
document.body.classList.remove('dark');
localStorage.setItem('ms-dark', 'no');
}
document.querySelectorAll('.theme').forEach(el => {
el.classList.remove('font-bold', 'no-underline', 'pointer-events-none');
});
e.target.classList.add('font-bold', 'no-underline', 'pointer-events-none');
})
})

document.querySelector("#themeToggle").checked = localStorage.getItem('ms-dark') !== 'no';
document.querySelector("#themeToggle").onchange = (e) => {
if (e.target.checked === true) {
document.body.classList.add('dark');
localStorage.setItem('ms-dark', 'yes');
} else {
document.body.classList.remove('dark');
localStorage.setItem('ms-dark', 'no');
}
}
</script>
</body>
</html>

@@ -335,14 +335,19 @@
<div data-subreddit="<%= data.name %>"
class="stats botStats reloadStats">
<label>Events</label>
<span><%= data.stats.eventsCheckedSinceStartTotal === undefined ? '-' : data.stats.eventsCheckedSinceStartTotal %></span>

<span>
<%= data.stats.eventsCheckedSinceStartTotal === undefined ? '-' : data.stats.eventsCheckedSinceStartTotal %>
</span>
<label>Checks</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.checksTriggeredSinceStartTotal %></span> Triggered / <span><%= data.stats.checksRunSinceStartTotal %></span> Run
</span>
<span><%= data.stats.checksTriggeredSinceStartTotal %></span> T / <span><%= data.stats.checksRunSinceStartTotal %></span> R
<% if (data.name !== 'All') { %>
<a target="_blank" href="/events?instance=<%= instanceId %>&bot=<%= bot.system.name %>&subreddit=<%= data.name %>" class="underline" style="text-decoration-style: dotted"><%= data.stats.checksTriggeredSinceStartTotal %> T</a>
<% } else { %>
<%= data.stats.checksTriggeredSinceStartTotal %> T
<% } %>/ <span><%= data.stats.checksRunSinceStartTotal %></span> R
</span>

<label>Rules</label>
@@ -360,14 +365,19 @@
<% } %>
<div data-subreddit="<%= data.name %>" class="stats botStats allStats">
<label>Events</label>
<span><%= data.stats.eventsCheckedTotal %></span>

<span>
<%= data.stats.eventsCheckedTotal %>
</span>
<label>Checks</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.checksTriggeredTotal %></span> Triggered / <span><%= data.stats.checksRunTotal %></span> Run
</span>
<span class="cursor-help"><span><%= data.stats.checksTriggeredTotal %></span> T / <span><%= data.stats.checksRunTotal %></span> R</span>
<% if (data.name !== 'All') { %>
<a target="_blank" href="/events?instance=<%= instanceId %>&bot=<%= bot.system.name %>&subreddit=<%= data.name %>" class="underline" style="text-decoration-style: dotted"><%= data.stats.checksTriggeredTotal %> T</a>
<% } else { %>
<%= data.stats.checksTriggeredTotal %> T
<% } %>/ <span><%= data.stats.checksRunTotal %></span> R</span>
</span>

<label>Rules</label>
@@ -632,7 +642,7 @@
const url = urlInput.value;
const dryRun = dryRunCheck.checked ? 1 : 0;

const fetchUrl = `/api/check?instanceId=<%= instanceId %>%bot=${bot}&url=${url}&dryRun=${dryRun}&subreddit=${subreddit}`;
const fetchUrl = `/api/check?instance=<%= instanceId %>&bot=${bot}&url=${url}&dryRun=${dryRun}&subreddit=${subreddit}`;
fetch(fetchUrl);

urlInput.value = '';

src/Web/types/express/index.d.ts (vendored, 2 lines changed)
@@ -1,6 +1,7 @@
import {App} from "../../../App";
import Bot from "../../../Bot";
import {BotInstance, CMInstance} from "../../interfaces";
import {Manager} from "../../../Subreddit/Manager";

declare global {
declare namespace Express {
@@ -10,6 +11,7 @@ declare global {
instance?: CMInstance,
bot?: BotInstance,
serverBot: Bot,
manager?: Manager,
}
interface User {
name: string

@@ -20,7 +20,7 @@ import {App} from "./App";
import apiServer from './Web/Server/server';
import clientServer from './Web/Client';
import Submission from "snoowrap/dist/objects/Submission";
import {COMMENT_URL_ID, parseLinkIdentifier, SUBMISSION_URL_ID} from "./util";
import {COMMENT_URL_ID, isScopeError, parseLinkIdentifier, SUBMISSION_URL_ID} from "./util";
import LoggedError from "./Utils/LoggedError";
import {buildOperatorConfigWithDefaults, parseOperatorConfigFromSources} from "./ConfigBuilder";
import {getLogger} from "./Utils/loggerFactory";
@@ -197,11 +197,8 @@ const program = new Command();
} catch (err) {
if (!err.logged && !(err instanceof LoggedError)) {
const logger = winston.loggers.get('app');
if (err.name === 'StatusCodeError' && err.response !== undefined) {
const authHeader = err.response.headers['www-authenticate'];
if (authHeader !== undefined && authHeader.includes('insufficient_scope')) {
logger.error('Reddit responded with a 403 insufficient_scope, did you choose the correct scopes?');
}
if(isScopeError(err)) {
logger.error('Reddit responded with a 403 insufficient_scope which means the bot is lacking necessary OAUTH scopes to perform general actions.');
}
logger.error(err);
}

src/util.ts (66 lines changed)
@@ -28,6 +28,7 @@ import crypto from "crypto";
import Autolinker from 'autolinker';
import {create as createMemoryStore} from './Utils/memoryStore';
import {MESSAGE} from "triple-beam";
import {RedditUser} from "snoowrap/dist/objects";

const {format} = winston;
const {combine, printf, timestamp, label, splat, errors} = format;
@@ -824,7 +825,7 @@ export const isRedditMedia = (act: Submission): boolean => {
}

export const isExternalUrlSubmission = (act: Comment | Submission): boolean => {
return act instanceof Submission && !act.is_self && !isRedditMedia(act);
return asSubmission(act) && !act.is_self && !isRedditMedia(act);
}

export const parseRegex = (r: string | RegExp, val: string, flags?: string): RegExResult => {
@@ -855,12 +856,19 @@ export const parseRegex = (r: string | RegExp, val: string, flags?: string): Reg
}
}

export async function readJson(path: string, opts: any) {
export async function readConfigFile(path: string, opts: any) {
const {log, throwOnNotFound = true} = opts;
try {
await promises.access(path, constants.R_OK);
const data = await promises.readFile(path);
return JSON.parse(data as unknown as string);
const [configObj, jsonErr, yamlErr] = parseFromJsonOrYamlToObject(data as unknown as string);
if(configObj !== undefined) {
return configObj as object;
}
log.error(`Could not parse wiki page contents as JSON or YAML:`);
log.error(jsonErr);
log.error(yamlErr);
throw new SimpleError('Could not parse wiki page contents as JSON or YAML');
} catch (e) {
const {code} = e;
if (code === 'ENOENT') {
@@ -947,7 +955,7 @@ export const buildCacheOptionsFromProvider = (provider: CacheProvider | any): Ca
}

export const createCacheManager = (options: CacheOptions): Cache => {
const {store, max, ttl = 60, host = 'localhost', port, auth_pass, db} = options;
const {store, max, ttl = 60, host = 'localhost', port, auth_pass, db, ...rest} = options;
switch (store) {
case 'none':
return cacheManager.caching({store: 'none', max, ttl});
@@ -958,7 +966,8 @@ export const createCacheManager = (options: CacheOptions): Cache => {
port,
auth_pass,
db,
ttl
ttl,
...rest,
});
case 'memory':
default:
@@ -984,3 +993,50 @@ export const snooLogWrapper = (logger: Logger) => {
trace: (...args: any[]) => logger.debug(args.slice(0, 2).join(' '), [args.slice(2)]),
}
}

export const isScopeError = (err: any): boolean => {
if(typeof err === 'object' && err.name === 'StatusCodeError' && err.response !== undefined) {
const authHeader = err.response.headers['www-authenticate'];
return authHeader !== undefined && authHeader.includes('insufficient_scope');
}
return false;
}

/**
* Cached activities lose type information when deserialized so need to check properties as well to see if the object is the shape of a Submission
* */
export const isSubmission = (value: any) => {
return value instanceof Submission || value.domain !== undefined;
}

export const asSubmission = (value: any): value is Submission => {
return isSubmission(value);
}
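The doc comment above is the reason these guards exist: an activity read back out of the cache is a plain object, so `instanceof` no longer identifies it. A small illustration under that assumption (the object literal is invented):

```typescript
import Submission from "snoowrap/dist/objects/Submission";

// Invented example of a submission-shaped object after cache deserialization
const cached: any = {domain: 'self.AskReddit', is_self: true, title: 'example'};

console.log(cached instanceof Submission); // false -- prototype information was lost
console.log(asSubmission(cached));         // true  -- falls back to checking the 'domain' property
```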

/**
* Serialized activities store subreddit and user properties as their string representations (instead of proxy)
* */
export const getActivitySubredditName = (activity: any): string => {
if(typeof activity.subreddit === 'string') {
return activity.subreddit;
}
return activity.subreddit.display_name;
}

/**
* Serialized activities store subreddit and user properties as their string representations (instead of proxy)
* */
export const getActivityAuthorName = (author: RedditUser | string): string => {
if(typeof author === 'string') {
return author;
}
return author.name;
}

export const buildCachePrefix = (parts: any[]): string => {
const prefix = parts.filter(x => typeof x === 'string' && x !== '').map(x => x.trim()).map(x => x.split(':')).flat().filter(x => x !== '').join(':')
if(prefix !== '') {
return `${prefix}:`;
}
return prefix;
}
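A few examples of what `buildCachePrefix` produces, based solely on the implementation above; the inputs mirror how it is called elsewhere in this diff (with `'SHARED'` for the default bot cache and `'web'` for the web client cache):

```typescript
// Expected results given the filter/trim/split/join logic above
buildCachePrefix([undefined, 'SHARED']); // => 'SHARED:'
buildCachePrefix(['cm:', 'web']);        // => 'cm:web:'
buildCachePrefix([]);                    // => ''   (no prefix applied)
```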