Compare commits

...

37 Commits

Author SHA1 Message Date
FoxxMD
e2590e50f8 Merge branch 'edge' 2022-01-28 17:27:51 -05:00
FoxxMD
7e8745d226 fix(polling): Fix shared polling behavior for nanny mode changes
* On hard limit stop shared streams
* On nanny mode turned off restart any stopped shared streams
2022-01-27 16:49:03 -05:00
FoxxMD
e2efc85833 fix(polling): Fix running state not changed on error
* Set running to false when error is caught. Was not caught on last stream refactor which changed polling behavior to end if any error is caught rather than waiting for external source to clear interval
* Add debugging/error messages on polling start/stop
2022-01-27 16:47:43 -05:00
FoxxMD
41038b9bcd feat(logging): Implement richer errors everywhere
* Use ErrorWithCause so we can get and print a chain of error causes
* Make reddit error responses in stack traces more readable by replacing them with a "translated" parent response and adding the original as the cause
* Properly handle error formatting for winston by looking at shape of log object for error rather than testing instanceof (see comments in errorAwareFormat)
* Fix formatting in web interface for log lines with white-space pre css and properly splitting timestamp from rest of the message
2022-01-27 16:27:03 -05:00
FoxxMD
9fe8c9568c refactor: Move SimpleError into main Errors module 2022-01-27 11:48:23 -05:00
FoxxMD
9614f7a209 refactor(logging): Implement snoowrap errors "the right way" and implement consolidated logging function
* Implement declaration file for snoowrap errors so they can be imported directly
* Implement logging function to handle boilerplate for known error responses (reddit HTTP response, rate limit, etc.)
2022-01-27 11:43:39 -05:00
FoxxMD
8dbaaf6798 fix(logging): Defaults for log file dir 2022-01-26 12:28:56 -05:00
FoxxMD
c14ad6cb76 feat(logging): Implement separate logging options for each transport type
* Add properties for file, console, and stream in logging object of operator config
* Each property inherits a (useful) subset of winston transport options
2022-01-26 12:09:03 -05:00
FoxxMD
adda280dd3 fix(logging): Fix parsing log dir
* Correct else condition to use log dir when value is not true
* Set level to 'debug' on init logger if no value is provided to help with debugging
2022-01-26 10:27:01 -05:00
FoxxMD
15fd47bdb4 fix(polling): Correct typings for stream getter and check isFinished for Listing 2022-01-26 10:11:06 -05:00
FoxxMD
78b6d8b7b6 feat(polling): Add debug messages when streams are stopped 2022-01-26 10:00:09 -05:00
FoxxMD
61bc63ccc5 fix(polling): Emit config change event to bot only after manager has rebuilt polling 2022-01-26 09:50:30 -05:00
FoxxMD
05df8b7fe2 fix(polling): Use manager eventState to control shared stream callback rather than removing callback when events are stopped
Should prevent edge cases where shared streams are re-parsed while managers are stopped (hard limit) and then removed due to there being no callbacks
2022-01-25 18:07:15 -05:00
FoxxMD
3cb7dffb90 fix(polling): Prevent endless loop when trying to enforce continuity on a stream with no items returned 2022-01-25 09:25:59 -05:00
FoxxMD
d0aafc34b9 feat(remove): Add option to mark activity as spam 2022-01-21 13:03:05 -05:00
FoxxMD
d2e1b5019f chore: Update packages 2022-01-21 13:02:31 -05:00
FoxxMD
aaed0d3419 Merge branch 'edge' 2022-01-21 10:46:11 -05:00
FoxxMD
2a77c71645 fix(usernotes): Fix wiki entity handling to avoid unhandled rejection
Since snoowrap's WikiPage isn't a "real" object, setting it as a property on the class means that if it rejects, the whole application crashes. Fix this by building the wiki proxy every time we need it, before awaiting the promise for edit/retrieval, so that the promise scope is bound to the function we are in (which has a try-catch)
2022-01-20 14:10:39 -05:00
FoxxMD
780e5c185e refactor(author filter): Strongly structure comparison/matching data for more consistent manipulation and output
* Use interface for comparison results at both criteria property level and criteria level
* Implement summary functions to build string results of comparisons
* Output all comparisons to debug and provide summaries to verbose (when applicable)
2022-01-20 14:08:54 -05:00
FoxxMD
38e2a4e69a fix(filter): Missing return on flair failure comparison 2022-01-19 15:49:44 -05:00
FoxxMD
7e0c34b6a3 fix(userflair): Fix wrong assignment for css 2022-01-19 13:10:11 -05:00
FoxxMD
e3ceb90d6f fix(filter): Fix default excludeCondition type
* Expected (prior) behavior is that all exclude criteria must pass, not just one
* Fix missing AND condition logic when all conditions pass
2022-01-19 13:09:45 -05:00
FoxxMD
6977e3bcdf feat(author): Add flair template criteria for author/submission
* Add filtering by flairTemplate id for author/submission
* Refactor flair properties for author/submission to accept string or array of strings
2022-01-19 12:48:58 -05:00
FoxxMD
f382cddc2a fix(filter): Change array merging behavior for authorIs defaults to be more sane
* Don't just overwrite (duh)
* Drop any default filters that include object keys that are also present in user-defined filters -- this way user-defined always takes precedence on merge
2022-01-19 11:52:18 -05:00
FoxxMD
99a5642bdf fix(ui): Change time formatting from 12 to 24 hour 2022-01-18 16:49:07 -05:00
FoxxMD
174d832ab0 docs: Pretty up readme header 2022-01-18 16:08:40 -05:00
FoxxMD
3ee7586fe2 fix(approve): Fix touched entity 2022-01-18 13:37:56 -05:00
FoxxMD
e2c724b4ae feat(approve): Implement approving parent submission of comment 2022-01-18 13:37:22 -05:00
FoxxMD
d581f19a36 feat(logs): Use log objects in api to improve parsing client-side
* Add options for /logs endpoint to stream objects instead of strings
* Always return log objects from /status endpoint -- fixes bug where all bots/subreddits got lines from logs that had newlines
* Return context-aware, formatted log lines to client to reduce line length, i.e. if returning to botA -> subA then do not need to include labels for botA,subA #40
* Shorten timestamp to just time and wrap full timestamp in tooltip #40
* Emit log objects to client to reduce parsing complexity (don't have to regex for bot/subreddit name)
2022-01-18 12:59:59 -05:00
FoxxMD
48dea24bea feat: Improve first-run display in ui and add system view
* Fix bugs in UI when bot does not have a name (configured incorrectly)
* Implement instance system log view for operators
2022-01-18 10:38:39 -05:00
FoxxMD
5fc2a693a0 fix(config): Fix empty yaml config document initialization 2022-01-18 00:06:52 -05:00
FoxxMD
7be0722140 fix(bot): Fix limit rate expiration getter when there is no client initialized 2022-01-18 00:06:24 -05:00
FoxxMD
6ab9fe4bf4 feat(config): Implement persisting bots from invite process to application and config
* write to config when bot is added
* replace/add based on existing bot
* implement specify instance from instances user is operator of
* implement specify subreddits to run on using comma-separated list
* rewrite invite flow ending to be more clear on results and next steps
2022-01-17 17:47:27 -05:00
FoxxMD
5811af0342 feat(config): Refactor config parsing to preserve comments and enable writing
* use node-comment and yaml@next to keep comment information intact
* store ast/source version of parsed config for operator
* implement generic yaml/json operator config classes to keep everything organized and simplify marshalling source to js/string
* refactor file parsing and json/yaml parsing to have better single responsibility
2022-01-17 15:51:43 -05:00
FoxxMD
ed2924264a feat(util): Better check for file/dir permissions 2022-01-17 11:18:23 -05:00
FoxxMD
e9394ccf2e refactor(tooling): Ignore sqlite files 2022-01-17 09:52:18 -05:00
FoxxMD
dec72f95c6 docs: Add discord invite link 2022-01-14 16:42:01 -05:00
53 changed files with 3008 additions and 1151 deletions

9
.gitignore vendored
View File

@@ -383,3 +383,12 @@ dist
**/src/**/*.js
!src/Web/assets/public/yaml/*
**/src/**/*.map
/**/*.sqlite
/**/*.bak
*.yaml
*.json5
!src/Schema/*.json
!docs/**/*.json5
!docs/**/*.yaml
!docs/**/*.json

View File

@@ -1,6 +1,7 @@
[![Latest Release](https://img.shields.io/github/v/release/foxxmd/context-mod)](https://github.com/FoxxMD/context-mod/releases)
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
[![Docker Pulls](https://img.shields.io/docker/pulls/foxxmd/context-mod)](https://hub.docker.com/r/foxxmd/context-mod)
# ContextMod [![Latest Release](https://img.shields.io/github/v/release/foxxmd/context-mod)](https://github.com/FoxxMD/context-mod/releases) [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) [![Docker Pulls](https://img.shields.io/docker/pulls/foxxmd/context-mod)](https://hub.docker.com/r/foxxmd/context-mod)
<img src="/docs/logo.png" align="right"
alt="ContextMod logo" width="180" height="176">
**Context Mod** (CM) is an event-based [reddit](https://reddit.com) moderation bot built on top of [snoowrap](https://github.com/not-an-aardvark/snoowrap) and written in [typescript](https://www.typescriptlang.org/).

BIN
docs/logo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

867
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -35,6 +35,7 @@
"cache-manager": "^3.4.4",
"cache-manager-redis-store": "^2.0.0",
"commander": "^8.0.0",
"comment-json": "^4.1.1",
"cookie-parser": "^1.3.5",
"dayjs": "^1.10.5",
"deepmerge": "^4.2.2",
@@ -52,7 +53,6 @@
"he": "^1.2.0",
"http-proxy": "^1.18.1",
"image-size": "^1.0.0",
"js-yaml": "^4.1.0",
"json5": "^2.2.0",
"jsonwebtoken": "^8.5.1",
"leven": "^3.1.0",
@@ -68,6 +68,7 @@
"passport-custom": "^1.1.1",
"passport-jwt": "^4.0.0",
"pixelmatch": "^5.2.1",
"pony-cause": "^1.1.1",
"pretty-print-json": "^1.0.3",
"safe-stable-stringify": "^1.1.1",
"snoostorm": "^1.5.2",
@@ -82,6 +83,7 @@
"winston-daily-rotate-file": "^4.5.5",
"winston-duplex": "^0.1.1",
"winston-transport": "^4.4.0",
"yaml": "2.0.0-10",
"zlib": "^1.0.5"
},
"devDependencies": {
@@ -111,8 +113,9 @@
"@types/string-similarity": "^4.0.0",
"@types/tcp-port-used": "^1.0.0",
"@types/triple-beam": "^1.3.2",
"ts-essentials": "^9.1.2",
"ts-json-schema-generator": "^0.93.0",
"typescript-json-schema": "^0.50.1"
"typescript-json-schema": "~0.53"
},
"optionalDependencies": {
"sharp": "^0.29.1"

View File

@@ -1,31 +1,62 @@
import {ActionJson, ActionConfig} from "./index";
import {ActionJson, ActionConfig, ActionOptions} from "./index";
import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import Snoowrap from "snoowrap";
import {RuleResult} from "../Rule";
import {ActionProcessResult} from "../Common/interfaces";
import Submission from "snoowrap/dist/objects/Submission";
import Comment from "snoowrap/dist/objects/Comment";
export class ApproveAction extends Action {
targets: ApproveTarget[]
getKind() {
return 'Approve';
}
constructor(options: ApproveOptions) {
super(options);
const {
targets = ['self']
} = options;
this.targets = targets;
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
const touchedEntities = [];
//snoowrap typing issue, thinks comments can't be locked
// @ts-ignore
if (item.approved) {
this.logger.warn('Item is already approved');
return {
dryRun,
success: false,
result: 'Item is already approved'
const realTargets = item instanceof Submission ? ['self'] : this.targets;
for(const target of realTargets) {
let targetItem = item;
if(target !== 'self' && item instanceof Comment) {
targetItem = await this.resources.getActivity(this.client.getSubmission(item.link_id));
}
// @ts-ignore
if (item.approved) {
const msg = `${target === 'self' ? 'Item' : 'Comment\'s parent Submission'} is already approved`;
this.logger.warn(msg);
return {
dryRun,
success: false,
result: msg
}
}
if (!dryRun) {
// make sure we have an actual item and not just a plain object from cache
if(target !== 'self' && !(targetItem instanceof Submission)) {
// @ts-ignore
targetItem = await this.client.getSubmission((item as Comment).link_id).fetch();
}
// @ts-ignore
touchedEntities.push(await targetItem.approve());
}
}
if (!dryRun) {
// @ts-ignore
touchedEntities.push(await item.approve());
}
return {
dryRun,
success: true,
@@ -34,8 +65,20 @@ export class ApproveAction extends Action {
}
}
export interface ApproveActionConfig extends ActionConfig {
export type ApproveTarget = 'self' | 'parent';
export interface ApproveOptions extends ApproveActionConfig, ActionOptions {}
export interface ApproveActionConfig extends ActionConfig {
/**
* Specify which Activities to approve
*
* This setting is only applicable if the Activity being acted on is a **comment**. On a **submission** the setting does nothing
*
* * self => approve activity being checked (comment)
* * parent => approve parent (submission) of activity being checked (comment)
* */
targets?: ApproveTarget[]
}
/**

View File

@@ -12,7 +12,8 @@ import {
REDDIT_ENTITY_REGEX_URL,
truncateStringToLength
} from "../util";
import SimpleError from "../Utils/SimpleError";
import {SimpleError} from "../Utils/Errors";
import {ErrorWithCause} from "pony-cause";
export class MessageAction extends Action {
content: string;
@@ -65,10 +66,7 @@ export class MessageAction extends Action {
recipient = `/r/${entityData.name}`;
}
} catch (err: any) {
this.logger.error(`'to' field for message was not in a valid format. See ${REDDIT_ENTITY_REGEX_URL} for valid examples`);
this.logger.error(err);
err.logged = true;
throw err;
throw new ErrorWithCause(`'to' field for message was not in a valid format. See ${REDDIT_ENTITY_REGEX_URL} for valid examples`, {cause: err});
}
if(recipient.includes('/r/') && this.asSubreddit) {
throw new SimpleError(`Cannot send a message as a subreddit to another subreddit. Requested recipient: ${recipient}`);

View File

@@ -1,4 +1,4 @@
import {ActionJson, ActionConfig} from "./index";
import {ActionJson, ActionConfig, ActionOptions} from "./index";
import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../Rule";
@@ -6,10 +6,20 @@ import {activityIsRemoved} from "../Utils/SnoowrapUtils";
import {ActionProcessResult} from "../Common/interfaces";
export class RemoveAction extends Action {
spam: boolean;
getKind() {
return 'Remove';
}
constructor(options: RemoveOptions) {
super(options);
const {
spam = false,
} = options;
this.spam = spam;
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
const touchedEntities = [];
@@ -22,9 +32,12 @@ export class RemoveAction extends Action {
result: 'Item is already removed',
}
}
if (this.spam) {
this.logger.verbose('Marking as spam on removal');
}
if (!dryRun) {
// @ts-ignore
await item.remove();
await item.remove({spam: this.spam});
touchedEntities.push(item);
}
@@ -36,13 +49,16 @@ export class RemoveAction extends Action {
}
}
export interface RemoveActionConfig extends ActionConfig {
export interface RemoveOptions extends RemoveActionConfig, ActionOptions {
}
export interface RemoveActionConfig extends ActionConfig {
spam?: boolean
}
/**
* Remove the Activity
* */
export interface RemoveActionJson extends RemoveActionConfig, ActionJson {
kind: 'remove'
kind: 'remove'
}

View File

@@ -12,7 +12,7 @@ export class UserFlairAction extends Action {
super(options);
this.text = options.text === null || options.text === '' ? undefined : options.text;
this.css = options.css === null || options.text === '' ? undefined : options.text;
this.css = options.css === null || options.css === '' ? undefined : options.css;
this.flair_template_id = options.flair_template_id === null || options.flair_template_id === '' ? undefined : options.flair_template_id;
}

View File

@@ -1,7 +1,7 @@
import winston, {Logger} from "winston";
import dayjs, {Dayjs} from "dayjs";
import {getLogger} from "./Utils/loggerFactory";
import {Invokee, OperatorConfig} from "./Common/interfaces";
import {Invokee, OperatorConfig, OperatorConfigWithFileContext, OperatorFileConfig} from "./Common/interfaces";
import Bot from "./Bot";
import LoggedError from "./Utils/LoggedError";
import {sleep} from "./util";
@@ -14,7 +14,10 @@ export class App {
error: any;
constructor(config: OperatorConfig) {
config: OperatorConfig;
fileConfig: OperatorFileConfig;
constructor(config: OperatorConfigWithFileContext) {
const {
operator: {
name,
@@ -23,6 +26,11 @@ export class App {
bots = [],
} = config;
const {fileConfig, ...rest} = config;
this.config = rest;
this.fileConfig = fileConfig;
this.logger = getLogger(config.logging);
this.logger.info(`Operators: ${name.length === 0 ? 'None Specified' : name.join(', ')}`)

View File

@@ -46,15 +46,20 @@ export interface AuthorCriteria {
* */
name?: string[],
/**
* A list of (user) flair css class values from the subreddit to match against
* A (user) flair css class (or list of) from the subreddit to match against
* @examples ["red"]
* */
flairCssClass?: string[],
flairCssClass?: string | string[],
/**
* A list of (user) flair text values from the subreddit to match against
* A (user) flair text value (or list of) from the subreddit to match against
* @examples ["Approved"]
* */
flairText?: string[],
flairText?: string | string[],
/**
* A (user) flair template id (or list of) from the subreddit to match against
* */
flairTemplate?: string | string[]
/**
* Is the author a moderator?
* */
@@ -146,8 +151,12 @@ export class Author implements AuthorCriteria {
constructor(options: AuthorCriteria) {
this.name = options.name;
this.flairCssClass = options.flairCssClass;
this.flairText = options.flairText;
if(options.flairCssClass !== undefined) {
this.flairCssClass = typeof options.flairCssClass === 'string' ? [options.flairCssClass] : options.flairCssClass;
}
if(options.flairText !== undefined) {
this.flairText = typeof options.flairText === 'string' ? [options.flairText] : options.flairText;
}
this.isMod = options.isMod;
this.userNotes = options.userNotes;
this.age = options.age;

View File

@@ -16,10 +16,10 @@ import {
} from "../Common/interfaces";
import {
createRetryHandler,
formatNumber,
formatNumber, getExceptionMessage,
mergeArr,
parseBool,
parseDuration,
parseDuration, parseMatchMessage,
parseSubredditName, RetryOptions,
sleep,
snooLogWrapper
@@ -30,8 +30,8 @@ import {CommentStream, ModQueueStream, SPoll, SubmissionStream, UnmoderatedStrea
import {BotResourcesManager} from "../Subreddit/SubredditResources";
import LoggedError from "../Utils/LoggedError";
import pEvent from "p-event";
import SimpleError from "../Utils/SimpleError";
import {isRateLimitError, isStatusError} from "../Utils/Errors";
import {SimpleError, isRateLimitError, isRequestError, isScopeError, isStatusError, CMError} from "../Utils/Errors";
import {ErrorWithCause} from "pony-cause";
class Bot {
@@ -234,10 +234,9 @@ class Bot {
}
createSharedStreamErrorListener = (name: string) => async (err: any) => {
this.logger.error(`Polling error occurred on stream ${name.toUpperCase()}`, err);
const shouldRetry = await this.sharedStreamRetryHandler(err);
if(shouldRetry) {
(this.cacheManager.modStreams.get(name) as SPoll<any>).startInterval(false);
(this.cacheManager.modStreams.get(name) as SPoll<any>).startInterval(false, 'Within retry limits');
} else {
for(const m of this.subManagers) {
if(m.sharedStreamCallbacks.size > 0) {
@@ -255,7 +254,7 @@ class Bot {
return;
}
for(const i of listing) {
const foundManager = this.subManagers.find(x => x.subreddit.display_name === i.subreddit.display_name && x.sharedStreamCallbacks.get(name) !== undefined);
const foundManager = this.subManagers.find(x => x.subreddit.display_name === i.subreddit.display_name && x.sharedStreamCallbacks.get(name) !== undefined && x.eventsState.state === RUNNING);
if(foundManager !== undefined) {
foundManager.sharedStreamCallbacks.get(name)(i);
if(this.stagger !== undefined) {
@@ -280,22 +279,16 @@ class Bot {
if (initial) {
this.logger.error('An error occurred while trying to initialize the Reddit API Client which would prevent the entire application from running.');
}
if (err.name === 'StatusCodeError') {
const authHeader = err.response.headers['www-authenticate'];
if (authHeader !== undefined && authHeader.includes('insufficient_scope')) {
this.logger.error('Reddit responded with a 403 insufficient_scope. Please ensure you have chosen the correct scopes when authorizing your account.');
} else if (err.statusCode === 401) {
this.logger.error('It is likely a credential is missing or incorrect. Check clientId, clientSecret, refreshToken, and accessToken');
} else if(err.statusCode === 400) {
this.logger.error('Credentials may have been invalidated due to prior behavior. The error message may contain more information.');
}
this.logger.error(`Error Message: ${err.message}`);
} else {
this.logger.error(err);
}
this.error = `Error occurred while testing Reddit API client: ${err.message}`;
err.logged = true;
throw err;
const hint = getExceptionMessage(err, {
401: 'Likely a credential is missing or incorrect. Check clientId, clientSecret, refreshToken, and accessToken',
400: 'Credentials may have been invalidated manually or by reddit due to behavior',
});
let msg = `Error occurred while testing Reddit API client${hint !== undefined ? `: ${hint}` : ''}`;
this.error = msg;
const clientError = new CMError(msg, {cause: err});
clientError.logged = true;
this.logger.error(clientError);
throw clientError;
}
}
@@ -375,7 +368,7 @@ class Bot {
let processed;
if (stream !== undefined) {
this.logger.info('Restarting SHARED COMMENT STREAM due to a subreddit config change');
stream.end();
stream.end('Replacing with a new stream with updated subreddits');
processed = stream.processed;
}
if (sharedCommentsSubreddits.length > 100) {
@@ -397,7 +390,7 @@ class Bot {
} else {
const stream = this.cacheManager.modStreams.get('newComm');
if (stream !== undefined) {
stream.end();
stream.end('Determined no managers are listening on shared stream parsing');
}
}
@@ -408,7 +401,7 @@ class Bot {
let processed;
if (stream !== undefined) {
this.logger.info('Restarting SHARED SUBMISSION STREAM due to a subreddit config change');
stream.end();
stream.end('Replacing with a new stream with updated subreddits');
processed = stream.processed;
}
if (sharedSubmissionsSubreddits.length > 100) {
@@ -430,7 +423,7 @@ class Bot {
} else {
const stream = this.cacheManager.modStreams.get('newSub');
if (stream !== undefined) {
stream.end();
stream.end('Determined no managers are listening on shared stream parsing');
}
}
@@ -448,7 +441,7 @@ class Bot {
defaultUnmoderatedStream.on('listing', this.createSharedStreamListingListener('unmoderated'));
this.cacheManager.modStreams.set('unmoderated', defaultUnmoderatedStream);
} else if (!isUnmoderatedShared && unmoderatedstream !== undefined) {
unmoderatedstream.end();
unmoderatedstream.end('Determined no managers are listening on shared stream parsing');
}
const isModqueueShared = !this.sharedStreams.includes('modqueue') ? false : this.subManagers.some(x => x.isPollingShared('modqueue'));
@@ -465,7 +458,7 @@ class Bot {
defaultModqueueStream.on('listing', this.createSharedStreamListingListener('modqueue'));
this.cacheManager.modStreams.set('modqueue', defaultModqueueStream);
} else if (isModqueueShared && modqueuestream !== undefined) {
modqueuestream.end();
modqueuestream.end('Determined no managers are listening on shared stream parsing');
}
}
@@ -473,10 +466,12 @@ class Bot {
try {
await manager.parseConfiguration('system', true, {suppressNotification: true, suppressChangeEvent: true});
} catch (err: any) {
if (!(err instanceof LoggedError)) {
this.logger.error(`Config was not valid:`, {subreddit: manager.subreddit.display_name_prefixed});
this.logger.error(err, {subreddit: manager.subreddit.display_name_prefixed});
err.logged = true;
if(err.logged !== true) {
const normalizedError = new ErrorWithCause(`Bot could not start manager because config was not valid`, {cause: err});
// @ts-ignore
this.logger.error(normalizedError, {subreddit: manager.subreddit.display_name_prefixed});
} else {
this.logger.error('Bot could not start manager because config was not valid', {subreddit: manager.subreddit.display_name_prefixed});
}
}
}
@@ -666,9 +661,11 @@ class Bot {
}
}
} catch (err: any) {
this.logger.info('Stopping event polling to prevent activity processing queue from backing up. Will be restarted when config update succeeds.')
await s.stopEvents('system', {reason: 'Invalid config will cause events to pile up in queue. Will be restarted when config update succeeds (next heartbeat).'});
if(!(err instanceof LoggedError)) {
if(s.eventsState.state === RUNNING) {
this.logger.info('Stopping event polling to prevent activity processing queue from backing up. Will be restarted when config update succeeds.')
await s.stopEvents('system', {reason: 'Invalid config will cause events to pile up in queue. Will be restarted when config update succeeds (next heartbeat).'});
}
if(err.logged !== true) {
this.logger.error(err, {subreddit: s.displayLabel});
}
if(this.nextHeartbeat !== undefined) {
@@ -744,6 +741,10 @@ class Bot {
m.notificationManager.handle('runStateChanged', 'Hard Limit Triggered', `Hard Limit of ${this.hardLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit event polling has been paused.`, 'system', 'warn');
}
for(const [k,v] of this.cacheManager.modStreams) {
v.end('Hard limit cutoff');
}
this.nannyMode = 'hard';
return;
}
@@ -810,6 +811,7 @@ class Bot {
await m.startEvents('system', {reason: 'API Nanny has been turned off due to better API conditions'});
}
}
await this.runSharedStreams(true);
this.nannyMode = undefined;
}

View File

@@ -31,6 +31,8 @@ import {ActionObjectJson, RuleJson, RuleObjectJson, ActionJson as ActionTypeJson
import {checkAuthorFilter, SubredditResources} from "../Subreddit/SubredditResources";
import {Author, AuthorCriteria, AuthorOptions} from '..';
import {ExtendedSnoowrap} from '../Utils/SnoowrapClients';
import {isRateLimitError} from "../Utils/Errors";
import {ErrorWithCause} from "pony-cause";
const checkLogName = truncateStringToLength(25);
@@ -66,7 +68,7 @@ export abstract class Check implements ICheck {
itemIs = [],
authorIs: {
include = [],
excludeCondition = 'OR',
excludeCondition,
exclude = [],
} = {},
dryRun,
@@ -248,9 +250,7 @@ export abstract class Check implements ICheck {
this.logger.info(`${PASS} => Rules: ${resultsSummary(allResults, this.condition)}`);
return [true, allRuleResults];
} catch (e: any) {
e.logged = true;
this.logger.warn(`Running rules failed due to uncaught exception`, e);
throw e;
throw new ErrorWithCause('Running rules failed due to error', {cause: e});
}
}

View File

@@ -0,0 +1,27 @@
import {ConfigFormat} from "../types";
export interface ConfigDocumentInterface<DocumentType> {
format: ConfigFormat;
parsed: DocumentType
//parsingError: Error | string;
raw: string;
location?: string;
toString(): string;
toJS(): object;
}
abstract class AbstractConfigDocument<DocumentType> implements ConfigDocumentInterface<DocumentType> {
public abstract format: ConfigFormat;
public abstract parsed: DocumentType;
//public abstract parsingError: Error | string;
constructor(public raw: string, public location?: string) {
}
public abstract toString(): string;
public abstract toJS(): object;
}
export default AbstractConfigDocument;

View File

@@ -0,0 +1,30 @@
import AbstractConfigDocument from "./AbstractConfigDocument";
import {stringify, parse} from 'comment-json';
import JSON5 from 'json5';
import {ConfigFormat} from "../types";
import {OperatorJsonConfig} from "../interfaces";
class JsonConfigDocument extends AbstractConfigDocument<OperatorJsonConfig> {
public parsed: OperatorJsonConfig;
protected cleanParsed: OperatorJsonConfig;
public format: ConfigFormat;
public constructor(raw: string, location?: string) {
super(raw, location);
this.parsed = parse(raw);
this.cleanParsed = JSON5.parse(raw);
this.format = 'json';
}
public toJS(): OperatorJsonConfig {
return this.cleanParsed;
}
public toString(): string {
return stringify(this.parsed, null, 1);
}
}
export default JsonConfigDocument;

View File

@@ -0,0 +1,54 @@
import YamlConfigDocument from "../YamlConfigDocument";
import JsonConfigDocument from "../JsonConfigDocument";
import {YAMLMap, YAMLSeq} from "yaml";
import {BotInstanceJsonConfig, OperatorJsonConfig} from "../../interfaces";
import {assign} from 'comment-json';
export interface OperatorConfigDocumentInterface {
addBot(botData: BotInstanceJsonConfig): void;
toJS(): OperatorJsonConfig;
}
export class YamlOperatorConfigDocument extends YamlConfigDocument implements OperatorConfigDocumentInterface {
addBot(botData: BotInstanceJsonConfig) {
const bots = this.parsed.get('bots') as YAMLSeq;
if (bots === undefined) {
this.parsed.add({key: 'bots', value: [botData]});
} else if (botData.name !== undefined) {
// overwrite if we find an existing
const existingIndex = bots.items.findIndex(x => (x as YAMLMap).get('name') === botData.name);
if (existingIndex !== -1) {
this.parsed.setIn(['bots', existingIndex], botData);
} else {
this.parsed.addIn(['bots'], botData);
}
} else {
this.parsed.addIn(['bots'], botData);
}
}
toJS(): OperatorJsonConfig {
return super.toJS();
}
}
export class JsonOperatorConfigDocument extends JsonConfigDocument implements OperatorConfigDocumentInterface {
addBot(botData: BotInstanceJsonConfig) {
if (this.parsed.bots === undefined) {
this.parsed.bots = [botData];
} else if (botData.name !== undefined) {
const existingIndex = this.parsed.bots.findIndex(x => x.name === botData.name);
if (existingIndex !== -1) {
this.parsed.bots[existingIndex] = assign(this.parsed.bots[existingIndex], botData);
} else {
this.parsed.bots.push(botData);
}
} else {
this.parsed.bots.push(botData);
}
}
toJS(): OperatorJsonConfig {
return super.toJS();
}
}

View File

@@ -0,0 +1,24 @@
import AbstractConfigDocument from "./AbstractConfigDocument";
import {Document, parseDocument} from 'yaml';
import {ConfigFormat} from "../types";
class YamlConfigDocument extends AbstractConfigDocument<Document> {
public parsed: Document;
public format: ConfigFormat;
public constructor(raw: string, location?: string) {
super(raw, location);
this.parsed = parseDocument(raw);
this.format = 'yaml';
}
public toJS(): object {
return this.parsed.toJS();
}
public toString(): string {
return this.parsed.toString();
}
}
export default YamlConfigDocument;

View File

@@ -3,9 +3,9 @@ import {Submission} from "snoowrap/dist/objects";
import {URL} from "url";
import {absPercentDifference, getSharpAsync, isValidImageURL} from "../util";
import sizeOf from "image-size";
import SimpleError from "../Utils/SimpleError";
import {Sharp} from "sharp";
import {blockhash} from "./blockhash/blockhash";
import {SimpleError} from "../Utils/Errors";
export interface ImageDataOptions {
width?: number,

View File

@@ -8,7 +8,14 @@ import {IncomingMessage} from "http";
import Submission from "snoowrap/dist/objects/Submission";
import Comment from "snoowrap/dist/objects/Comment";
import RedditUser from "snoowrap/dist/objects/RedditUser";
import {AuthorOptions} from "../Author/Author";
import {AuthorCriteria, AuthorOptions} from "../Author/Author";
import {ConfigFormat} from "./types";
import AbstractConfigDocument, {ConfigDocumentInterface} from "./Config/AbstractConfigDocument";
import {Document as YamlDocument} from 'yaml';
import {JsonOperatorConfigDocument, YamlOperatorConfigDocument} from "./Config/Operator";
import {ConsoleTransportOptions} from "winston/lib/winston/transports";
import {DailyRotateFileTransportOptions} from "winston-daily-rotate-file";
import {DuplexTransportOptions} from "winston-duplex/dist/DuplexTransport";
/**
* An ISO 8601 Duration
@@ -939,8 +946,9 @@ export interface SubmissionState extends ActivityState {
* */
title?: string
link_flair_text?: string
link_flair_css_class?: string
link_flair_text?: string | string[]
link_flair_css_class?: string | string[]
flairTemplate?: string | string[]
}
// properties calculated/derived by CM -- not provided as plain values by reddit
@@ -1066,6 +1074,92 @@ export interface RegExResult {
}
// Minimum level names accepted by all logging transports (winston levels subset)
type LogLevel = "error" | "warn" | "info" | "verbose" | "debug";

/**
 * Options for logging to the console, derived from winston's console transport options
 * */
export type LogConsoleOptions = Pick<ConsoleTransportOptions, 'silent' | 'eol' | 'stderrLevels' | 'consoleWarnLevels'> & {
    /** The minimum log level to output for this transport */
    level?: LogLevel
}

/**
 * Options for rotating-file logging, derived from winston-daily-rotate-file transport
 * options (with stream/handler/format hooks omitted since they are managed internally)
 * */
export type LogFileOptions = Omit<DailyRotateFileTransportOptions, 'stream' | 'handleRejections' | 'options' | 'handleExceptions' | 'format' | 'log' | 'logv' | 'close' | 'dirname'> & {
    /** The minimum log level to output for this transport */
    level?: LogLevel
    /**
     * The absolute path to a directory where rotating log files should be stored.
     *
     * * If not present or `null` or `false` no log files will be created
     * * If `true` logs will be stored at `[working directory]/logs`
     *
     * * ENV => `LOG_DIR`
     * * ARG => `--logDir [dir]`
     *
     * @examples ["/var/log/contextmod"]
     * */
    dirname?: string | boolean | null
}

// export type StrongFileOptions = LogFileOptions & {
//     dirname?: string
// }

/**
 * Options for streaming logs (to api/web), derived from winston-duplex transport options
 * */
export type LogStreamOptions = Omit<DuplexTransportOptions, 'name' | 'stream' | 'handleRejections' | 'handleExceptions' | 'format' | 'log' | 'logv' | 'close'> & {
    /** The minimum log level to output for this transport */
    level?: LogLevel
}

export interface LoggingOptions {
    /**
     * The minimum log level to output. The log level set will output logs at its level **and all levels above it:**
     *
     * * `error`
     * * `warn`
     * * `info`
     * * `verbose`
     * * `debug`
     *
     * Note: `verbose` will display *a lot* of information on the status/result of run rules/checks/actions etc. which is very useful for testing configurations. Once your bot is stable changing the level to `info` will reduce log noise.
     *
     * * ENV => `LOG_LEVEL`
     * * ARG => `--logLevel <level>`
     *
     * @default "verbose"
     * @examples ["verbose"]
     * */
    level?: LogLevel,
    /**
     * **DEPRECATED** - Use `file.dirname` instead
     * The absolute path to a directory where rotating log files should be stored.
     *
     * * If not present or `null` or `false` no log files will be created
     * * If `true` logs will be stored at `[working directory]/logs`
     *
     * * ENV => `LOG_DIR`
     * * ARG => `--logDir [dir]`
     *
     * @examples ["/var/log/contextmod"]
     * @deprecated
     * @see logging.file.dirname
     * */
    path?: string | boolean | null
    /**
     * Options for Rotating File logging
     * */
    file?: LogFileOptions
    /**
     * Options for logging to api/web
     * */
    stream?: LogStreamOptions
    /**
     * Options for logging to console
     * */
    console?: LogConsoleOptions
}

/**
 * LoggingOptions with all per-transport option objects guaranteed to be present
 * (top-level `level` may still be absent and resolved at logger creation)
 * */
export type StrongLoggingOptions = Required<Pick<LoggingOptions, 'stream' | 'console' | 'file'>> & {
    level?: LogLevel
};

/**
 * Options consumed by the logger factory when building a winston logger
 * */
export type LoggerFactoryOptions = StrongLoggingOptions & {
    /** Extra, already-constructed winston transports to attach in addition to the standard ones */
    additionalTransports?: any[]
    /** Label applied to log lines when no more specific label is supplied */
    defaultLabel?: string
}
/**
* Available cache providers
* */
@@ -1576,38 +1670,7 @@ export interface OperatorJsonConfig {
/**
* Settings to configure global logging defaults
* */
logging?: {
/**
* The minimum log level to output. The log level set will output logs at its level **and all levels above it:**
*
* * `error`
* * `warn`
* * `info`
* * `verbose`
* * `debug`
*
* Note: `verbose` will display *a lot* of information on the status/result of run rules/checks/actions etc. which is very useful for testing configurations. Once your bot is stable changing the level to `info` will reduce log noise.
*
* * ENV => `LOG_LEVEL`
* * ARG => `--logLevel <level>`
*
* @default "verbose"
* @examples ["verbose"]
* */
level?: LogLevel,
/**
* The absolute path to a directory where rotating log files should be stored.
*
* * If not present or `null` no log files will be created
* * If `true` logs will be stored at `[working directory]/logs`
*
* * ENV => `LOG_DIR`
* * ARG => `--logDir [dir]`
*
* @examples ["/var/log/contextmod"]
* */
path?: string,
},
logging?: LoggingOptions,
/**
* Settings to configure the default caching behavior globally
@@ -1807,10 +1870,7 @@ export interface OperatorConfig extends OperatorJsonConfig {
display?: string,
},
notifications?: NotificationConfig
logging: {
level: LogLevel,
path?: string,
},
logging: StrongLoggingOptions,
caching: StrongCache,
web: {
port: number,
@@ -1837,6 +1897,15 @@ export interface OperatorConfig extends OperatorJsonConfig {
credentials: ThirdPartyCredentialsJsonConfig
}
export interface OperatorFileConfig {
document: YamlOperatorConfigDocument | JsonOperatorConfigDocument
isWriteable?: boolean
}
export interface OperatorConfigWithFileContext extends OperatorConfig {
fileConfig: OperatorFileConfig
}
//export type OperatorConfig = Required<OperatorJsonConfig>;
interface CacheTypeStat {
@@ -1904,22 +1973,6 @@ export interface RedditEntity {
type: RedditEntityType
}
export interface StatusCodeError extends Error {
name: 'StatusCodeError',
statusCode: number,
message: string,
response: IncomingMessage,
error: Error
}
export interface RequestError extends Error {
name: 'RequestError',
statusCode: number,
message: string,
response: IncomingMessage,
error: Error
}
export interface HistoricalStatsDisplay extends HistoricalStats {
checksRunTotal: number
checksFromCacheTotal: number
@@ -2023,3 +2076,27 @@ export interface StringComparisonOptions {
lengthWeight?: number,
transforms?: ((str: string) => string)[]
}
/**
 * Result of testing a single property from a filter criteria object
 * */
export interface FilterCriteriaPropertyResult<T> {
    // the criteria property that was evaluated
    property: keyof T
    // the value(s) the criteria expected to match
    expected: (string | boolean | number)[]
    // the value actually found on the tested item, if any
    found?: string | boolean | number | null
    // null when the property test was skipped or not applicable
    passed?: null | boolean
    // human-readable explanation for the outcome
    reason?: string
    behavior: FilterBehavior
}

/**
 * Aggregated result of testing one whole criteria object
 * */
export interface FilterCriteriaResult<T> {
    behavior: FilterBehavior
    // the criteria object that was tested (EX AuthorCriteria | TypedActivityStates)
    criteria: T
    // one result per property that was evaluated on the criteria
    propertyResults: FilterCriteriaPropertyResult<T>[]
    passed: boolean
}

// 'include' => item must match the criteria, 'exclude' => item must NOT match the criteria
export type FilterBehavior = 'include' | 'exclude'

/**
 * Overall outcome of running a set of criteria against an item
 * */
export interface FilterResult<T> {
    criteriaResults: FilterCriteriaResult<T>[]
    // how the individual criteria results are combined to produce `passed`
    join: JoinOperands
    passed: boolean
}

View File

@@ -28,3 +28,5 @@ export type SetRandomInterval = (
minDelay: number,
maxDelay: number,
) => { clear: () => void };
export type ConfigFormat = 'json' | 'yaml';

26
src/Common/typings/support.d.ts vendored Normal file
View File

@@ -0,0 +1,26 @@
/**
 * Ambient typings for snoowrap's error classes (shipped untyped) so they can be
 * imported directly and used in catch-clause narrowing.
 * */
declare module 'snoowrap/dist/errors' {

    export interface InvalidUserError extends Error {
    }

    export interface NoCredentialsError extends Error {
    }

    export interface InvalidMethodCallError extends Error {
    }

    /** Base shape for snoowrap errors produced by failed HTTP requests */
    export interface RequestError extends Error {
        statusCode: number,
        // FIX: original referenced `http.IncomingMessage` without `http` being in
        // scope in this ambient module; use an inline import type instead
        response: import('http').IncomingMessage
        error: Error
    }

    /** Thrown when reddit responds with a non-2xx status code */
    export interface StatusCodeError extends RequestError {
        name: 'StatusCodeError',
    }

    /** Thrown when reddit's rate limit has been exhausted */
    export interface RateLimitError extends RequestError {
        name: 'RateLimitError',
    }
}

View File

@@ -1,12 +1,12 @@
import {Logger} from "winston";
import {
buildCacheOptionsFromProvider, buildCachePrefix,
createAjvFactory,
createAjvFactory, fileOrDirectoryIsWriteable,
mergeArr,
normalizeName,
overwriteMerge,
parseBool, randomId,
readConfigFile,
parseBool, parseFromJsonOrYamlToObject, randomId,
readConfigFile, removeFromSourceIfKeysExistsInDestination,
removeUndefinedKeys
} from "./util";
import {CommentCheck} from "./Check/CommentCheck";
@@ -35,11 +35,11 @@ import {
RedditCredentials,
BotCredentialsJsonConfig,
BotCredentialsConfig,
FilterCriteriaDefaults, TypedActivityStates
FilterCriteriaDefaults, TypedActivityStates, OperatorFileConfig
} from "./Common/interfaces";
import {isRuleSetJSON, RuleSetJson, RuleSetObjectJson} from "./Rule/RuleSet";
import deepEqual from "fast-deep-equal";
import {ActionJson, ActionObjectJson, RuleJson, RuleObjectJson} from "./Common/types";
import {ActionJson, ActionObjectJson, ConfigFormat, RuleJson, RuleObjectJson} from "./Common/types";
import {isActionJson} from "./Action";
import {getLogger} from "./Utils/loggerFactory";
import {GetEnvVars} from 'env-cmd';
@@ -49,6 +49,16 @@ import * as process from "process";
import {cacheOptDefaults, cacheTTLDefaults, filterCriteriaDefault} from "./Common/defaults";
import objectHash from "object-hash";
import {AuthorCriteria, AuthorOptions} from "./Author/Author";
import path from 'path';
import {
JsonOperatorConfigDocument,
OperatorConfigDocumentInterface,
YamlOperatorConfigDocument
} from "./Common/Config/Operator";
import {ConfigDocumentInterface} from "./Common/Config/AbstractConfigDocument";
import {Document as YamlDocument} from "yaml";
import {SimpleError} from "./Utils/Errors";
import {ErrorWithCause} from "pony-cause";
export interface ConfigBuilderOptions {
logger: Logger,
@@ -145,20 +155,26 @@ export class ConfigBuilder {
const strongActions = insertNamedActions(c.actions, namedActions);
let derivedAuthorIs: AuthorOptions = authorIsDefault;
if(authorIsBehavior === 'merge') {
derivedAuthorIs = merge.all([authorIs, authorIsDefault], {arrayMerge: overwriteMerge});
} else if(Object.keys(authorIs).length > 0) {
if (authorIsBehavior === 'merge') {
derivedAuthorIs = merge.all([authorIs, authorIsDefault], {arrayMerge: removeFromSourceIfKeysExistsInDestination});
} else if (Object.keys(authorIs).length > 0) {
derivedAuthorIs = authorIs;
}
let derivedItemIs: TypedActivityStates = itemIsDefault;
if(itemIsBehavior === 'merge') {
if (itemIsBehavior === 'merge') {
derivedItemIs = [...itemIs, ...itemIsDefault];
} else if(itemIs.length > 0) {
} else if (itemIs.length > 0) {
derivedItemIs = itemIs;
}
const strongCheck = {...c, authorIs: derivedAuthorIs, itemIs: derivedItemIs, rules: strongRules, actions: strongActions} as CheckStructuredJson;
const strongCheck = {
...c,
authorIs: derivedAuthorIs,
itemIs: derivedItemIs,
rules: strongRules,
actions: strongActions
} as CheckStructuredJson;
structuredChecks.push(strongCheck);
}
@@ -321,7 +337,7 @@ export const parseDefaultBotInstanceFromArgs = (args: any): BotInstanceJsonConfi
heartbeatInterval: heartbeat,
},
polling: {
shared: sharedMod ? ['unmoderated','modqueue'] : undefined,
shared: sharedMod ? ['unmoderated', 'modqueue'] : undefined,
},
nanny: {
softLimit,
@@ -358,7 +374,16 @@ export const parseOpConfigFromArgs = (args: any): OperatorJsonConfig => {
},
logging: {
level: logLevel,
path: logDir === true ? `${process.cwd()}/logs` : undefined,
file: {
level: logLevel,
dirName: logDir,
},
stream: {
level: logLevel,
},
console: {
level: logLevel,
}
},
caching: {
provider: caching,
@@ -424,7 +449,7 @@ export const parseDefaultBotInstanceFromEnv = (): BotInstanceJsonConfig => {
heartbeatInterval: process.env.HEARTBEAT !== undefined ? parseInt(process.env.HEARTBEAT) : undefined,
},
polling: {
shared: parseBool(process.env.SHARE_MOD) ? ['unmoderated','modqueue'] : undefined,
shared: parseBool(process.env.SHARE_MOD) ? ['unmoderated', 'modqueue'] : undefined,
},
nanny: {
softLimit: process.env.SOFT_LIMIT !== undefined ? parseInt(process.env.SOFT_LIMIT) : undefined,
@@ -442,9 +467,17 @@ export const parseOpConfigFromEnv = (): OperatorJsonConfig => {
display: process.env.OPERATOR_DISPLAY
},
logging: {
// @ts-ignore
level: process.env.LOG_LEVEL,
path: process.env.LOG_DIR === 'true' ? `${process.cwd()}/logs` : undefined,
file: {
level: process.env.LOG_LEVEL,
dirname: process.env.LOG_DIR,
},
stream: {
level: process.env.LOG_LEVEL,
},
console: {
level: process.env.LOG_LEVEL,
}
},
caching: {
provider: {
@@ -470,9 +503,9 @@ export const parseOpConfigFromEnv = (): OperatorJsonConfig => {
},
},
credentials: {
youtube: {
apiKey: process.env.YOUTUBE_API_KEY
}
youtube: {
apiKey: process.env.YOUTUBE_API_KEY
}
}
}
@@ -485,12 +518,26 @@ export const parseOpConfigFromEnv = (): OperatorJsonConfig => {
// Actual ENVs (from environment)
// json config
// args from cli
export const parseOperatorConfigFromSources = async (args: any): Promise<OperatorJsonConfig> => {
const {logLevel = process.env.LOG_LEVEL, logDir = process.env.LOG_DIR || false} = args || {};
export const parseOperatorConfigFromSources = async (args: any): Promise<[OperatorJsonConfig, OperatorFileConfig]> => {
const {logLevel = process.env.LOG_LEVEL ?? 'debug', logDir = process.env.LOG_DIR} = args || {};
const envPath = process.env.OPERATOR_ENV;
const initLoggerOptions = {
level: logLevel,
console: {
level: logLevel
},
file: {
level: logLevel,
dirname: logDir,
},
stream: {
level: logLevel
}
}
// create a pre config logger to help with debugging
const initLogger = getLogger({logLevel, logDir: logDir === true ? `${process.cwd()}/logs` : logDir}, 'init');
// default to debug if nothing is provided
const initLogger = getLogger(initLoggerOptions, 'init');
try {
const vars = await GetEnvVars({
@@ -516,25 +563,80 @@ export const parseOperatorConfigFromSources = async (args: any): Promise<Operato
//swallow silently for now 😬
}
const {operatorConfig = process.env.OPERATOR_CONFIG} = args;
const {operatorConfig = (process.env.OPERATOR_CONFIG ?? path.resolve(__dirname, '../config.yaml'))} = args;
let configFromFile: OperatorJsonConfig = {};
if (operatorConfig !== undefined) {
let rawConfig;
try {
rawConfig = await readConfigFile(operatorConfig, {log: initLogger}) as object;
} catch (err: any) {
initLogger.error('Cannot continue app startup because operator config file was not parseable.');
err.logged = true;
throw err;
let fileConfigFormat: ConfigFormat | undefined = undefined;
let fileConfig: object = {};
let rawConfig: string = '';
let configDoc: YamlOperatorConfigDocument | JsonOperatorConfigDocument;
let writeable = false;
try {
writeable = await fileOrDirectoryIsWriteable(operatorConfig);
} catch (e) {
initLogger.warn(`Issue while parsing operator config file location: ${e} \n This is only a problem if you do not have a config file but are planning on adding bots interactively.`);
}
try {
const [rawConfigValue, format] = await readConfigFile(operatorConfig, {log: initLogger});
rawConfig = rawConfigValue ?? '';
fileConfigFormat = format as ConfigFormat;
} catch (err: any) {
const {code} = err;
if (code === 'ENOENT') {
initLogger.warn('No operator config file found but will continue');
if (err.extension !== undefined) {
fileConfigFormat = err.extension
}
} else {
throw new ErrorWithCause('Cannot continue app startup because operator config file exists but was not parseable.', {cause: err});
}
}
const [format, doc, jsonErr, yamlErr] = parseFromJsonOrYamlToObject(rawConfig, {
location: operatorConfig,
jsonDocFunc: (content, location) => new JsonOperatorConfigDocument(content, location),
yamlDocFunc: (content, location) => new YamlOperatorConfigDocument(content, location)
});
if (format !== undefined && fileConfigFormat === undefined) {
fileConfigFormat = 'yaml';
}
if (doc === undefined && rawConfig !== '') {
initLogger.error(`Could not parse file contents at ${operatorConfig} as JSON or YAML (likely it is ${fileConfigFormat}):`);
initLogger.error(jsonErr);
initLogger.error(yamlErr);
throw new SimpleError(`Could not parse file contents at ${operatorConfig} as JSON or YAML`);
} else if (doc === undefined && rawConfig === '') {
// create an empty doc
if(fileConfigFormat === 'json') {
configDoc = new JsonOperatorConfigDocument('{}', operatorConfig);
} else {
configDoc = new YamlOperatorConfigDocument('', operatorConfig);
configDoc.parsed = new YamlDocument({});
}
configFromFile = {};
} else {
configDoc = doc as (YamlOperatorConfigDocument | JsonOperatorConfigDocument);
try {
configFromFile = validateJson(rawConfig, operatorSchema, initLogger) as OperatorJsonConfig;
const {bots = []} = configFromFile || {};
for(const b of bots) {
const {polling: {
sharedMod
} = {}} = b;
if(sharedMod !== undefined) {
configFromFile = validateJson(configDoc.toJS(), operatorSchema, initLogger) as OperatorJsonConfig;
const {
bots = [],
logging: {
path = undefined
} = {}
} = configFromFile || {};
if(path !== undefined) {
initLogger.warn(`'path' property in top-level 'logging' object is DEPRECATED and will be removed in next minor version. Use 'logging.file.dirname' instead`);
}
for (const b of bots) {
const {
polling: {
sharedMod
} = {}
} = b;
if (sharedMod !== undefined) {
initLogger.warn(`'sharedMod' bot config property is DEPRECATED and will be removed in next minor version. Use 'shared' property instead (see docs)`);
break;
}
@@ -544,6 +646,7 @@ export const parseOperatorConfigFromSources = async (args: any): Promise<Operato
throw err;
}
}
const opConfigFromArgs = parseOpConfigFromArgs(args);
const opConfigFromEnv = parseOpConfigFromEnv();
@@ -570,7 +673,10 @@ export const parseOperatorConfigFromSources = async (args: any): Promise<Operato
botInstances = botInstancesFromFile.map(x => merge.all([defaultBotInstance, x], {arrayMerge: overwriteMerge}));
}
return removeUndefinedKeys({...mergedConfig, bots: botInstances}) as OperatorJsonConfig;
return [removeUndefinedKeys({...mergedConfig, bots: botInstances}) as OperatorJsonConfig, {
document: configDoc,
isWriteable: writeable
}];
}
export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): OperatorConfig => {
@@ -583,6 +689,9 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
logging: {
level = 'verbose',
path,
file = {},
console = {},
stream = {},
} = {},
caching: opCache,
web: {
@@ -656,165 +765,14 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
}
}
let hydratedBots: BotInstanceConfig[] = bots.map(x => {
const {
name: botName,
filterCriteriaDefaults = filterCriteriaDefault,
polling: {
sharedMod,
shared = [],
stagger,
limit = 100,
interval = 30,
} = {},
queue: {
maxWorkers = 1,
} = {},
caching,
nanny: {
softLimit = 250,
hardLimit = 50
} = {},
snoowrap = snoowrapOp,
credentials = {},
subreddits: {
names = [],
exclude = [],
wikiConfig = 'botconfig/contextbot',
dryRun,
heartbeatInterval = 300,
} = {},
} = x;
let botCache: StrongCache;
let botActionedEventsDefault: number;
if (caching === undefined) {
botCache = {
...cacheTTLDefaults,
actionedEventsDefault: opActionedEventsDefault,
actionedEventsMax: opActionedEventsMax,
provider: {...defaultProvider}
};
} else {
const {
provider,
actionedEventsMax = opActionedEventsMax,
actionedEventsDefault = opActionedEventsDefault,
...restConfig
} = caching;
botActionedEventsDefault = actionedEventsDefault;
if (actionedEventsMax !== undefined) {
botActionedEventsDefault = Math.min(actionedEventsDefault, actionedEventsMax);
}
if (typeof provider === 'string') {
botCache = {
...cacheTTLDefaults,
...restConfig,
actionedEventsDefault: botActionedEventsDefault,
provider: {
store: provider as CacheProvider,
...cacheOptDefaults
}
}
} else {
const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
botCache = {
...cacheTTLDefaults,
...restConfig,
actionedEventsDefault: botActionedEventsDefault,
actionedEventsMax,
provider: {
store,
...cacheOptDefaults,
...rest,
},
}
}
}
let botCreds: BotCredentialsConfig;
if((credentials as any).clientId !== undefined) {
const creds = credentials as RedditCredentials;
const {
clientId: ci,
clientSecret: cs,
...restCred
} = creds;
botCreds = {
reddit: {
clientId: (ci as string),
clientSecret: (cs as string),
...restCred,
}
}
} else {
const creds = credentials as BotCredentialsJsonConfig;
const {
reddit: {
clientId: ci,
clientSecret: cs,
...restRedditCreds
},
...rest
} = creds;
botCreds = {
reddit: {
clientId: (ci as string),
clientSecret: (cs as string),
...restRedditCreds,
},
...rest
}
}
if (botCache.provider.prefix === undefined || botCache.provider.prefix === defaultProvider.prefix) {
// need to provide unique prefix to bot
botCache.provider.prefix = buildCachePrefix([botCache.provider.prefix, 'bot', (botName || objectHash.sha1(botCreds))]);
}
let realShared = shared === true ? ['unmoderated','modqueue','newComm','newSub'] : shared;
if(sharedMod === true) {
realShared.push('unmoderated');
realShared.push('modqueue');
}
return {
name: botName,
snoowrap,
filterCriteriaDefaults,
subreddits: {
names,
exclude,
wikiConfig,
heartbeatInterval,
dryRun,
},
credentials: botCreds,
caching: botCache,
polling: {
shared: [...new Set(realShared)] as PollOn[],
stagger,
limit,
interval,
},
queue: {
maxWorkers,
},
nanny: {
softLimit,
hardLimit
}
}
});
const defaultOperators = typeof name === 'string' ? [name] : name;
const {
dirname = path,
...fileRest
} = file;
const config: OperatorConfig = {
mode,
operator: {
@@ -823,7 +781,19 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
},
logging: {
level,
path
file: {
level: level,
dirname,
...fileRest,
},
stream: {
level: level,
...stream,
},
console: {
level: level,
...console,
}
},
caching: cache,
web: {
@@ -849,9 +819,175 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
secret: apiSecret,
friendly
},
bots: hydratedBots,
bots: [],
credentials,
};
config.bots = bots.map(x => buildBotConfig(x, config));
return config;
}
/**
 * Hydrate a bot's JSON config into a fully-defaulted, strongly-typed config.
 *
 * Operator-level settings (snoowrap creds, caching defaults) act as fallbacks for
 * anything the bot instance does not specify. Normalizes caching provider shape,
 * credential shape (bare reddit creds vs. nested object), cache-key prefix, and
 * the shared-polling stream list.
 *
 * @param data raw bot instance config as parsed from file/env/args
 * @param opConfig the already-built operator config supplying defaults
 * @returns a complete BotInstanceConfig with all defaults applied
 * */
export const buildBotConfig = (data: BotInstanceJsonConfig, opConfig: OperatorConfig): BotInstanceConfig => {
    const {
        snoowrap: snoowrapOp,
        caching: {
            actionedEventsMax: opActionedEventsMax,
            actionedEventsDefault: opActionedEventsDefault = 25,
            provider: defaultProvider,
        } = {}
    } = opConfig;
    const {
        name: botName,
        filterCriteriaDefaults = filterCriteriaDefault,
        polling: {
            sharedMod,
            shared = [],
            stagger,
            limit = 100,
            interval = 30,
        } = {},
        queue: {
            maxWorkers = 1,
        } = {},
        caching,
        nanny: {
            softLimit = 250,
            hardLimit = 50
        } = {},
        snoowrap = snoowrapOp,
        credentials = {},
        subreddits: {
            names = [],
            exclude = [],
            wikiConfig = 'botconfig/contextbot',
            dryRun,
            heartbeatInterval = 300,
        } = {},
    } = data;

    // Resolve bot-level cache settings, inheriting operator defaults where the bot
    // does not specify its own
    let botCache: StrongCache;
    let botActionedEventsDefault: number;
    if (caching === undefined) {
        botCache = {
            ...cacheTTLDefaults,
            actionedEventsDefault: opActionedEventsDefault,
            actionedEventsMax: opActionedEventsMax,
            provider: {...defaultProvider as CacheOptions}
        };
    } else {
        const {
            provider,
            actionedEventsMax = opActionedEventsMax,
            actionedEventsDefault = opActionedEventsDefault,
            ...restConfig
        } = caching;
        botActionedEventsDefault = actionedEventsDefault;
        if (actionedEventsMax !== undefined) {
            // never let the default exceed the configured maximum
            botActionedEventsDefault = Math.min(actionedEventsDefault, actionedEventsMax);
        }
        if (typeof provider === 'string') {
            // bare provider name => expand to full provider options with defaults
            botCache = {
                ...cacheTTLDefaults,
                ...restConfig,
                actionedEventsDefault: botActionedEventsDefault,
                provider: {
                    store: provider as CacheProvider,
                    ...cacheOptDefaults
                }
            }
        } else {
            const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
            botCache = {
                ...cacheTTLDefaults,
                ...restConfig,
                actionedEventsDefault: botActionedEventsDefault,
                actionedEventsMax,
                provider: {
                    store,
                    ...cacheOptDefaults,
                    ...rest,
                },
            }
        }
    }

    // Normalize credentials: accept either bare reddit credentials at the top level
    // or the nested { reddit: {...}, ...thirdParty } shape
    let botCreds: BotCredentialsConfig;
    if ((credentials as any).clientId !== undefined) {
        const creds = credentials as RedditCredentials;
        const {
            clientId: ci,
            clientSecret: cs,
            ...restCred
        } = creds;
        botCreds = {
            reddit: {
                clientId: (ci as string),
                clientSecret: (cs as string),
                ...restCred,
            }
        }
    } else {
        const creds = credentials as BotCredentialsJsonConfig;
        const {
            reddit: {
                clientId: ci,
                clientSecret: cs,
                ...restRedditCreds
            },
            ...rest
        } = creds;
        botCreds = {
            reddit: {
                clientId: (ci as string),
                clientSecret: (cs as string),
                ...restRedditCreds,
            },
            ...rest
        }
    }

    if (botCache.provider.prefix === undefined || botCache.provider.prefix === (defaultProvider as CacheOptions).prefix) {
        // need to provide unique prefix to bot
        botCache.provider.prefix = buildCachePrefix([botCache.provider.prefix, 'bot', (botName || objectHash.sha1(botCreds))]);
    }

    // FIX: copy the array before augmenting it -- previously `realShared` aliased the
    // caller's `shared` array and the push() calls below mutated the input config
    let realShared = shared === true ? ['unmoderated', 'modqueue', 'newComm', 'newSub'] : [...shared];
    if (sharedMod === true) {
        // deprecated `sharedMod` flag implies sharing the two mod streams
        realShared.push('unmoderated');
        realShared.push('modqueue');
    }

    return {
        name: botName,
        snoowrap: snoowrap || {},
        filterCriteriaDefaults,
        subreddits: {
            names,
            exclude,
            wikiConfig,
            heartbeatInterval,
            dryRun,
        },
        credentials: botCreds,
        caching: botCache,
        polling: {
            // dedupe in case sharedMod added streams already present in `shared`
            shared: [...new Set(realShared)] as PollOn[],
            stagger,
            limit,
            interval,
        },
        queue: {
            maxWorkers,
        },
        nanny: {
            softLimit,
            hardLimit
        }
    }
}

View File

@@ -14,8 +14,8 @@ import {
PASS
} from "../util";
import { Comment } from "snoowrap/dist/objects";
import SimpleError from "../Utils/SimpleError";
import as from "async";
import {SimpleError} from "../Utils/Errors";
export interface AttributionCriteria {

View File

@@ -2,6 +2,7 @@ import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
import {Comment} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {Author, AuthorCriteria} from "../Author/Author";
import {checkAuthorFilter} from "../Subreddit/SubredditResources";
/**
* Checks the author of the Activity against AuthorCriteria. This differs from a Rule's AuthorOptions as this is a full Rule and will only pass/fail, not skip.
@@ -59,20 +60,8 @@ export class AuthorRule extends Rule {
}
protected async process(item: Comment | Submission): Promise<[boolean, RuleResult]> {
if (this.include.length > 0) {
for (const auth of this.include) {
if (await this.resources.testAuthorCriteria(item, auth)) {
return Promise.resolve([true, this.getResult(true)]);
}
}
return Promise.resolve([false, this.getResult(false)]);
}
for (const auth of this.exclude) {
if (await this.resources.testAuthorCriteria(item, auth, false)) {
return Promise.resolve([true, this.getResult(true)]);
}
}
return Promise.resolve([false, this.getResult(false)]);
const [result, filterType] = await checkAuthorFilter(item, {include: this.include, exclude: this.exclude}, this.resources, this.logger);
return Promise.resolve([result, this.getResult(result)]);
}
}

View File

@@ -11,7 +11,7 @@ import {
ActivityWindowType, JoinOperands,
} from "../Common/interfaces";
import dayjs from 'dayjs';
import SimpleError from "../Utils/SimpleError";
import {SimpleError} from "../Utils/Errors";
export interface RegexCriteria {
/**

View File

@@ -50,24 +50,52 @@
]
},
"flairCssClass": {
"description": "A list of (user) flair css class values from the subreddit to match against",
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair css class (or list of) from the subreddit to match against",
"examples": [
"red"
]
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"items": {
"type": "string"
},
"type": "array"
"description": "A (user) flair template id (or list of) from the subreddit to match against"
},
"flairText": {
"description": "A list of (user) flair text values from the subreddit to match against",
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair text value (or list of) from the subreddit to match against",
"examples": [
"Approved"
],
"items": {
"type": "string"
},
"type": "array"
]
},
"isMod": {
"description": "Is the author a moderator?",
@@ -249,14 +277,47 @@
"filtered": {
"type": "boolean"
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"is_self": {
"type": "boolean"
},
"link_flair_css_class": {
"type": "string"
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"link_flair_text": {
"type": "string"
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"locked": {
"type": "boolean"

View File

@@ -222,6 +222,17 @@
],
"pattern": "^[a-zA-Z]([\\w -]*[\\w])?$",
"type": "string"
},
"targets": {
"description": "Specify which Activities to approve\n\nThis setting is only applicable if the Activity being acted on is a **comment**. On a **submission** the setting does nothing\n\n* self => approve activity being checked (comment)\n* parent => approve parent (submission) of activity being checked (comment)",
"items": {
"enum": [
"parent",
"self"
],
"type": "string"
},
"type": "array"
}
},
"required": [
@@ -235,8 +246,7 @@
"default": "undefined",
"description": "This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `redditMedia` is included then aggregate on author's submissions history which are media hosted on reddit: galleries, videos, and images (i.redd.it / v.redd.it)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or domain is `reddit.com`\n* If `link` is included then aggregate author's submission history which is external links and not recognized as `media` by reddit\n\nIf nothing is specified or list is empty (default) rule will only aggregate on `link` and `media` (ignores reddit-hosted content and self-posts)",
"examples": [
[
]
[]
],
"items": {
"enum": [
@@ -269,8 +279,7 @@
},
"domains": {
"default": [
[
]
[]
],
"description": "A list of domains whose Activities will be tested against `threshold`.\n\nThe values are tested as partial strings so you do not need to include full URLs, just the part that matters.\n\nEX `[\"youtube\"]` will match submissions with the domain `https://youtube.com/c/aChannel`\nEX `[\"youtube.com/c/bChannel\"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`\n\nIf you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`\n\n**If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**\n\nIf nothing is specified or list is empty (default) aggregate using `aggregateOn`",
"items": {
@@ -505,24 +514,52 @@
]
},
"flairCssClass": {
"description": "A list of (user) flair css class values from the subreddit to match against",
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair css class (or list of) from the subreddit to match against",
"examples": [
"red"
]
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"items": {
"type": "string"
},
"type": "array"
"description": "A (user) flair template id (or list of) from the subreddit to match against"
},
"flairText": {
"description": "A list of (user) flair text values from the subreddit to match against",
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair text value (or list of) from the subreddit to match against",
"examples": [
"Approved"
],
"items": {
"type": "string"
},
"type": "array"
]
},
"isMod": {
"description": "Is the author a moderator?",
@@ -924,8 +961,7 @@
"type": "object"
},
"CacheOptions": {
"additionalProperties": {
},
"additionalProperties": {},
"description": "Configure granular settings for a cache provider with this object",
"properties": {
"auth_pass": {
@@ -2599,6 +2635,9 @@
],
"pattern": "^[a-zA-Z]([\\w -]*[\\w])?$",
"type": "string"
},
"spam": {
"type": "boolean"
}
},
"required": [
@@ -3439,14 +3478,47 @@
"filtered": {
"type": "boolean"
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"is_self": {
"type": "boolean"
},
"link_flair_css_class": {
"type": "string"
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"link_flair_text": {
"type": "string"
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"locked": {
"type": "boolean"
@@ -3529,8 +3601,7 @@
"type": "object"
},
"ThirdPartyCredentialsJsonConfig": {
"additionalProperties": {
},
"additionalProperties": {},
"properties": {
"youtube": {
"properties": {

View File

@@ -50,24 +50,52 @@
]
},
"flairCssClass": {
"description": "A list of (user) flair css class values from the subreddit to match against",
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair css class (or list of) from the subreddit to match against",
"examples": [
"red"
]
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"items": {
"type": "string"
},
"type": "array"
"description": "A (user) flair template id (or list of) from the subreddit to match against"
},
"flairText": {
"description": "A list of (user) flair text values from the subreddit to match against",
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair text value (or list of) from the subreddit to match against",
"examples": [
"Approved"
],
"items": {
"type": "string"
},
"type": "array"
]
},
"isMod": {
"description": "Is the author a moderator?",
@@ -369,8 +397,7 @@
"type": "object"
},
"CacheOptions": {
"additionalProperties": {
},
"additionalProperties": {},
"description": "Configure granular settings for a cache provider with this object",
"properties": {
"auth_pass": {
@@ -579,6 +606,117 @@
},
"type": "object"
},
"LoggingOptions": {
"properties": {
"console": {
"allOf": [
{
"$ref": "#/definitions/Pick<Transports.ConsoleTransportOptions,\"silent\"|\"eol\"|\"stderrLevels\"|\"consoleWarnLevels\">"
},
{
"properties": {
"level": {
"enum": [
"debug",
"error",
"info",
"verbose",
"warn"
],
"type": "string"
}
},
"type": "object"
}
],
"description": "Options for logging to console"
},
"file": {
"allOf": [
{
"properties": {
"dirname": {
"description": "The absolute path to a directory where rotating log files should be stored.\n\n* If not present or `null` or `false` no log files will be created\n* If `true` logs will be stored at `[working directory]/logs`\n\n* ENV => `LOG_DIR`\n* ARG => `--logDir [dir]`",
"examples": [
"/var/log/contextmod"
],
"type": [
"null",
"string",
"boolean"
]
},
"level": {
"enum": [
"debug",
"error",
"info",
"verbose",
"warn"
],
"type": "string"
}
},
"type": "object"
},
{
"$ref": "#/definitions/Omit<DailyRotateFileTransportOptions,\"stream\"|\"dirname\"|\"options\"|\"handleRejections\"|\"format\"|\"handleExceptions\"|\"log\"|\"logv\"|\"close\">"
}
],
"description": "Options for Rotating File logging"
},
"level": {
"default": "verbose",
"description": "The minimum log level to output. The log level set will output logs at its level **and all levels above it:**\n\n * `error`\n * `warn`\n * `info`\n * `verbose`\n * `debug`\n\n Note: `verbose` will display *a lot* of information on the status/result of run rules/checks/actions etc. which is very useful for testing configurations. Once your bot is stable changing the level to `info` will reduce log noise.\n\n * ENV => `LOG_LEVEL`\n * ARG => `--logLevel <level>`",
"enum": [
"debug",
"error",
"info",
"verbose",
"warn"
],
"examples": [
"verbose"
],
"type": "string"
},
"path": {
"description": "**DEPRECATED** - Use `file.dirname` instead\nThe absolute path to a directory where rotating log files should be stored.\n\n* If not present or `null` or `false` no log files will be created\n* If `true` logs will be stored at `[working directory]/logs`\n\n* ENV => `LOG_DIR`\n* ARG => `--logDir [dir]`",
"examples": [
"/var/log/contextmod"
],
"type": [
"null",
"string",
"boolean"
]
},
"stream": {
"allOf": [
{
"$ref": "#/definitions/Omit<DuplexTransportOptions,\"name\"|\"stream\"|\"handleRejections\"|\"format\"|\"handleExceptions\"|\"log\"|\"logv\"|\"close\">"
},
{
"properties": {
"level": {
"enum": [
"debug",
"error",
"info",
"verbose",
"warn"
],
"type": "string"
}
},
"type": "object"
}
],
"description": "Options for logging to api/web"
}
},
"type": "object"
},
"NotificationConfig": {
"properties": {
"events": {
@@ -644,6 +782,90 @@
],
"type": "object"
},
"Omit<DailyRotateFileTransportOptions,\"stream\"|\"dirname\"|\"options\"|\"handleRejections\"|\"format\"|\"handleExceptions\"|\"log\"|\"logv\"|\"close\">": {
"properties": {
"auditFile": {
"description": "A string representing the name of the name of the audit file. (default: './hash-audit.json')",
"type": "string"
},
"createSymlink": {
"description": "Create a tailable symlink to the current active log file. (default: false)",
"type": "boolean"
},
"datePattern": {
"description": "A string representing the moment.js date format to be used for rotating. The meta characters used in this string will dictate the frequency of the file rotation. For example, if your datePattern is simply 'HH' you will end up with 24 log files that are picked up and appended to every day. (default 'YYYY-MM-DD')",
"type": "string"
},
"eol": {
"type": "string"
},
"extension": {
"description": "A string representing an extension to be added to the filename, if not included in the filename property. (default: '')",
"type": "string"
},
"filename": {
"description": "Filename to be used to log to. This filename can include the %DATE% placeholder which will include the formatted datePattern at that point in the filename. (default: 'winston.log.%DATE%)",
"type": "string"
},
"frequency": {
"description": "A string representing the frequency of rotation. (default: 'custom')",
"type": "string"
},
"json": {
"type": "boolean"
},
"level": {
"type": "string"
},
"maxFiles": {
"description": "Maximum number of logs to keep. If not set, no logs will be removed. This can be a number of files or number of days. If using days, add 'd' as the suffix. (default: null)",
"type": [
"string",
"number"
]
},
"maxSize": {
"description": "Maximum size of the file after which it will rotate. This can be a number of bytes, or units of kb, mb, and gb. If using the units, add 'k', 'm', or 'g' as the suffix. The units need to directly follow the number. (default: null)",
"type": [
"string",
"number"
]
},
"silent": {
"type": "boolean"
},
"symlinkName": {
"description": "The name of the tailable symlink. (default: 'current.log')",
"type": "string"
},
"utc": {
"description": "A boolean whether or not to generate file name from \"datePattern\" in UTC format. (default: false)",
"type": "boolean"
},
"zippedArchive": {
"description": "A boolean to define whether or not to gzip archived log files. (default 'false')",
"type": "boolean"
}
},
"type": "object"
},
"Omit<DuplexTransportOptions,\"name\"|\"stream\"|\"handleRejections\"|\"format\"|\"handleExceptions\"|\"log\"|\"logv\"|\"close\">": {
"properties": {
"dump": {
"type": "boolean"
},
"eol": {
"type": "string"
},
"level": {
"type": "string"
},
"silent": {
"type": "boolean"
}
},
"type": "object"
},
"OperatorCacheConfig": {
"properties": {
"actionedEventsDefault": {
@@ -763,6 +985,29 @@
},
"type": "object"
},
"Pick<Transports.ConsoleTransportOptions,\"silent\"|\"eol\"|\"stderrLevels\"|\"consoleWarnLevels\">": {
"properties": {
"consoleWarnLevels": {
"items": {
"type": "string"
},
"type": "array"
},
"eol": {
"type": "string"
},
"silent": {
"type": "boolean"
},
"stderrLevels": {
"items": {
"type": "string"
},
"type": "array"
}
},
"type": "object"
},
"PollingDefaults": {
"properties": {
"delayUntil": {
@@ -873,14 +1118,47 @@
"filtered": {
"type": "boolean"
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"is_self": {
"type": "boolean"
},
"link_flair_css_class": {
"type": "string"
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"link_flair_text": {
"type": "string"
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"locked": {
"type": "boolean"
@@ -922,8 +1200,7 @@
"type": "object"
},
"ThirdPartyCredentialsJsonConfig": {
"additionalProperties": {
},
"additionalProperties": {},
"properties": {
"youtube": {
"properties": {
@@ -1049,32 +1326,8 @@
"$ref": "#/definitions/ThirdPartyCredentialsJsonConfig"
},
"logging": {
"description": "Settings to configure global logging defaults",
"properties": {
"level": {
"default": "verbose",
"description": "The minimum log level to output. The log level set will output logs at its level **and all levels above it:**\n\n * `error`\n * `warn`\n * `info`\n * `verbose`\n * `debug`\n\n Note: `verbose` will display *a lot* of information on the status/result of run rules/checks/actions etc. which is very useful for testing configurations. Once your bot is stable changing the level to `info` will reduce log noise.\n\n * ENV => `LOG_LEVEL`\n * ARG => `--logLevel <level>`",
"enum": [
"debug",
"error",
"info",
"verbose",
"warn"
],
"examples": [
"verbose"
],
"type": "string"
},
"path": {
"description": "The absolute path to a directory where rotating log files should be stored.\n\n* If not present or `null` no log files will be created\n* If `true` logs will be stored at `[working directory]/logs`\n\n* ENV => `LOG_DIR`\n* ARG => `--logDir [dir]`",
"examples": [
"/var/log/contextmod"
],
"type": "string"
}
},
"type": "object"
"$ref": "#/definitions/LoggingOptions",
"description": "Settings to configure global logging defaults"
},
"mode": {
"default": "all",

View File

@@ -184,8 +184,7 @@
"default": "undefined",
"description": "This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `redditMedia` is included then aggregate on author's submissions history which are media hosted on reddit: galleries, videos, and images (i.redd.it / v.redd.it)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or domain is `reddit.com`\n* If `link` is included then aggregate author's submission history which is external links and not recognized as `media` by reddit\n\nIf nothing is specified or list is empty (default) rule will only aggregate on `link` and `media` (ignores reddit-hosted content and self-posts)",
"examples": [
[
]
[]
],
"items": {
"enum": [
@@ -218,8 +217,7 @@
},
"domains": {
"default": [
[
]
[]
],
"description": "A list of domains whose Activities will be tested against `threshold`.\n\nThe values are tested as partial strings so you do not need to include full URLs, just the part that matters.\n\nEX `[\"youtube\"]` will match submissions with the domain `https://youtube.com/c/aChannel`\nEX `[\"youtube.com/c/bChannel\"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`\n\nIf you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`\n\n**If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**\n\nIf nothing is specified or list is empty (default) aggregate using `aggregateOn`",
"items": {
@@ -454,24 +452,52 @@
]
},
"flairCssClass": {
"description": "A list of (user) flair css class values from the subreddit to match against",
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair css class (or list of) from the subreddit to match against",
"examples": [
"red"
]
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"items": {
"type": "string"
},
"type": "array"
"description": "A (user) flair template id (or list of) from the subreddit to match against"
},
"flairText": {
"description": "A list of (user) flair text values from the subreddit to match against",
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair text value (or list of) from the subreddit to match against",
"examples": [
"Approved"
],
"items": {
"type": "string"
},
"type": "array"
]
},
"isMod": {
"description": "Is the author a moderator?",
@@ -1922,14 +1948,47 @@
"filtered": {
"type": "boolean"
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"is_self": {
"type": "boolean"
},
"link_flair_css_class": {
"type": "string"
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"link_flair_text": {
"type": "string"
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"locked": {
"type": "boolean"

View File

@@ -158,8 +158,7 @@
"default": "undefined",
"description": "This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `redditMedia` is included then aggregate on author's submissions history which are media hosted on reddit: galleries, videos, and images (i.redd.it / v.redd.it)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or domain is `reddit.com`\n* If `link` is included then aggregate author's submission history which is external links and not recognized as `media` by reddit\n\nIf nothing is specified or list is empty (default) rule will only aggregate on `link` and `media` (ignores reddit-hosted content and self-posts)",
"examples": [
[
]
[]
],
"items": {
"enum": [
@@ -192,8 +191,7 @@
},
"domains": {
"default": [
[
]
[]
],
"description": "A list of domains whose Activities will be tested against `threshold`.\n\nThe values are tested as partial strings so you do not need to include full URLs, just the part that matters.\n\nEX `[\"youtube\"]` will match submissions with the domain `https://youtube.com/c/aChannel`\nEX `[\"youtube.com/c/bChannel\"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`\n\nIf you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`\n\n**If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**\n\nIf nothing is specified or list is empty (default) aggregate using `aggregateOn`",
"items": {
@@ -428,24 +426,52 @@
]
},
"flairCssClass": {
"description": "A list of (user) flair css class values from the subreddit to match against",
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair css class (or list of) from the subreddit to match against",
"examples": [
"red"
]
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"items": {
"type": "string"
},
"type": "array"
"description": "A (user) flair template id (or list of) from the subreddit to match against"
},
"flairText": {
"description": "A list of (user) flair text values from the subreddit to match against",
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "A (user) flair text value (or list of) from the subreddit to match against",
"examples": [
"Approved"
],
"items": {
"type": "string"
},
"type": "array"
]
},
"isMod": {
"description": "Is the author a moderator?",
@@ -1896,14 +1922,47 @@
"filtered": {
"type": "boolean"
},
"flairTemplate": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"is_self": {
"type": "boolean"
},
"link_flair_css_class": {
"type": "string"
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"link_flair_text": {
"type": "string"
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
]
},
"locked": {
"type": "boolean"

View File

@@ -48,7 +48,8 @@ import {CheckStructuredJson} from "../Check";
import NotificationManager from "../Notification/NotificationManager";
import {createHistoricalDefaults, historicalDefaults} from "../Common/defaults";
import {ExtendedSnoowrap} from "../Utils/SnoowrapClients";
import {isRateLimitError, isStatusError} from "../Utils/Errors";
import {CMError, isRateLimitError, isStatusError} from "../Utils/Errors";
import {ErrorWithCause} from "pony-cause";
export interface RunningState {
state: RunState,
@@ -449,9 +450,6 @@ export class Manager extends EventEmitter {
this.logger.info(checkSummary);
}
this.validConfigLoaded = true;
if(!suppressChangeEvent) {
this.emit('configChange');
}
if(this.eventsState.state === RUNNING) {
// need to update polling, potentially
await this.buildPolling();
@@ -462,6 +460,9 @@ export class Manager extends EventEmitter {
}
}
}
if(!suppressChangeEvent) {
this.emit('configChange');
}
} catch (err: any) {
this.validConfigLoaded = false;
throw err;
@@ -484,21 +485,21 @@ export class Manager extends EventEmitter {
if(isStatusError(err) && err.statusCode === 404) {
// see if we can create the page
if (!this.client.scope.includes('wikiedit')) {
throw new Error(`Page does not exist and could not be created because Bot does not have oauth permission 'wikiedit'`);
throw new ErrorWithCause(`Page does not exist and could not be created because Bot does not have oauth permission 'wikiedit'`, {cause: err});
}
const modPermissions = await this.getModPermissions();
if (!modPermissions.includes('all') && !modPermissions.includes('wiki')) {
throw new Error(`Page does not exist and could not be created because Bot not have mod permissions for creating wiki pages. Must have 'all' or 'wiki'`);
throw new ErrorWithCause(`Page does not exist and could not be created because Bot not have mod permissions for creating wiki pages. Must have 'all' or 'wiki'`, {cause: err});
}
if(!this.client.scope.includes('modwiki')) {
throw new Error(`Bot COULD create wiki config page but WILL NOT because it does not have the oauth permissions 'modwiki' which is required to set page visibility and editing permissions. Safety first!`);
throw new ErrorWithCause(`Bot COULD create wiki config page but WILL NOT because it does not have the oauth permissions 'modwiki' which is required to set page visibility and editing permissions. Safety first!`, {cause: err});
}
// @ts-ignore
wiki = await this.subreddit.getWikiPage(this.wikiLocation).edit({
text: '',
reason: 'Empty configuration created for ContextMod'
});
this.logger.info(`Wiki page at ${this.wikiLocation} did not exist, but bot created it!`);
this.logger.info(`Wiki page at ${this.wikiLocation} did not exist so bot created it!`);
// 0 = use subreddit wiki permissions
// 1 = only approved wiki contributors
@@ -542,11 +543,10 @@ export class Manager extends EventEmitter {
} catch (err: any) {
let hint = '';
if(isStatusError(err) && err.statusCode === 403) {
hint = `\r\nHINT: Either the page is restricted to mods only and the bot's reddit account does have the mod permission 'all' or 'wiki' OR the bot does not have the 'wikiread' oauth permission`;
hint = ` -- HINT: Either the page is restricted to mods only and the bot's reddit account does have the mod permission 'all' or 'wiki' OR the bot does not have the 'wikiread' oauth permission`;
}
const msg = `Could not read wiki configuration. Please ensure the page https://reddit.com${this.subreddit.url}wiki/${this.wikiLocation} exists and is readable${hint} -- error: ${err.message}`;
this.logger.error(msg);
throw new ConfigParseError(msg);
const msg = `Could not read wiki configuration. Please ensure the page https://reddit.com${this.subreddit.url}wiki/${this.wikiLocation} exists and is readable${hint}`;
throw new ErrorWithCause(msg, {cause: err});
}
if (sourceData.replace('\r\n', '').trim() === '') {
@@ -554,14 +554,8 @@ export class Manager extends EventEmitter {
throw new ConfigParseError('Wiki page contents was empty');
}
const [configObj, jsonErr, yamlErr] = parseFromJsonOrYamlToObject(sourceData);
if (jsonErr === undefined) {
this.wikiFormat = 'json';
} else if (yamlErr === undefined) {
this.wikiFormat = 'yaml';
} else {
this.wikiFormat = likelyJson5(sourceData) ? 'json' : 'yaml';
}
const [format, configObj, jsonErr, yamlErr] = parseFromJsonOrYamlToObject(sourceData);
this.wikiFormat = format;
if (configObj === undefined) {
this.logger.error(`Could not parse wiki page contents as JSON or YAML. Looks like it should be ${this.wikiFormat}?`);
@@ -577,7 +571,7 @@ export class Manager extends EventEmitter {
throw new ConfigParseError('Could not parse wiki page contents as JSON or YAML')
}
await this.parseConfigurationFromObject(configObj, suppressChangeEvent);
await this.parseConfigurationFromObject(configObj.toJS(), suppressChangeEvent);
this.logger.info('Checks updated');
if(!suppressNotification) {
@@ -586,8 +580,12 @@ export class Manager extends EventEmitter {
return true;
} catch (err: any) {
const error = new ErrorWithCause('Failed to parse subreddit configuration', {cause: err});
// @ts-ignore
//error.logged = true;
this.logger.error(error);
this.validConfigLoaded = false;
throw err;
throw error;
}
}
@@ -912,9 +910,9 @@ export class Manager extends EventEmitter {
}
if(!this.sharedStreamCallbacks.has(source)) {
stream.once('listing', this.noChecksWarning(source));
this.sharedStreamCallbacks.set(source, onItem);
this.logger.debug(`${removedOwn ? 'Stopped own polling and replace with ' : 'Set '}listener on shared polling ${source}`);
}
this.sharedStreamCallbacks.set(source, onItem);
} else {
let ownPollingMsgParts: string[] = [];
let removedShared = false;
@@ -943,14 +941,9 @@ export class Manager extends EventEmitter {
this.emit('error', err);
if (isRateLimitError(err)) {
this.logger.error('Encountered rate limit while polling! Bot is all out of requests :( Stopping subreddit queue and polling.');
await this.stop();
}
this.logger.error('Polling error occurred', err);
const shouldRetry = await this.pollingRetryHandler(err);
if (shouldRetry) {
stream.startInterval(false);
stream.startInterval(false, 'Within retry limits');
} else {
this.logger.warn('Stopping subreddit processing/polling due to too many errors');
await this.stop();
@@ -1146,7 +1139,6 @@ export class Manager extends EventEmitter {
s.end();
}
this.streams = new Map();
this.sharedStreamCallbacks = new Map();
this.startedAt = undefined;
this.logger.info(`Events STOPPED by ${causedBy}`);
this.eventsState = {

View File

@@ -1,10 +1,11 @@
import {Poll, SnooStormOptions} from "snoostorm"
import Snoowrap from "snoowrap";
import Snoowrap, {Listing} from "snoowrap";
import {EventEmitter} from "events";
import {PollConfiguration} from "snoostorm/out/util/Poll";
import {DEFAULT_POLLING_INTERVAL} from "../Common/interfaces";
import {mergeArr, parseDuration, random} from "../util";
import { Logger } from "winston";
import {ErrorWithCause} from "pony-cause";
type Awaitable<T> = Promise<T> | T;
@@ -18,11 +19,12 @@ interface RCBPollingOptions<T> extends SnooStormOptions {
}
interface RCBPollConfiguration<T> extends PollConfiguration<T>,RCBPollingOptions<T> {
get: () => Promise<Listing<T>>
}
export class SPoll<T extends object> extends Poll<T> {
identifier: keyof T;
getter: () => Awaitable<T[]>;
getter: () => Promise<Listing<T>>;
frequency;
running: boolean = false;
// intention of newStart is to make polling behavior such that only "new" items AFTER polling has started get emitted
@@ -82,6 +84,10 @@ export class SPoll<T extends object> extends Poll<T> {
// @ts-ignore
batch = await batch.fetchMore({amount: 100});
}
if(batch.length === 0 || batch.isFinished) {
// if nothing is returned we don't want to end up in an endless loop!
anyAlreadySeen = true;
}
for (const item of batch) {
const id = item[self.identifier];
if (self.processed.has(id)) {
@@ -99,7 +105,7 @@ export class SPoll<T extends object> extends Poll<T> {
}
page++;
}
const newItemMsg = `Found ${newItems.length} new items`;
const newItemMsg = `Found ${newItems.length} new items out of ${batch.length} returned`;
if(self.newStart) {
self.logger.debug(`${newItemMsg} but will ignore all on first start.`);
self.emit("listing", []);
@@ -113,6 +119,8 @@ export class SPoll<T extends object> extends Poll<T> {
// if everything succeeded then create a new timeout
self.createInterval();
} catch (err: any) {
self.running = false;
self.logger.error(new ErrorWithCause('Polling Interval stopped due to error encountered', {cause: err}));
self.emit('error', err);
}
}
@@ -120,15 +128,22 @@ export class SPoll<T extends object> extends Poll<T> {
}
// allow controlling newStart state
startInterval = (newStartState?: boolean) => {
startInterval = (newStartState?: boolean, msg?: string) => {
this.running = true;
if(newStartState !== undefined) {
this.newStart = newStartState;
}
const startMsg = `Polling Interval Started${msg !== undefined ? `: ${msg}` : ''}`;
this.logger.debug(startMsg)
this.createInterval();
}
end = () => {
end = (reason?: string) => {
let msg ='Stopping Polling Interval';
if(reason !== undefined) {
msg += `: ${reason}`;
}
this.logger.debug(msg);
this.running = false;
this.newStart = true;
super.end();

View File

@@ -20,11 +20,11 @@ import {
comparisonTextOp,
createCacheManager,
createHistoricalStatsDisplay, FAIL,
fetchExternalUrl,
fetchExternalUrl, filterCriteriaSummary,
formatNumber,
getActivityAuthorName,
getActivitySubredditName,
isStrongSubredditState,
isStrongSubredditState, isSubmission,
mergeArr,
parseDurationComparison,
parseExternalUrl,
@@ -55,7 +55,7 @@ import {
HistoricalStats,
HistoricalStatUpdateData,
SubredditHistoricalStats,
SubredditHistoricalStatsDisplay, ThirdPartyCredentialsJsonConfig,
SubredditHistoricalStatsDisplay, ThirdPartyCredentialsJsonConfig, FilterCriteriaResult,
} from "../Common/interfaces";
import UserNotes from "./UserNotes";
import Mustache from "mustache";
@@ -185,7 +185,7 @@ export class SubredditResources {
this.stats.cache.userNotes.requests++;
this.stats.cache.userNotes.miss += miss ? 1 : 0;
}
this.userNotes = new UserNotes(userNotesTTL, this.subreddit, this.logger, this.cache, cacheUseCB)
this.userNotes = new UserNotes(userNotesTTL, this.subreddit, this.client, this.logger, this.cache, cacheUseCB)
if(this.cacheType === 'memory' && this.cacheSettingsHash !== 'default') {
const min = Math.min(...([this.wikiTTL, this.authorTTL, this.submissionTTL, this.commentTTL, this.filterCriteriaTTL].filter(x => typeof x === 'number' && x !== 0) as number[]));
@@ -725,7 +725,7 @@ export class SubredditResources {
return await this.isSubreddit(await this.getSubreddit(item), state, this.logger);
}
async testAuthorCriteria(item: (Comment | Submission), authorOpts: AuthorCriteria, include = true) {
async testAuthorCriteria(item: (Comment | Submission), authorOpts: AuthorCriteria, include = true): Promise<FilterCriteriaResult<AuthorCriteria>> {
if (this.filterCriteriaTTL !== false) {
// in the criteria check we only actually use the `item` to get the author flair
// which will be the same for the entire subreddit
@@ -738,17 +738,18 @@ export class SubredditResources {
await this.stats.cache.authorCrit.identifierRequestCount.set(hash, (await this.stats.cache.authorCrit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.authorCrit.requestTimestamps.push(Date.now());
this.stats.cache.authorCrit.requests++;
let miss = false;
const cachedAuthorTest = await this.cache.wrap(hash, async () => {
miss = true;
return await testAuthorCriteria(item, authorOpts, include, this.userNotes);
}, {ttl: this.filterCriteriaTTL});
if (!miss) {
// need to check shape of result to invalidate old result type
let cachedAuthorTest: FilterCriteriaResult<AuthorCriteria> = await this.cache.get(hash) as FilterCriteriaResult<AuthorCriteria>;
if(cachedAuthorTest !== null && cachedAuthorTest !== undefined && typeof cachedAuthorTest === 'object') {
this.logger.debug(`Cache Hit: Author Check on ${userName} (Hash ${hash})`);
return cachedAuthorTest;
} else {
this.stats.cache.authorCrit.miss++;
cachedAuthorTest = await testAuthorCriteria(item, authorOpts, include, this.userNotes);
await this.cache.set(hash, cachedAuthorTest, {ttl: this.filterCriteriaTTL});
return cachedAuthorTest;
}
return cachedAuthorTest;
}
return await testAuthorCriteria(item, authorOpts, include, this.userNotes);
@@ -778,7 +779,7 @@ export class SubredditResources {
const cachedItem = await this.cache.get(hash);
if (cachedItem !== undefined && cachedItem !== null) {
this.logger.debug(`Cache Hit: Item Check on ${item.name} (Hash ${hash})`);
return cachedItem as boolean;
//return cachedItem as boolean;
}
const itemResult = await this.isItem(item, states, this.logger);
this.stats.cache.itemCrit.miss++;
@@ -872,7 +873,7 @@ export class SubredditResources {
if (crit[k] !== undefined) {
switch (k) {
case 'submissionState':
if(!(item instanceof Comment)) {
if(isSubmission(item)) {
log.warn('`submissionState` is not allowed in `itemIs` criteria when the main Activity is a Submission');
continue;
}
@@ -991,7 +992,7 @@ export class SubredditResources {
}
break;
case 'op':
if(item instanceof Submission) {
if(isSubmission(item)) {
log.warn(`On a Submission the 'op' property will always be true. Did you mean to use this on a comment instead?`);
break;
}
@@ -1003,7 +1004,7 @@ export class SubredditResources {
}
break;
case 'depth':
if(item instanceof Submission) {
if(isSubmission(item)) {
log.warn(`Cannot test for 'depth' on a Submission`);
break;
}
@@ -1015,6 +1016,36 @@ export class SubredditResources {
return false
}
break;
case 'flairTemplate':
case 'link_flair_text':
case 'link_flair_css_class':
if(asSubmission(item)) {
const subCrit = crit as SubmissionState;
let propertyValue: string | null;
if(k === 'flairTemplate') {
propertyValue = await item.link_flair_template_id;
} else {
propertyValue = await item[k];
}
const expectedValues = typeof subCrit[k] === 'string' ? [subCrit[k]] : (subCrit[k] as string[]);
const VALUEPass = () => {
for (const c of expectedValues) {
if (c === propertyValue) {
return true;
}
}
return false;
};
const valueResult = VALUEPass();
if(!valueResult) {
log.debug(`Failed: Expected => ${k} ${expectedValues.join(' OR ')} | Found => ${k}:${propertyValue}`);
return false;
}
break;
} else {
log.warn(`Cannot test for ${k} on Comment`);
break;
}
default:
// @ts-ignore
if (item[k] !== undefined) {
@@ -1296,43 +1327,67 @@ export const checkAuthorFilter = async (item: (Submission | Comment), filter: Au
const authLogger = logger.child({labels: ['Author Filter']}, mergeArr);
const {
include = [],
excludeCondition = 'OR',
excludeCondition = 'AND',
exclude = [],
} = filter;
let authorPass = null;
if (include.length > 0) {
let index = 1;
for (const auth of include) {
if (await resources.testAuthorCriteria(item, auth)) {
authLogger.verbose(`${PASS} => Inclusive author criteria matched`);
authLogger.debug(`Inclusive is always OR => At least one of ${include.length} matched`);
const critResult = await resources.testAuthorCriteria(item, auth);
const [summary, details] = filterCriteriaSummary(critResult);
if (critResult.passed) {
authLogger.verbose(`${PASS} => Inclusive Author Criteria ${index} => ${summary}`);
authLogger.debug(`Criteria Details: \n${details.join('\n')}`);
return [true, 'inclusive'];
} else {
authLogger.debug(`${FAIL} => Inclusive Author Criteria ${index} => ${summary}`);
authLogger.debug(`Criteria Details: \n${details.join('\n')}`);
}
index++;
}
authLogger.verbose(`${FAIL} => Inclusive author criteria not matched`);
authLogger.debug(`Inclusive is always OR => None of ${include.length} criteria matched`);
authLogger.verbose(`${FAIL} => No Inclusive Author Criteria matched`);
return [false, 'inclusive'];
}
if (exclude.length > 0) {
let index = 1;
const summaries: string[] = [];
for (const auth of exclude) {
const excludePass = await resources.testAuthorCriteria(item, auth, false);
if (excludePass && excludeCondition === 'OR') {
authorPass = true;
break;
} else if (!excludePass && excludeCondition === 'AND') {
authorPass = false;
break;
const critResult = await resources.testAuthorCriteria(item, auth, false);
const [summary, details] = filterCriteriaSummary(critResult);
if (critResult.passed) {
if(excludeCondition === 'OR') {
authLogger.verbose(`${PASS} (OR) => Exclusive Author Criteria ${index} => ${summary}`);
authLogger.debug(`Criteria Details: \n${details.join('\n')}`);
authorPass = true;
break;
}
summaries.push(summary);
authLogger.debug(`${PASS} (AND) => Exclusive Author Criteria ${index} => ${summary}`);
authLogger.debug(`Criteria Details: \n${details.join('\n')}`);
} else if (!critResult.passed) {
if(excludeCondition === 'AND') {
authLogger.verbose(`${FAIL} (AND) => Exclusive Author Criteria ${index} => ${summary}`);
authLogger.debug(`Criteria Details: \n${details.join('\n')}`);
authorPass = false;
break;
}
summaries.push(summary);
authLogger.debug(`${FAIL} (OR) => Exclusive Author Criteria ${index} => ${summary}`);
authLogger.debug(`Criteria Details: \n${details.join('\n')}`);
}
index++;
}
if(excludeCondition === 'AND' && authorPass === null) {
authorPass = true;
}
if (authorPass !== true) {
authLogger.verbose(`${FAIL} => Exclusive author criteria not matched`);
if(exclude.length > 1) {
authLogger.debug(excludeCondition === 'OR' ? `Exclusive OR => No criteria from set of ${exclude.length} matched` : `Exclusive AND => At least one of ${exclude.length} criteria did not match`)
if(excludeCondition === 'OR') {
authLogger.verbose(`${FAIL} => Exclusive author criteria not matched => ${summaries.length === 1 ? `${summaries[0]}` : '(many, see debug)'}`);
}
return [false, 'exclusive']
}
authLogger.verbose(`${PASS} => Exclusive author criteria matched`);
if(exclude.length > 1) {
authLogger.debug(excludeCondition === 'OR' ? `Exclusive OR => At least 1 in set of ${exclude.length} matched` : `Exclusive AND => All ${exclude.length} matched`)
} else if(excludeCondition === 'AND') {
authLogger.verbose(`${PASS} => Exclusive author criteria matched => ${summaries.length === 1 ? `${summaries[0]}` : '(many, see debug)'}`);
}
return [true, 'exclusive'];
}

View File

@@ -1,5 +1,5 @@
import dayjs, {Dayjs} from "dayjs";
import {Comment, RedditUser, WikiPage} from "snoowrap";
import Snoowrap, {Comment, RedditUser, WikiPage} from "snoowrap";
import {
COMMENT_URL_ID,
deflateUserNotes, getActivityAuthorName,
@@ -57,7 +57,7 @@ export type UserNotesConstants = Pick<any, "users" | "warnings">;
export class UserNotes {
notesTTL: number | false;
subreddit: Subreddit;
wiki: WikiPage;
client: Snoowrap;
moderators?: RedditUser[];
logger: Logger;
identifier: string;
@@ -70,14 +70,14 @@ export class UserNotes {
debounceCB: any;
batchCount: number = 0;
constructor(ttl: number | boolean, subreddit: Subreddit, logger: Logger, cache: Cache, cacheCB: Function) {
constructor(ttl: number | boolean, subreddit: Subreddit, client: Snoowrap, logger: Logger, cache: Cache, cacheCB: Function) {
this.notesTTL = ttl === true ? 0 : ttl;
this.subreddit = subreddit;
this.logger = logger;
this.wiki = subreddit.getWikiPage('usernotes');
this.identifier = `${this.subreddit.display_name}-usernotes`;
this.cache = cache;
this.cacheCB = cacheCB;
this.client = client;
}
async getUserNotes(user: RedditUser): Promise<UserNote[]> {
@@ -172,8 +172,8 @@ export class UserNotes {
// this.saveDebounce = undefined;
// }
// @ts-ignore
this.wiki = await this.subreddit.getWikiPage('usernotes').fetch();
const wikiContent = this.wiki.content_md;
const wiki = this.client.getSubreddit(this.subreddit.display_name).getWikiPage('usernotes');
const wikiContent = await wiki.content_md;
// TODO don't handle for versions lower than 6
const userNotes = JSON.parse(wikiContent);
@@ -197,6 +197,7 @@ export class UserNotes {
const blob = deflateUserNotes(payload.blob);
const wikiPayload = {text: JSON.stringify({...payload, blob}), reason: 'ContextBot edited usernotes'};
try {
const wiki = this.client.getSubreddit(this.subreddit.display_name).getWikiPage('usernotes');
if (this.notesTTL !== false) {
// DISABLED for now because if it fails throws an uncaught rejection
// and need to figured out how to handle this other than just logging (want to interrupt action flow too?)
@@ -226,12 +227,12 @@ export class UserNotes {
// this.logger.debug(`Saving Usernotes has been debounced for 5 seconds (${this.batchCount} batched)`)
// @ts-ignore
await this.subreddit.getWikiPage('usernotes').edit(wikiPayload);
await wiki.edit(wikiPayload);
await this.cache.set(this.identifier, payload, {ttl: this.notesTTL});
this.users = new Map();
} else {
// @ts-ignore
this.wiki = await this.subreddit.getWikiPage('usernotes').edit(wikiPayload);
await wiki.edit(wikiPayload);
}
return payload as RawUserNotesPayload;

View File

@@ -56,7 +56,7 @@ export const port = new commander.Option('-p, --port <port>', 'Port for web serv
export const sharedMod = new commander.Option('-q, --shareMod', `If enabled then all subreddits using the default settings to poll "unmoderated" or "modqueue" will retrieve results from a shared request to /r/mod (default: process.env.SHARE_MOD || false)`)
.argParser(parseBool);
export const operatorConfig = new commander.Option('-c, --operatorConfig <path>', 'An absolute path to a JSON file to load all parameters from (default: process.env.OPERATOR_CONFIG)');
export const operatorConfig = new commander.Option('-c, --operatorConfig <path>', 'An absolute path to a YAML/JSON file to load all parameters from (default: process.env.OPERATOR_CONFIG | CWD/config.yaml)');
export const getUniversalWebOptions = (): commander.Option[] => {
return [

View File

@@ -1,22 +1,39 @@
import {StatusCodeError, RequestError} from "../Common/interfaces";
import {RateLimitError, RequestError, StatusCodeError} from 'snoowrap/dist/errors';
import ExtendableError from "es6-error";
import {ErrorWithCause} from "pony-cause";
export const isRateLimitError = (err: any) => {
return typeof err === 'object' && err.name === 'RateLimitError';
export const isRateLimitError = (err: any): err is RateLimitError => {
return isRequestError(err) && err.name === 'RateLimitError';
}
export const isScopeError = (err: any): boolean => {
if(typeof err === 'object' && err.name === 'StatusCodeError' && err.response !== undefined) {
if(isStatusError(err)) {
const authHeader = err.response.headers['www-authenticate'];
return authHeader !== undefined && authHeader.includes('insufficient_scope');
}
return false;
}
export const getScopeError = (err: any): string | undefined => {
if(isScopeError(err)) {
return err.response.headers['www-authenticate'];
}
return undefined;
}
export const isStatusError = (err: any): err is StatusCodeError => {
return typeof err === 'object' && err.name === 'StatusCodeError' && err.response !== undefined;
return isRequestError(err) && err.name === 'StatusCodeError';
}
export const isRequestError = (err: any): err is RequestError => {
return typeof err === 'object' && err.name === 'RequestError' && err.response !== undefined;
return typeof err === 'object' && err.response !== undefined;
}
export class SimpleError extends ExtendableError {
}
export class CMError extends ErrorWithCause {
logged: boolean = false;
}

View File

@@ -1,7 +0,0 @@
import ExtendableError from "es6-error";
class SimpleError extends ExtendableError {
}
export default SimpleError;

View File

@@ -8,30 +8,31 @@ import he from "he";
import {RuleResult, UserNoteCriteria} from "../Rule";
import {
ActivityWindowType, CommentState, DomainInfo,
DurationVal,
DurationVal, FilterCriteriaPropertyResult, FilterCriteriaResult,
SubmissionState,
TypedActivityStates
} from "../Common/interfaces";
import {
asUserNoteCriteria,
compareDurationValue,
comparisonTextOp, escapeRegex, getActivityAuthorName,
isActivityWindowCriteria,
comparisonTextOp, escapeRegex, formatNumber, getActivityAuthorName,
isActivityWindowCriteria, isUserNoteCriteria,
normalizeName,
parseDuration,
parseDurationComparison,
parseGenericValueComparison,
parseGenericValueOrPercentComparison,
parseRuleResultsToMarkdownSummary, parseStringToRegex,
parseSubredditName,
truncateStringToLength, windowToActivityWindowCriteria
parseSubredditName, removeUndefinedKeys,
truncateStringToLength, userNoteCriteriaSummary, windowToActivityWindowCriteria
} from "../util";
import UserNotes from "../Subreddit/UserNotes";
import {Logger} from "winston";
import InvalidRegexError from "./InvalidRegexError";
import SimpleError from "./SimpleError";
import {AuthorCriteria} from "../Author/Author";
import {URL} from "url";
import {isStatusError} from "./Errors";
import {SimpleError, isStatusError} from "./Errors";
import {Dictionary, ElementOf, SafeDictionary} from "ts-essentials";
export const BOT_LINK = 'https://www.reddit.com/r/ContextModBot/comments/otz396/introduction_to_contextmodbot';
@@ -354,24 +355,54 @@ export const renderContent = async (template: string, data: (Submission | Commen
return he.decode(rendered);
}
export const testAuthorCriteria = async (item: (Comment | Submission), authorOpts: AuthorCriteria, include = true, userNotes: UserNotes) => {
const {shadowBanned, ...rest} = authorOpts;
type AuthorCritPropHelper = SafeDictionary<FilterCriteriaPropertyResult<AuthorCriteria>, keyof AuthorCriteria>;
type RequiredAuthorCrit = Required<AuthorCriteria>;
if(shadowBanned !== undefined) {
export const testAuthorCriteria = async (item: (Comment | Submission), authorOpts: AuthorCriteria, include = true, userNotes: UserNotes): Promise<FilterCriteriaResult<AuthorCriteria>> => {
const definedAuthorOpts = (removeUndefinedKeys(authorOpts) as RequiredAuthorCrit);
const propResultsMap = Object.entries(definedAuthorOpts).reduce((acc: AuthorCritPropHelper, [k, v]) => {
const key = (k as keyof AuthorCriteria);
let ex;
if (Array.isArray(v)) {
ex = v.map(x => {
if (asUserNoteCriteria(x)) {
return userNoteCriteriaSummary(x);
}
return x;
});
} else {
ex = [v];
}
acc[key] = {
property: key,
expected: ex,
behavior: include ? 'include' : 'exclude',
};
return acc;
}, {});
const {shadowBanned} = authorOpts;
if (shadowBanned !== undefined) {
try {
// @ts-ignore
await item.author.fetch();
// user is not shadowbanned
// if criteria specifies they SHOULD be shadowbanned then return false now
if(shadowBanned) {
return false;
if (shadowBanned) {
propResultsMap.shadowBanned!.found = false;
propResultsMap.shadowBanned!.passed = false;
}
} catch (err: any) {
if(isStatusError(err) && err.statusCode === 404) {
if (isStatusError(err) && err.statusCode === 404) {
// user is shadowbanned
// if criteria specifies they should not be shadowbanned then return false now
if(!shadowBanned) {
return false;
if (!shadowBanned) {
propResultsMap.shadowBanned!.found = true;
propResultsMap.shadowBanned!.passed = false;
}
} else {
throw err;
@@ -379,17 +410,30 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
}
}
try {
const authorName = getActivityAuthorName(item.author);
for (const k of Object.keys(rest)) {
// @ts-ignore
if (authorOpts[k] !== undefined) {
if (propResultsMap.shadowBanned === undefined || propResultsMap.shadowBanned.passed === undefined) {
try {
const authorName = getActivityAuthorName(item.author);
const keys = Object.keys(propResultsMap) as (keyof AuthorCriteria)[]
let shouldContinue = true;
for (const k of keys) {
if (k === 'shadowBanned') {
// we have already taken care of this with shadowban check above
continue;
}
const authorOptVal = definedAuthorOpts[k];
//if (authorOpts[k] !== undefined) {
switch (k) {
case 'name':
const nameVal = authorOptVal as RequiredAuthorCrit['name'];
const authPass = () => {
// @ts-ignore
for (const n of authorOpts[k]) {
for (const n of nameVal) {
if (n.toLowerCase() === authorName.toLowerCase()) {
return true;
}
@@ -397,8 +441,10 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
return false;
}
const authResult = authPass();
if ((include && !authResult) || (!include && authResult)) {
return false;
propResultsMap.name!.found = authorName;
propResultsMap.name!.passed = !((include && !authResult) || (!include && authResult));
if (!propResultsMap.name!.passed) {
shouldContinue = false;
}
break;
case 'flairCssClass':
@@ -413,8 +459,10 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
return false;
}
const cssResult = cssPass();
if ((include && !cssResult) || (!include && cssResult)) {
return false;
propResultsMap.flairCssClass!.found = css;
propResultsMap.flairCssClass!.passed = !((include && !cssResult) || (!include && cssResult));
if (!propResultsMap.flairCssClass!.passed) {
shouldContinue = false;
}
break;
case 'flairText':
@@ -429,68 +477,103 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
return false;
};
const textResult = textPass();
if ((include && !textResult) || (!include && textResult)) {
propResultsMap.flairText!.found = text;
propResultsMap.flairText!.passed = !((include && !textResult) || (!include && textResult));
if (!propResultsMap.flairText!.passed) {
shouldContinue = false;
}
break;
case 'flairTemplate':
const templateId = await item.author_flair_template_id;
const templatePass = () => {
// @ts-ignore
for (const c of authorOpts[k]) {
if (c === templateId) {
return true;
}
}
return false;
};
const templateResult = templatePass();
propResultsMap.flairTemplate!.found = templateId;
propResultsMap.flairTemplate!.passed = !((include && !templateResult) || (!include && templateResult));
if (!propResultsMap.flairTemplate!.passed) {
shouldContinue = false;
}
break;
case 'isMod':
const mods: RedditUser[] = await item.subreddit.getModerators();
const isModerator = mods.some(x => x.name === authorName);
const modMatch = authorOpts.isMod === isModerator;
if ((include && !modMatch) || (!include && modMatch)) {
return false;
propResultsMap.isMod!.found = isModerator;
propResultsMap.isMod!.passed = !((include && !modMatch) || (!include && modMatch));
if (!propResultsMap.isMod!.passed) {
shouldContinue = false;
}
break;
case 'age':
const ageTest = compareDurationValue(parseDurationComparison(await authorOpts.age as string), dayjs.unix(await item.author.created));
if ((include && !ageTest) || (!include && ageTest)) {
return false;
const authorAge = dayjs.unix(await item.author.created);
const ageTest = compareDurationValue(parseDurationComparison(await authorOpts.age as string), authorAge);
propResultsMap.age!.found = authorAge.fromNow(true);
propResultsMap.age!.passed = !((include && !ageTest) || (!include && ageTest));
if (!propResultsMap.age!.passed) {
shouldContinue = false;
}
break;
case 'linkKarma':
// @ts-ignore
const tk = await item.author.total_karma as number;
const lkCompare = parseGenericValueOrPercentComparison(await authorOpts.linkKarma as string);
let lkMatch;
if (lkCompare.isPercent) {
// @ts-ignore
const tk = await item.author.total_karma as number;
lkMatch = comparisonTextOp(item.author.link_karma / tk, lkCompare.operator, lkCompare.value / 100);
} else {
lkMatch = comparisonTextOp(item.author.link_karma, lkCompare.operator, lkCompare.value);
}
if ((include && !lkMatch) || (!include && lkMatch)) {
return false;
propResultsMap.linkKarma!.found = tk;
propResultsMap.linkKarma!.passed = !((include && !lkMatch) || (!include && lkMatch));
if (!propResultsMap.linkKarma!.passed) {
shouldContinue = false;
}
break;
case 'commentKarma':
// @ts-ignore
const ck = await item.author.total_karma as number;
const ckCompare = parseGenericValueOrPercentComparison(await authorOpts.commentKarma as string);
let ckMatch;
if (ckCompare.isPercent) {
// @ts-ignore
const ck = await item.author.total_karma as number;
ckMatch = comparisonTextOp(item.author.comment_karma / ck, ckCompare.operator, ckCompare.value / 100);
} else {
ckMatch = comparisonTextOp(item.author.comment_karma, ckCompare.operator, ckCompare.value);
}
if ((include && !ckMatch) || (!include && ckMatch)) {
return false;
propResultsMap.commentKarma!.found = ck;
propResultsMap.commentKarma!.passed = !((include && !ckMatch) || (!include && ckMatch));
if (!propResultsMap.commentKarma!.passed) {
shouldContinue = false;
}
break;
case 'totalKarma':
// @ts-ignore
const totalKarma = await item.author.total_karma as number;
const tkCompare = parseGenericValueComparison(await authorOpts.totalKarma as string);
if (tkCompare.isPercent) {
throw new SimpleError(`'totalKarma' value on AuthorCriteria cannot be a percentage`);
}
// @ts-ignore
const totalKarma = await item.author.total_karma as number;
const tkMatch = comparisonTextOp(totalKarma, tkCompare.operator, tkCompare.value);
if ((include && !tkMatch) || (!include && tkMatch)) {
return false;
propResultsMap.totalKarma!.found = totalKarma;
propResultsMap.totalKarma!.passed = !((include && !tkMatch) || (!include && tkMatch));
if (!propResultsMap.totalKarma!.passed) {
shouldContinue = false;
}
break;
case 'verified':
const vMatch = await item.author.has_verified_mail === authorOpts.verified as boolean;
if ((include && !vMatch) || (!include && vMatch)) {
return false;
const verified = await item.author.has_verified_mail;
const vMatch = verified === authorOpts.verified as boolean;
propResultsMap.verified!.found = verified;
propResultsMap.verified!.passed = !((include && !vMatch) || (!include && vMatch));
if (!propResultsMap.verified!.passed) {
shouldContinue = false;
}
break;
case 'description':
@@ -498,25 +581,32 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
const desc = await item.author.subreddit?.display_name.public_description;
const dVals = authorOpts[k] as string[];
let passed = false;
for(const val of dVals) {
let passReg;
for (const val of dVals) {
let reg = parseStringToRegex(val, 'i');
if(reg === undefined) {
if (reg === undefined) {
reg = parseStringToRegex(`/.*${escapeRegex(val.trim())}.*/`, 'i');
if(reg === undefined) {
if (reg === undefined) {
throw new SimpleError(`Could not convert 'description' value to a valid regex: ${authorOpts[k] as string}`);
}
}
if(reg.test(desc)) {
if (reg.test(desc)) {
passed = true;
passReg = reg.toString();
break;
}
}
if(!passed) {
return false;
propResultsMap.description!.found = typeof desc === 'string' ? truncateStringToLength(50)(desc) : desc;
propResultsMap.description!.passed = !((include && !passed) || (!include && passed));
if (!propResultsMap.description!.passed) {
shouldContinue = false;
} else {
propResultsMap.description!.reason = `Matched with: ${passReg as string}`;
}
break;
case 'userNotes':
const notes = await userNotes.getUserNotes(item.author);
let foundNoteResult: string[] = [];
const notePass = () => {
for (const noteCriteria of authorOpts[k] as UserNoteCriteria[]) {
const {count = '>= 1', search = 'current', type} = noteCriteria;
@@ -529,8 +619,14 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
const order = extra.includes('asc') ? 'ascending' : 'descending';
switch (search) {
case 'current':
if (notes.length > 0 && notes[notes.length - 1].noteType === type) {
return true;
if (notes.length > 0) {
const currentNoteType = notes[notes.length - 1].noteType;
foundNoteResult.push(`Current => ${currentNoteType}`);
if (currentNoteType === type) {
return true;
}
} else {
foundNoteResult.push('No notes present');
}
break;
case 'consecutive':
@@ -549,39 +645,64 @@ export const testAuthorCriteria = async (item: (Comment | Submission), authorOpt
if (isPercent) {
throw new SimpleError(`When comparing UserNotes with 'consecutive' search 'count' cannot be a percentage. Given: ${count}`);
}
foundNoteResult.push(`Found ${currCount} ${type} consecutively`);
if (comparisonTextOp(currCount, operator, value)) {
return true;
}
}
break;
case 'total':
const filteredNotes = notes.filter(x => x.noteType === type);
if (isPercent) {
if (comparisonTextOp(notes.filter(x => x.noteType === type).length / notes.length, operator, value / 100)) {
// avoid divide by zero
const percent = notes.length === 0 ? 0 : filteredNotes.length / notes.length;
foundNoteResult.push(`${formatNumber(percent)}% are ${type}`);
if (comparisonTextOp(percent, operator, value / 100)) {
return true;
}
} else {
foundNoteResult.push(`${filteredNotes.length} are ${type}`);
if (comparisonTextOp(notes.filter(x => x.noteType === type).length, operator, value)) {
return true;
}
} else if (comparisonTextOp(notes.filter(x => x.noteType === type).length, operator, value)) {
return true;
}
break;
}
}
return false;
}
const noteResult = notePass();
if ((include && !noteResult) || (!include && noteResult)) {
return false;
propResultsMap.userNotes!.found = foundNoteResult.join(' | ');
propResultsMap.userNotes!.passed = !((include && !noteResult) || (!include && noteResult));
if (!propResultsMap.userNotes!.passed) {
shouldContinue = false;
}
break;
}
//}
if (!shouldContinue) {
break;
}
}
} catch (err: any) {
if (isStatusError(err) && err.statusCode === 404) {
throw new SimpleError('Reddit returned a 404 while trying to retrieve User profile. It is likely this user is shadowbanned.');
} else {
throw err;
}
}
return true;
} catch (err: any) {
if(isStatusError(err) && err.statusCode === 404) {
throw new SimpleError('Reddit returned a 404 while trying to retrieve User profile. It is likely this user is shadowbanned.');
} else {
throw err;
}
}
// gather values and determine overall passed
const propResults = Object.values(propResultsMap);
const passed = propResults.filter(x => typeof x.passed === 'boolean').every(x => x.passed === true);
return {
behavior: include ? 'include' : 'exclude',
criteria: authorOpts,
propertyResults: propResults,
passed,
};
}
export interface ItemContent {

View File

@@ -1,19 +1,28 @@
import {labelledFormat, logLevels} from "../util";
import winston, {Logger} from "winston";
import {DuplexTransport} from "winston-duplex";
import {LoggerFactoryOptions} from "../Common/interfaces";
import process from "process";
import path from "path";
const {transports} = winston;
export const getLogger = (options: any, name = 'app'): Logger => {
export const getLogger = (options: LoggerFactoryOptions, name = 'app'): Logger => {
if(!winston.loggers.has(name)) {
const {
path,
level,
additionalTransports = [],
defaultLabel = 'App',
file: {
dirname,
...fileRest
},
console,
stream
} = options || {};
const consoleTransport = new transports.Console({
...console,
handleExceptions: true,
handleRejections: true,
});
@@ -28,21 +37,39 @@ export const getLogger = (options: any, name = 'app'): Logger => {
objectMode: true,
},
name: 'duplex',
dump: false,
handleExceptions: true,
handleRejections: true,
...stream,
dump: false,
}),
...additionalTransports,
];
if (path !== undefined && path !== '') {
if (dirname !== undefined && dirname !== '' && dirname !== null) {
let realDir: string | undefined;
if(typeof dirname === 'boolean') {
if(!dirname) {
realDir = undefined;
} else {
realDir = path.resolve(__dirname, '../../logs')
}
} else if(dirname === 'true') {
realDir = path.resolve(__dirname, '../../logs')
} else if(dirname === 'false') {
realDir = undefined;
} else {
realDir = dirname;
}
const rotateTransport = new winston.transports.DailyRotateFile({
dirname: path,
createSymlink: true,
symlinkName: 'contextBot-current.log',
filename: 'contextBot-%DATE%.log',
datePattern: 'YYYY-MM-DD',
maxSize: '5m',
dirname: realDir,
...fileRest,
handleExceptions: true,
handleRejections: true,
});

View File

@@ -9,10 +9,10 @@ import {Strategy as CustomStrategy} from 'passport-custom';
import {OperatorConfig, BotConnection, LogInfo} from "../../Common/interfaces";
import {
buildCachePrefix,
createCacheManager, filterLogBySubreddit,
createCacheManager, defaultFormat, filterLogBySubreddit,
formatLogLineToHtml,
intersect, isLogLineMinLevel,
LogEntry, parseFromJsonOrYamlToObject, parseInstanceLogInfoName, parseInstanceLogName,
LogEntry, parseInstanceLogInfoName, parseInstanceLogName, parseRedditEntity,
parseSubredditLogName, permissions,
randomId, sleep, triggeredIndicator
} from "../../util";
@@ -24,7 +24,6 @@ import EventEmitter from "events";
import stream, {Readable, Writable, Transform} from "stream";
import winston from "winston";
import tcpUsed from "tcp-port-used";
import SimpleError from "../../Utils/SimpleError";
import http from "http";
import jwt from 'jsonwebtoken';
import {Server as SocketServer} from "socket.io";
@@ -47,6 +46,10 @@ import Autolinker from "autolinker";
import path from "path";
import {ExtendedSnoowrap} from "../../Utils/SnoowrapClients";
import ClientUser from "../Common/User/ClientUser";
import {BotStatusResponse} from "../Common/interfaces";
import {TransformableInfo} from "logform";
import {SimpleError} from "../../Utils/Errors";
import {ErrorWithCause} from "pony-cause";
const emitter = new EventEmitter();
@@ -318,32 +321,36 @@ const webClient = async (options: OperatorConfig) => {
});
// @ts-ignore
const user = await client.getMe();
const userName = `u/${user.name}`;
// @ts-ignore
await webCache.del(`invite:${req.session.inviteId}`);
let data: any = {
accessToken: client.accessToken,
refreshToken: client.refreshToken,
userName,
};
if(invite.instance !== undefined) {
const bot = cmInstances.find(x => x.friendly === invite.instance);
if(bot !== undefined) {
const botPayload: any = {
overwrite: invite.overwrite === true,
name: userName,
credentials: {
accessToken: client.accessToken,
refreshToken: client.refreshToken,
clientId: invite.clientId,
clientSecret: invite.clientSecret,
reddit: {
accessToken: client.accessToken,
refreshToken: client.refreshToken,
clientId: invite.clientId,
clientSecret: invite.clientSecret,
}
}
};
if(invite.subreddit !== undefined) {
botPayload.subreddits = {names: [invite.subreddit]};
if(invite.subreddits !== undefined && invite.subreddits.length > 0) {
botPayload.subreddits = {names: invite.subreddits};
}
const botAddResult: any = await addBot(bot, {name: invite.creator}, botPayload);
let msg = botAddResult.success ? 'Bot successfully added to running instance' : 'An error occurred while adding the bot to the instance';
if(botAddResult.success) {
msg = `${msg}. ${botAddResult.stored === false ? 'Additionally, the bot was not stored in config so the operator will need to add it manually to persist after a restart.' : ''}`;
}
data.addResult = msg;
// stored
// success
data = {...data, ...botAddResult};
// @ts-ignore
req.session.destroy();
req.logout();
@@ -388,12 +395,13 @@ const webClient = async (options: OperatorConfig) => {
let token = randomId();
interface InviteData {
permissions: string[],
subreddit?: string,
subreddits?: string,
instance?: string,
clientId: string
clientSecret: string
redirectUri: string
creator: string
overwrite?: boolean
}
const helperAuthed = async (req: express.Request, res: express.Response, next: Function) => {
@@ -421,7 +429,8 @@ const webClient = async (options: OperatorConfig) => {
redirectUri,
clientId,
clientSecret,
token: req.isAuthenticated() && req.user?.clientData?.webOperator ? token : undefined
token: req.isAuthenticated() && req.user?.clientData?.webOperator ? token : undefined,
instances: cmInstances.filter(x => req.user?.isInstanceOperator(x)).map(x => x.friendly),
});
});
@@ -449,7 +458,7 @@ const webClient = async (options: OperatorConfig) => {
clientSecret: ce,
redirect: redir,
instance,
subreddit,
subreddits,
code,
} = req.body as any;
@@ -474,7 +483,7 @@ const webClient = async (options: OperatorConfig) => {
clientSecret: (ce || clientSecret).trim(),
redirectUri: redir.trim(),
instance,
subreddit,
subreddits: subreddits.trim() === '' ? [] : subreddits.split(',').map((x: string) => parseRedditEntity(x).name),
creator: (req.user as Express.User).name,
}, {ttl: invitesMaxAge * 1000});
return res.send(inviteId);
@@ -519,6 +528,8 @@ const webClient = async (options: OperatorConfig) => {
const cmInstances: CMInstance[] = [];
let init = false;
const formatter = defaultFormat();
const formatTransform = formatter.transform as (info: TransformableInfo, opts?: any) => TransformableInfo;
let server: http.Server,
io: SocketServer;
@@ -559,7 +570,9 @@ const webClient = async (options: OperatorConfig) => {
limit: sessionData.limit,
sort: sessionData.sort,
level: sessionData.level,
stream: true
stream: true,
streamObjects: true,
formatted: false,
}
});
@@ -583,8 +596,24 @@ const webClient = async (options: OperatorConfig) => {
}
});
delim.on('data', (c: any) => {
io.to(sessionId).emit('log', formatLogLineToHtml(c.toString()));
const logObj = JSON.parse(c) as LogInfo;
let subredditMessage;
let allMessage;
if(logObj.subreddit !== undefined) {
const {subreddit, bot, ...rest} = logObj
// @ts-ignore
subredditMessage = formatLogLineToHtml(formatter.transform(rest)[MESSAGE], rest.timestamp);
}
if(logObj.bot !== undefined) {
const {bot, ...rest} = logObj
// @ts-ignore
allMessage = formatLogLineToHtml(formatter.transform(rest)[MESSAGE], rest.timestamp);
}
// @ts-ignore
let formattedMessage = formatLogLineToHtml(formatter.transform(logObj)[MESSAGE], logObj.timestamp);
io.to(sessionId).emit('log', {...logObj, subredditMessage, allMessage, formattedMessage});
});
gotStream.once('retry', retryFn);
@@ -602,9 +631,7 @@ const webClient = async (options: OperatorConfig) => {
server = await app.listen(port);
io = new SocketServer(server);
} catch (err: any) {
logger.error('Error occurred while initializing web or socket.io server', err);
err.logged = true;
throw err;
throw new ErrorWithCause('[Web] Error occurred while initializing web or socket.io server', {cause: err});
}
logger.info(`Web UI started: http://localhost:${port}`, {label: ['Web']});
@@ -816,12 +843,39 @@ const webClient = async (options: OperatorConfig) => {
// return acc.concat({...curr, isOperator: instanceOperator});
// },[]);
const isOp = req.user?.isInstanceOperator(instance);
res.render('status', {
instances: shownInstances,
bots: resp.bots,
bots: resp.bots.map((x: BotStatusResponse) => {
const {subreddits = []} = x;
const subredditsWithSimpleLogs = subreddits.map(y => {
let transformedLogs: string[];
if(y.name === 'All') {
// only need to remove bot name here
transformedLogs = (y.logs as LogInfo[]).map((z: LogInfo) => {
const {bot, ...rest} = z;
// @ts-ignore
return formatLogLineToHtml(formatter.transform(rest)[MESSAGE] as string, rest.timestamp);
});
} else {
transformedLogs = (y.logs as LogInfo[]).map((z: LogInfo) => {
const {bot, subreddit, ...rest} = z;
// @ts-ignore
return formatLogLineToHtml(formatter.transform(rest)[MESSAGE] as string, rest.timestamp);
});
}
y.logs = transformedLogs;
return y;
});
return {...x, subreddits: subredditsWithSimpleLogs};
}),
botId: (req.instance as CMInstance).friendly,
instanceId: (req.instance as CMInstance).friendly,
isOperator: req.user?.isInstanceOperator(instance),
isOperator: isOp,
system: isOp ? {
logs: resp.system.logs,
} : undefined,
operators: instance.operators.join(', '),
operatorDisplay: instance.operatorDisplay,
logSettings: {
@@ -1077,8 +1131,9 @@ const webClient = async (options: OperatorConfig) => {
try {
const token = createToken(bot, userPayload);
const resp = await got.post(`${bot.normalUrl}/bot`, {
body: botPayload,
body: JSON.stringify(botPayload),
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${token}`,
}
}).json() as object;

View File

@@ -1,5 +1,5 @@
import {RunningState} from "../../Subreddit/Manager";
import {ManagerStats} from "../../Common/interfaces";
import {LogInfo, ManagerStats} from "../../Common/interfaces";
export interface BotStats {
startedAtHuman: string,
@@ -15,7 +15,7 @@ export interface BotStats {
export interface SubredditDataResponse {
name: string
logs: string[]
logs: (string|LogInfo)[]
botState: RunningState
eventsState: RunningState
queueState: RunningState

View File

@@ -5,7 +5,7 @@ import {formatNumber} from "../../util";
import Bot from "../../Bot";
export const opStats = (bot: Bot): BotStats => {
const limitReset = dayjs(bot.client.ratelimitExpiration);
const limitReset = bot.client === undefined ? dayjs() : dayjs(bot.client.ratelimitExpiration);
const nextHeartbeat = bot.nextHeartbeat !== undefined ? bot.nextHeartbeat.local().format('MMMM D, YYYY h:mm A Z') : 'N/A';
const nextHeartbeatHuman = bot.nextHeartbeat !== undefined ? `in ${dayjs.duration(bot.nextHeartbeat.diff(dayjs())).humanize()}` : 'N/A'
return {

View File

@@ -3,6 +3,8 @@ import {BotInstanceConfig} from "../../../../../Common/interfaces";
import {authUserCheck} from "../../../middleware";
import Bot from "../../../../../Bot";
import LoggedError from "../../../../../Utils/LoggedError";
import {open} from 'fs/promises';
import {buildBotConfig} from "../../../../../ConfigBuilder";
const addBot = () => {
@@ -16,9 +18,37 @@ const addBot = () => {
return res.status(401).send("Must be an Operator to use this route");
}
const newBot = new Bot(req.body as BotInstanceConfig, req.botApp.logger);
if (!req.botApp.fileConfig.isWriteable) {
return res.status(409).send('Operator config is not writeable');
}
const {overwrite = false, ...botData} = req.body;
// check if bot is new or overwriting
let existingBot = req.botApp.bots.find(x => x.botAccount === botData.name);
// spin down existing
if (existingBot !== undefined) {
const {
bots: botsFromConfig = []
} = req.botApp.fileConfig.document.toJS();
if(botsFromConfig.length === 0 || botsFromConfig.some(x => x.name !== botData.name)) {
req.botApp.logger.warn('Overwriting existing bot with the same name BUT this bot does not exist in the operator CONFIG FILE. You should check how you have provided config before next start or else this bot may be started twice (once from file, once from arg/env)');
}
await existingBot.destroy('system');
req.botApp.bots.filter(x => x.botAccount !== botData.name);
}
req.botApp.fileConfig.document.addBot(botData);
const handle = await open(req.botApp.fileConfig.document.location as string, 'w');
await handle.writeFile(req.botApp.fileConfig.document.toString());
await handle.close();
const newBot = new Bot(buildBotConfig(botData, req.botApp.config), req.botApp.logger);
req.botApp.bots.push(newBot);
let result: any = {stored: true};
let result: any = {stored: true, success: true};
try {
if (newBot.error !== undefined) {
result.error = newBot.error;
@@ -26,13 +56,14 @@ const addBot = () => {
}
await newBot.testClient();
await newBot.buildManagers();
newBot.runManagers('user').catch((err) => {
newBot.runManagers('system').catch((err) => {
req.botApp.logger.error(`Unexpected error occurred while running Bot ${newBot.botName}. Bot must be re-built to restart`);
if (!err.logged || !(err instanceof LoggedError)) {
req.botApp.logger.error(err);
}
});
} catch (err: any) {
result.success = false;
if (newBot.error === undefined) {
newBot.error = err.message;
result.error = err.message;

View File

@@ -27,7 +27,7 @@ const logs = (subLogMap: Map<string, LogEntry[]>) => {
const userName = req.user?.name as string;
const isOperator = req.user?.isInstanceOperator(req.botApp);
const realManagers = req.botApp.bots.map(x => req.user?.accessibleSubreddits(x).map(x => x.displayLabel)).flat() as string[];
const {level = 'verbose', stream, limit = 200, sort = 'descending', streamObjects = false} = req.query;
const {level = 'verbose', stream, limit = 200, sort = 'descending', streamObjects = false, formatted = true} = req.query;
if (stream) {
const origin = req.header('X-Forwarded-For') ?? req.header('host');
try {
@@ -36,9 +36,16 @@ const logs = (subLogMap: Map<string, LogEntry[]>) => {
const {subreddit: subName} = log;
if (isOperator || (subName !== undefined && (realManagers.includes(subName) || subName.includes(userName)))) {
if(streamObjects) {
res.write(`${JSON.stringify(log)}\r\n`);
} else {
let obj: any = log;
if(!formatted) {
const {[MESSAGE]: fMessage, ...rest} = log;
obj = rest;
}
res.write(`${JSON.stringify(obj)}\r\n`);
} else if(formatted) {
res.write(`${log[MESSAGE]}\r\n`)
} else {
res.write(`${log.message}\r\n`)
}
}
}
@@ -62,11 +69,17 @@ const logs = (subLogMap: Map<string, LogEntry[]>) => {
operator: isOperator,
user: userName,
sort: sort as 'descending' | 'ascending',
limit: Number.parseInt((limit as string))
limit: Number.parseInt((limit as string)),
returnType: 'object',
});
const subArr: any = [];
logs.forEach((v: string[], k: string) => {
subArr.push({name: k, logs: v.join('')});
logs.forEach((v: (string|LogInfo)[], k: string) => {
let logs = v as LogInfo[];
let output: any[] = formatted ? logs : logs.map((x) => {
const {[MESSAGE]: fMessage, ...rest} = x;
return rest;
})
subArr.push({name: k, logs: output});
});
return res.json(subArr);
}

View File

@@ -44,13 +44,15 @@ const status = () => {
bots = req.user?.accessibleBots(req.botApp.bots) as Bot[];
}
const botResponses: BotStatusResponse[] = [];
let index = 1;
for(const b of bots) {
botResponses.push(await botStatResponse(b, req, botLogMap));
botResponses.push(await botStatResponse(b, req, botLogMap, index));
index++;
}
const system: any = {};
if(req.user?.isInstanceOperator(req.botApp)) {
// @ts-ignore
system.logs = filterLogBySubreddit(new Map([['app', systemLogs]]), [], {level, sort, limit, operator: true}).get('app');
system.logs = filterLogBySubreddit(new Map([['app', systemLogs]]), [], {level, sort, limit, operator: true}).get('all');
}
const response = {
bots: botResponses,
@@ -59,7 +61,7 @@ const status = () => {
return res.json(response);
}
const botStatResponse = async (bot: Bot, req: Request, botLogMap: Map<string, Map<string, LogEntry[]>>) => {
const botStatResponse = async (bot: Bot, req: Request, botLogMap: Map<string, Map<string, LogEntry[]>>, index: number) => {
const {
//subreddits = [],
//user: userVal,
@@ -77,7 +79,8 @@ const status = () => {
user,
// @ts-ignore
sort,
limit: Number.parseInt((limit as string))
limit: Number.parseInt((limit as string)),
returnType: 'object'
});
const subManagerData = [];
@@ -295,8 +298,8 @@ const status = () => {
startedAt: bot.startedAt.local().format('MMMM D, YYYY h:mm A Z'),
running: bot.running,
error: bot.error,
account: bot.botAccount as string,
name: bot.botName as string,
account: (bot.botAccount as string) ?? `Bot ${index}`,
name: (bot.botName as string) ?? `Bot ${index}`,
...opStats(bot),
},
subreddits: [allManagerData, ...subManagerData],

View File

@@ -16,9 +16,8 @@ import {
} from "../../util";
import {getLogger} from "../../Utils/loggerFactory";
import LoggedError from "../../Utils/LoggedError";
import {Invokee, LogInfo, OperatorConfig} from "../../Common/interfaces";
import {Invokee, LogInfo, OperatorConfigWithFileContext} from "../../Common/interfaces";
import http from "http";
import SimpleError from "../../Utils/SimpleError";
import {heartbeat} from "./routes/authenticated/applicationRoutes";
import logs from "./routes/authenticated/user/logs";
import status from './routes/authenticated/user/status';
@@ -30,6 +29,8 @@ import Bot from "../../Bot";
import addBot from "./routes/authenticated/user/addBot";
import dayjs from "dayjs";
import ServerUser from "../Common/User/ServerUser";
import {SimpleError} from "../../Utils/Errors";
import {ErrorWithCause} from "pony-cause";
const server = addAsync(express());
server.use(bodyParser.json());
@@ -52,7 +53,7 @@ const botLogMap: Map<string, Map<string, LogEntry[]>> = new Map();
const botSubreddits: Map<string, string[]> = new Map();
const rcbServer = async function (options: OperatorConfig) {
const rcbServer = async function (options: OperatorConfigWithFileContext) {
const {
operator: {
@@ -116,9 +117,7 @@ const rcbServer = async function (options: OperatorConfig) {
httpServer = await server.listen(port);
io = new SocketServer(httpServer);
} catch (err: any) {
logger.error('Error occurred while initializing web or socket.io server', err);
err.logged = true;
throw err;
throw new ErrorWithCause('[Server] Error occurred while initializing web or socket.io server', {cause: err});
}
logger.info(`API started => localhost:${port}`);
@@ -178,8 +177,10 @@ const rcbServer = async function (options: OperatorConfig) {
bots = req.user.accessibleBots(req.botApp.bots);
}
const resp = [];
let index = 1;
for(const b of bots) {
resp.push({name: b.botName, data: await opStats(b)});
resp.push({name: b.botName ?? `Bot ${index}`, data: await opStats(b)});
index++;
}
return res.json(resp);
});
@@ -202,7 +203,7 @@ const rcbServer = async function (options: OperatorConfig) {
server.getAsync('/check', ...actionRoute);
server.getAsync('/addBot', ...addBot());
server.postAsync('/bot', ...addBot());
server.getAsync('/bot/invite', ...getInvitesRoute);

View File

@@ -9,19 +9,29 @@
<div class="p-6 md:px-10 md:py-6">
<div class="text-xl mb-4">Congrats! You did the thing.</div>
<div class="space-y-3">
<div>These are the credentials ContextMod will use to act as your bot, <b><%= userName %></b></div>
<ul class="list-inside list-disc">
<li>Access Token: <b><%= accessToken %></b></li>
<li>Refresh Token: <b><%= refreshToken %></b></li>
</ul>
<% if(locals.addResult !== undefined) { %>
<div>Result of trying to add bot automatically: <%= addResult %></div>
<div>Note: You can revoke ContextMod's access to this account at any time by visiting the <a href="https://www.reddit.com/prefs/apps">reddit app preferences</a> while logged in as the account and clicking the <b>revoke access</b> link under ContextMod</div>
</div>
<div class="text-xl my-4">What Do I Do Now?</div>
<div class="space-y-3">
<% if(locals.stored === true) { %>
<% if(locals.success === true) { %>
<div>Credentials were successfully persisted to the application and the bot was automatically started! You may now <a href="/">login with your normal/moderator account</a> to view the web dashboard where your bot can be monitored.</div>
<% } else { %>
<div>The bot was successfully saved to the application but it could not be started automatically. Please inform the operator so they can restart the application.</div>
<% } %>
<% } else { %>
<div>Bot was not automatically added to an instance and will need to manually appended to configuration...</div>
<div>These credentials were <b>not automatically added</b> to an instance and will need to be <b>manually added by the operator</b>:</div>
<ul class="list-inside list-disc">
<li>If you are a <b>Moderator</b> then copy the above <b>Tokens</b> and pass them on to the Operator of this ContextMod instance.</li>
<li>If you are an <b>Operator</b> copy these somewhere and then restart the application providing these as either arguments, environmental variables, or in a config as described in the <a
href="https://github.com/FoxxMD/context-mod/blob/master/docs/operatorConfiguration.md#defining-configuration">configuration guide</a></li>
</ul>
<% } %>
<div>If you are a <b>Moderator</b> then copy the above <b>Tokens</b> and pass them on to the Operator of this ContextMod instance.</div>
<div>If you are an <b>Operator</b> copy these somewhere and then restart the application providing these as either arguments, environmental variables, or in a json config as described in the <a
href="https://github.com/FoxxMD/context-mod/blob/master/docs/operatorConfiguration.md#defining-configuration">configuration guide</a>
</div>
</div>
</div>
</div>

View File

@@ -46,7 +46,43 @@
class="text-black placeholder-gray-500 rounded mt-2 mb-3 p-2"
placeholder="<%= locals.clientSecret !== undefined ? 'Use Provided Client Secret' : 'Client Secret Not Provided' %>">
</div>
<div class="text-lg text-semibold my-3">3. Select permissions</div>
<div class="text-lg text-semibold my-3">3. Select Instance</div>
<div class="my-2 ml-5">
<div class="space-y-3">
<div>Specify the ContextMod instance to add this bot to.</div>
<select id="instanceSelect" style="max-width:400px;" class="form-select
block
w-full
px-3
py-1.5
text-base
font-normal
text-gray-700
bg-white bg-clip-padding bg-no-repeat
border border-solid border-gray-300
rounded
transition
ease-in-out
m-0
focus:text-gray-700 focus:bg-white focus:border-blue-600 focus:outline-none" aria-label="Default select example">
<% instances.forEach(function (name, index){ %>
<option selected="<%= index === 0 ? 'true' : 'false' %>" value="<%= name %>"><%= name %></option>
<%= name %>
<% }) %>
</select>
</div>
</div>
<div class="text-lg text-semibold my-3">4. Optionally, restrict to Subreddits</div>
<div class="my-2 ml-5">
<div class="space-y-3">
<div>Specify which subreddits, out of all the subreddits the bot moderates, CM should run on.</div>
<div>Subreddits should be seperated with a comma. Leave blank to run on all moderated subreddits</div>
<input id="subreddits" style="max-width:800px; display: block;"
class="text-black placeholder-gray-500 rounded mt-2 mb-3 p-2 w-full"
placeholder="aSubreddit,aSecondSubreddit,aThirdSubreddit">
</div>
</div>
<div class="text-lg text-semibold my-3">5. Select permissions</div>
<div class="my-2 ml-5">
<div class="space-y-3">
<div>These are permissions to allow the bot account to perform these actions, <b>in
@@ -220,6 +256,8 @@
clientSecret: document.querySelector('#clientSecret').value,
code: document.querySelector("#inviteCode").value === '' ? undefined : document.querySelector("#inviteCode").value,
permissions,
instance: document.querySelector('#instanceSelect').value,
subreddits: document.querySelector('#subreddits').value
})
}).then((resp) => {
if(!resp.ok) {

View File

@@ -1,10 +1,19 @@
<div class="space-x-4 py-1 md:px-10 leading-6 font-semibold bg-gray-700">
<div class="container mx-auto">
<% if(locals.bots !== undefined) { %>
<ul id="botTabs" class="inline-flex flex-wrap">
<% if(locals.system !== undefined && locals.system.logs !== undefined) {%>
<li class="my-3 px-3">
<span data-bot="system" class="rounded-md py-2 px-3 tabSelectWrapper">
<a class="tabSelect instance font-normal pointer hover:font-bold" data-bot="system">
System
</a>
</span>
</li>
<% } %>
<% if(locals.bots !== undefined) { %>
<% bots.forEach(function (data){ %>
<li class="my-3 px-3">
<span data-bot="<%= data.system.name %>" class="rounded-md py-2 px-3 tabSelectWrapper">
<span data-bot="<%= data.system.name %>" class="rounded-md py-2 px-3 tabSelectWrapper real">
<a class="tabSelect font-normal pointer hover:font-bold"
data-bot="<%= data.system.name %>">
<%= data.system.name %>
@@ -24,7 +33,7 @@
</span>
</li>
<% } %>
</ul>
<% } %>
</ul>
</div>
</div>

View File

@@ -9,6 +9,19 @@
<div class="grid">
<div class="">
<div class="pb-6 md:px-7">
<% if(isOperator) { %>
<div class="sub" data-bot="system" data-subreddit="All">
<div class="grid grid-cols-1 lg:grid-cols-2 xl:grid-cols-3 2xl:grid-cols-3 gap-5">
</div>
<br/>
<%- include('partials/loadingIcon') %>
<div data-subreddit="All" class="logs font-mono text-sm">
<% system.logs.forEach(function (logEntry){ %>
<%- logEntry %>
<% }) %>
</div>
</div>
<% } %>
<% bots.forEach(function (bot){ %>
<% bot.subreddits.forEach(function (data){ %>
<div class="sub <%= bot.system.running ? '' : 'offline' %>" data-subreddit="<%= data.name %>" data-bot="<%= bot.system.name %>">
@@ -727,23 +740,31 @@
const firstSub = document.querySelectorAll(`[data-bot="${bot}"].sub`)[0];
firstSub.classList.add('active');
let firstSubWrapper;
const firstSubTab = document.querySelector(`ul[data-bot="${bot}"] [data-subreddit="${firstSub.dataset.subreddit}"].tabSelect`);
firstSubTab.classList.add('font-bold', 'no-underline', 'pointer-events-none');
const firstSubWrapper = firstSubTab.closest('.tabSelectWrapper');
//document.querySelector(`[data-subreddit="${subreddit}"][data-bot="${bot}"].sub`).classList.add('active');
if(firstSubTab !== null) {
firstSubTab.classList.add('font-bold', 'no-underline', 'pointer-events-none');
firstSubWrapper = firstSubTab.closest('.tabSelectWrapper');
//document.querySelector(`[data-subreddit="${subreddit}"][data-bot="${bot}"].sub`).classList.add('active');
}
document.querySelectorAll('.tabSelectWrapper').forEach(el => {
el.classList.remove('border-2');
el.classList.add('border');
})
firstSubWrapper.classList.remove('border');
firstSubWrapper.classList.add('border-2');
if(firstSubWrapper !== undefined) {
firstSubWrapper.classList.remove('border');
firstSubWrapper.classList.add('border-2');
}
document.querySelectorAll('[data-bot].subreddit.nestedTabs').forEach(el => {
el.classList.remove('active');
})
document.querySelector(`[data-bot="${bot}"].subreddit.nestedTabs`).classList.add('active');
const nested = document.querySelector(`[data-bot="${bot}"].subreddit.nestedTabs`);
if(nested !== null) {
nested.classList.add('active');
}
const wrapper = e.target.closest('.tabSelectWrapper');//document.querySelector(`[data-subreddit="${subreddit}"].tabSelectWrapper`);
wrapper.classList.remove('border');
@@ -752,7 +773,9 @@
if ('URLSearchParams' in window) {
var searchParams = new URLSearchParams(window.location.search)
searchParams.set("bot", bot);
searchParams.set("sub", firstSub.dataset.subreddit);
if(firstSub.dataset.subreddit !== undefined) {
searchParams.set("sub", firstSub.dataset.subreddit);
}
var newRelativePathQuery = window.location.pathname + '?' + searchParams.toString();
history.pushState(null, '', newRelativePathQuery);
}
@@ -797,7 +820,10 @@
let shownBot = searchParams.get('bot');
if(shownBot === null) {
// show the first bot listed if none is specified
const firstBot = document.querySelector('.tabSelectWrapper[data-bot]');
let firstBot = document.querySelector('.real.tabSelectWrapper[data-bot]');
if(firstBot === null) {
}
if(firstBot !== null) {
shownBot = firstBot.dataset.bot;
searchParams.set('bot', shownBot);
@@ -807,17 +833,27 @@
}
document.querySelector(`[data-bot="${shownBot}"].tabSelect`).classList.add('font-bold', 'no-underline', 'pointer-events-none');
document.querySelector(`ul[data-bot="${shownBot}"] [data-subreddit="${shownSub}"].tabSelect`).classList.add('font-bold', 'no-underline', 'pointer-events-none');
const tabSelect = document.querySelector(`ul[data-bot="${shownBot}"] [data-subreddit="${shownSub}"].tabSelect`);
if(tabSelect !== null) {
tabSelect.classList.add('font-bold', 'no-underline', 'pointer-events-none');
}
document.querySelectorAll('.tabSelectWrapper').forEach(el => el.classList.add('border'));
document.querySelector(`[data-bot="${shownBot}"][data-subreddit="${shownSub}"].sub`).classList.add('active');
const subWrapper = document.querySelector(`ul[data-bot="${shownBot}"] [data-subreddit="${shownSub}"].tabSelectWrapper`);
subWrapper.classList.remove('border');
subWrapper.classList.add('border-2');
if(subWrapper !== null) {
subWrapper.classList.remove('border');
subWrapper.classList.add('border-2');
}
const wrapper = document.querySelector(`[data-bot="${shownBot}"].tabSelectWrapper`);
wrapper.classList.remove('border');
wrapper.classList.add('border-2');
if(wrapper !== null) {
wrapper.classList.remove('border');
wrapper.classList.add('border-2');
}
document.querySelector(`[data-bot="${shownBot}"].subreddit.nestedTabs`).classList.add('active');
const nestedTabs = document.querySelector(`[data-bot="${shownBot}"].subreddit.nestedTabs`);
if(nestedTabs !== null) {
nestedTabs.classList.add('active');
}
document.querySelectorAll('.stats.reloadStats').forEach(el => el.classList.add('hidden'));
document.querySelectorAll('.allStatsToggle').forEach(el => el.classList.add('font-bold', 'no-underline', 'pointer-events-none'));
@@ -825,19 +861,6 @@
<script src="https://cdn.socket.io/4.1.2/socket.io.min.js" integrity="sha384-toS6mmwu70G0fw54EGlWWeA4z3dyJ+dlXBtSURSKN4vyRFOcxd3Bzjj/AoOwY+Rg" crossorigin="anonymous"></script>
<script>
const SUBREDDIT_NAME_LOG_REGEX = /{(.+?)}/;
const BOT_NAME_LOG_REGEX = /~(.+?)~/;
const parseALogName = (reg) => {
return (val) => {
const matches = val.match(reg);
if (matches === null) {
return undefined;
}
return matches[1];
}
}
const parseSubredditLogName = parseALogName(SUBREDDIT_NAME_LOG_REGEX);
const parseBotLogName = parseALogName(BOT_NAME_LOG_REGEX);
let socket = io({
reconnectionAttempts: 5, // bail after 5 attempts
@@ -860,25 +883,29 @@
socket.on("connect", () => {
document.body.classList.add('connected')
socket.on("log", data => {
bufferedBot.set('All', bufferedBot.get('All').concat(data));
const bot = parseBotLogName(data);
const {
subreddit,
bot,
subredditMessage,
allMessage,
formattedMessage
} = data;
if(bot === undefined && subreddit === undefined) {
const sys = bufferedBot.get('system');
if(sys !== undefined) {
sys.set('All', sys.get('All').concat(formattedMessage));
bufferedBot.set('system', sys);
}
}
if(bot !== undefined) {
bufferedBot.set('All', bufferedBot.get('All').concat(allMessage));
const buffBot = bufferedBot.get(bot) || newBufferedLogs();
buffBot.set('All', buffBot.get('All').concat(data));
const sub = parseSubredditLogName(data);
if (sub !== undefined) {
buffBot.set(sub, (buffBot.get(sub) || []).concat(data));
buffBot.set('All', buffBot.get('All').concat(allMessage));
if (subreddit !== undefined) {
buffBot.set(subreddit, (buffBot.get(subreddit) || []).concat(subredditMessage));
}
bufferedBot.set(bot, buffBot);
} else {
bufferedBot.forEach((logs, botName) => {
if(botName === 'All') {
return;
}
logs.set('All', logs.get('All').concat(data));
bufferedBot.set(botName, logs);
});
}

View File

@@ -64,7 +64,8 @@ const program = new Command();
.allowUnknownOption();
runCommand = addOptions(runCommand, getUniversalWebOptions());
runCommand.action(async (interfaceVal, opts) => {
const config = buildOperatorConfigWithDefaults(await parseOperatorConfigFromSources({...opts, mode: interfaceVal}));
const [opConfig, fileConfig] = await parseOperatorConfigFromSources({...opts, mode: interfaceVal});
const config = buildOperatorConfigWithDefaults(opConfig);
const {
mode,
} = config;
@@ -73,7 +74,7 @@ const program = new Command();
await clientServer(config);
}
if(mode === 'all' || mode === 'server') {
await apiServer(config);
await apiServer({...config, fileConfig});
}
} catch (err: any) {
throw err;
@@ -92,9 +93,10 @@ const program = new Command();
checkCommand
.addOption(checks)
.action(async (activityIdentifier, type, botVal, commandOptions = {}) => {
const config = buildOperatorConfigWithDefaults(await parseOperatorConfigFromSources(commandOptions));
const [opConfig, fileConfig] = await parseOperatorConfigFromSources(commandOptions);
const config = buildOperatorConfigWithDefaults(opConfig);
const {checks = []} = commandOptions;
app = new App(config);
app = new App({...config, fileConfig});
let a;
const commentId = commentReg(activityIdentifier);
@@ -168,7 +170,8 @@ const program = new Command();
unmodCommand
.addOption(checks)
.action(async (subreddits = [], botVal, opts = {}) => {
const config = buildOperatorConfigWithDefaults(await parseOperatorConfigFromSources(opts));
const [opConfig, fileConfig] = await parseOperatorConfigFromSources(opts);
const config = buildOperatorConfigWithDefaults(opConfig);
const {checks = []} = opts;
const logger = winston.loggers.get('app');
let bots: Bot[] = [];
@@ -201,10 +204,7 @@ const program = new Command();
} catch (err: any) {
if (!err.logged && !(err instanceof LoggedError)) {
const logger = winston.loggers.get('app');
if(isScopeError(err)) {
logger.error('Reddit responded with a 403 insufficient_scope which means the bot is lacking necessary OAUTH scopes to perform general actions.');
}
const logger = winston.loggers.has('app') ? winston.loggers.get('app') : winston.loggers.get('init');
logger.error(err);
}
process.kill(process.pid, 'SIGTERM');

View File

@@ -1,7 +1,7 @@
import winston, {Logger} from "winston";
import jsonStringify from 'safe-stable-stringify';
import dayjs, {Dayjs, OpUnitType} from 'dayjs';
import {FormattedRuleResult, isRuleSetResult, RulePremise, RuleResult, RuleSetResult} from "./Rule";
import {FormattedRuleResult, isRuleSetResult, RulePremise, RuleResult, RuleSetResult, UserNoteCriteria} from "./Rule";
import deepEqual from "fast-deep-equal";
import {Duration} from 'dayjs/plugin/duration.js';
import Ajv from "ajv";
@@ -15,7 +15,7 @@ import {
ActivityWindowCriteria, ActivityWindowType,
CacheOptions,
CacheProvider,
DurationComparison, DurationVal,
DurationComparison, DurationVal, FilterCriteriaPropertyResult, FilterCriteriaResult,
GenericComparison,
HistoricalStats,
HistoricalStatsDisplay, ImageComparisonResult,
@@ -23,20 +23,18 @@ import {
ImageDetection,
//ImageDownloadOptions,
LogInfo,
NamedGroup,
NamedGroup, OperatorJsonConfig,
PollingOptionsStrong,
RedditEntity,
RedditEntityType,
RegExResult, RepostItem, RepostItemResult,
ResourceStats, SearchAndReplaceRegExp,
StatusCodeError, StringComparisonOptions,
StringComparisonOptions,
StringOperator,
StrongSubredditState,
SubredditState
} from "./Common/interfaces";
import JSON5 from "json5";
import yaml, {JSON_SCHEMA} from "js-yaml";
import SimpleError from "./Utils/SimpleError";
import { Document as YamlDocument } from 'yaml'
import InvalidRegexError from "./Utils/InvalidRegexError";
import {constants, promises} from "fs";
import {cacheOptDefaults} from "./Common/defaults";
@@ -45,20 +43,26 @@ import redisStore from "cache-manager-redis-store";
import crypto from "crypto";
import Autolinker from 'autolinker';
import {create as createMemoryStore} from './Utils/memoryStore';
import {MESSAGE} from "triple-beam";
import {MESSAGE, LEVEL} from "triple-beam";
import {RedditUser} from "snoowrap/dist/objects";
import reRegExp from '@stdlib/regexp-regexp';
import fetch, {Response} from "node-fetch";
import { URL } from "url";
import ImageData from "./Common/ImageData";
import {Sharp, SharpOptions} from "sharp";
// @ts-ignore
import {blockhashData, hammingDistance} from 'blockhash';
import {SetRandomInterval} from "./Common/types";
import {ErrorWithCause, stackWithCauses} from "pony-cause";
import {ConfigFormat, SetRandomInterval} from "./Common/types";
import stringSimilarity from 'string-similarity';
import calculateCosineSimilarity from "./Utils/StringMatching/CosineSimilarity";
import levenSimilarity from "./Utils/StringMatching/levenSimilarity";
import {isRequestError, isStatusError} from "./Utils/Errors";
import {SimpleError, isRateLimitError, isRequestError, isScopeError, isStatusError, CMError} from "./Utils/Errors";
import {parse} from "path";
import JsonConfigDocument from "./Common/Config/JsonConfigDocument";
import YamlConfigDocument from "./Common/Config/YamlConfigDocument";
import AbstractConfigDocument, {ConfigDocumentInterface} from "./Common/Config/AbstractConfigDocument";
import LoggedError from "./Utils/LoggedError";
//import {ResembleSingleCallbackComparisonResult} from "resemblejs";
// want to guess how many concurrent image comparisons we should be doing
@@ -85,35 +89,77 @@ const CWD = process.cwd();
// }
// }
const errorAwareFormat = {
transform: (info: any, opts: any) => {
// don't need to log stack trace if we know the error is just a simple message (we handled it)
const stack = !(info instanceof SimpleError) && !(info.message instanceof SimpleError);
const {name, response, message, stack: errStack, error, statusCode} = info;
if(name === 'StatusCodeError' && response !== undefined && response.headers !== undefined && response.headers['content-type'].includes('html')) {
// reddit returns html even when we specify raw_json in the querystring (via snoowrap)
// which means the html gets set as the message for the error AND gets added to the stack as the message
// and we end up with a h u g e log statement full of noisy html >:(
transform: (einfo: any, {stack = true}: any = {}) => {
const errorSample = error.slice(0, 10);
const messageBeforeIndex = message.indexOf(errorSample);
let newMessage = `Status Error ${statusCode} from Reddit`;
if(messageBeforeIndex > 0) {
newMessage = `${message.slice(0, messageBeforeIndex)} - ${newMessage}`;
}
let cleanStack = errStack;
// because winston logger.child() re-assigns its input to an object ALWAYS the object we recieve here will never actually be of type Error
const includeStack = stack && (!isProbablyError(einfo, 'simpleerror') && !isProbablyError(einfo.message, 'simpleerror'));
// try to get just stacktrace by finding beginning of what we assume is the actual trace
if(errStack) {
cleanStack = `${newMessage}\n${errStack.slice(errStack.indexOf('at new StatusCodeError'))}`;
}
// now put it all together so its nice and clean
info.message = newMessage;
info.stack = cleanStack;
if (!isProbablyError(einfo.message) && !isProbablyError(einfo)) {
return einfo;
}
return errors().transform(info, { stack });
let info: any = {};
if (isProbablyError(einfo)) {
const tinfo = transformError(einfo);
info = Object.assign({}, tinfo, {
// @ts-ignore
level: einfo.level,
// @ts-ignore
[LEVEL]: einfo[LEVEL] || einfo.level,
message: tinfo.message,
// @ts-ignore
[MESSAGE]: tinfo[MESSAGE] || tinfo.message
});
if(includeStack) {
// so we have to create a dummy error and re-assign all error properties from our info object to it so we can get a proper stack trace
const dummyErr = new ErrorWithCause('');
for(const k in tinfo) {
if(dummyErr.hasOwnProperty(k) || k === 'cause') {
// @ts-ignore
dummyErr[k] = tinfo[k];
}
}
// @ts-ignore
info.stack = stackWithCauses(dummyErr);
}
} else {
const err = transformError(einfo.message);
info = Object.assign(einfo, err);
// @ts-ignore
info.message = err.message;
// @ts-ignore
info[MESSAGE] = err.message;
if(includeStack) {
const dummyErr = new ErrorWithCause('');
for(const k in err) {
if(dummyErr.hasOwnProperty(k) || k === 'cause') {
// @ts-ignore
dummyErr[k] = tinfo[k];
}
}
// @ts-ignore
info.stack = stackWithCauses(dummyErr);
}
}
// remove redundant message from stack and make stack causes easier to read
if(info.stack !== undefined) {
let cleanedStack = info.stack.replace(info.message, '');
cleanedStack = `${cleanedStack}`;
cleanedStack = cleanedStack.replaceAll('caused by:', '\ncaused by:');
info.stack = cleanedStack;
}
return info;
}
}
const isProbablyError = (val: any, errName = 'error') => {
return typeof val === 'object' && val.name !== undefined && val.name.toLowerCase().includes(errName);
}
export const PASS = '✔';
export const FAIL = '✘';
@@ -289,13 +335,39 @@ export const mergeArr = (objValue: [], srcValue: []): (any[] | undefined) => {
}
}
export const removeFromSourceIfKeysExistsInDestination = (destinationArray: any[], sourceArray: any[], options: any): any[] => {
// get all keys from objects in destination
const destKeys = destinationArray.reduce((acc: string[], curr) => {
// can only get keys for objects, skip for everything else
if(curr !== null && typeof curr === 'object') {
const keys = Object.keys(curr).map(x => x.toLowerCase());
for(const k of keys) {
if(!acc.includes(k)) {
acc.push(k);
}
}
}
return acc;
}, []);
const sourceItemsToKeep = sourceArray.filter(x => {
if(x !== null && typeof x === 'object') {
const sourceKeys = Object.keys(x).map(x => x.toLowerCase());
// only keep if keys from this object do not appear anywhere in destination items
return intersect(sourceKeys, destKeys).length === 0;
}
// keep if item is not an object since we can't test for keys anyway
return true;
});
return sourceItemsToKeep.concat(destinationArray);
}
export const ruleNamesFromResults = (results: RuleResult[]) => {
return results.map(x => x.name || x.premise.kind).join(' | ')
}
export const triggeredIndicator = (val: boolean | null): string => {
export const triggeredIndicator = (val: boolean | null, nullResultIndicator = '-'): string => {
if(val === null) {
return '-';
return nullResultIndicator;
}
return val ? PASS : FAIL;
}
@@ -312,6 +384,40 @@ export const resultsSummary = (results: (RuleResult|RuleSetResult)[], topLevelCo
//return results.map(x => x.name || x.premise.kind).join(' | ')
}
export const filterCriteriaSummary = (val: FilterCriteriaResult<any>): [string, string[]] => {
// summarize properties relevant to result
const passedProps = {props: val.propertyResults.filter(x => x.passed === true), name: 'Passed'};
const failedProps = {props: val.propertyResults.filter(x => x.passed === false), name: 'Failed'};
const skippedProps = {props: val.propertyResults.filter(x => x.passed === null), name: 'Skipped'};
const dnrProps = {props: val.propertyResults.filter(x => x.passed === undefined), name: 'DNR'};
const propSummary = [passedProps, failedProps];
if (skippedProps.props.length > 0) {
propSummary.push(skippedProps);
}
if (dnrProps.props.length > 0) {
propSummary.push(dnrProps);
}
const propSummaryStrArr = propSummary.map(x => `${x.props.length} ${x.name}${x.props.length > 0 ? ` (${x.props.map(y => y.property as string)})` : ''}`);
return [propSummaryStrArr.join(' | '), val.propertyResults.map(filterCriteriaPropertySummary)]
}
export const filterCriteriaPropertySummary = (val: FilterCriteriaPropertyResult<any>): string => {
let passResult: string;
switch (val.passed) {
case undefined:
passResult = 'DNR'
break;
case null:
case true:
case false:
passResult = triggeredIndicator(val.passed, 'Skipped');
break;
}
const found = val.passed === null || val.passed === undefined ? '' : ` => Found: ${val.found}${val.reason !== undefined ? ` -- ${val.reason}` : ''}${val.behavior === 'exclude' ? ' (Exclude passes when Expected is not Found)' : ''}`;
return `${val.property as string} => ${passResult} => Expected: ${val.expected}${found}`;
}
export const createAjvFactory = (logger: Logger) => {
return new Ajv({logger: logger, verbose: true, strict: "log", allowUnionTypes: true});
}
@@ -458,34 +564,64 @@ export const isActivityWindowCriteria = (val: any): val is ActivityWindowCriteri
return false;
}
export const parseFromJsonOrYamlToObject = (content: string): [object?, Error?, Error?] => {
export interface ConfigToObjectOptions {
location?: string,
jsonDocFunc?: (content: string, location?: string) => AbstractConfigDocument<OperatorJsonConfig>,
yamlDocFunc?: (content: string, location?: string) => AbstractConfigDocument<YamlDocument>
}
export const parseFromJsonOrYamlToObject = (content: string, options?: ConfigToObjectOptions): [ConfigFormat, ConfigDocumentInterface<YamlDocument | object>?, Error?, Error?] => {
let obj;
let configFormat: ConfigFormat = 'yaml';
let jsonErr,
yamlErr;
const likelyType = likelyJson5(content) ? 'json' : 'yaml';
const {
location,
jsonDocFunc = (content: string, location?: string) => new JsonConfigDocument(content, location),
yamlDocFunc = (content: string, location?: string) => new YamlConfigDocument(content, location),
} = options || {};
try {
obj = JSON5.parse(content);
const oType = obj === null ? 'null' : typeof obj;
const jsonObj = jsonDocFunc(content, location);
const output = jsonObj.toJS();
const oType = output === null ? 'null' : typeof output;
if (oType !== 'object') {
jsonErr = new SimpleError(`Parsing as json produced data of type '${oType}' (expected 'object')`);
obj = undefined;
} else {
obj = jsonObj;
configFormat = 'json';
}
} catch (err: any) {
jsonErr = err;
}
if (obj === undefined) {
try {
obj = yaml.load(content, {schema: JSON_SCHEMA, json: true});
const oType = obj === null ? 'null' : typeof obj;
if (oType !== 'object') {
yamlErr = new SimpleError(`Parsing as yaml produced data of type '${oType}' (expected 'object')`);
obj = undefined;
try {
const yamlObj = yamlDocFunc(content, location)
const output = yamlObj.toJS();
const oType = output === null ? 'null' : typeof output;
if (oType !== 'object') {
yamlErr = new SimpleError(`Parsing as yaml produced data of type '${oType}' (expected 'object')`);
obj = undefined;
} else if (obj === undefined && (likelyType !== 'json' || yamlObj.parsed.errors.length === 0)) {
configFormat = 'yaml';
if(yamlObj.parsed.errors.length !== 0) {
yamlErr = new Error(yamlObj.parsed.errors.join('\n'))
} else {
obj = yamlObj;
}
} catch (err: any) {
yamlErr = err;
}
} catch (err: any) {
yamlErr = err;
}
return [obj, jsonErr, yamlErr];
if (obj === undefined) {
configFormat = likelyType;
}
return [configFormat, obj, jsonErr, yamlErr];
}
export const comparisonTextOp = (val1: number, strOp: string, val2: number): boolean => {
@@ -805,6 +941,11 @@ export const createRetryHandler = (opts: RetryOptions, logger: Logger) => {
lastErrorAt = dayjs();
if(isRateLimitError(err)) {
logger.error('Will not retry because error was due to ratelimit exhaustion');
return false;
}
const redditApiError = isRequestError(err) || isStatusError(err);
if(redditApiError) {
@@ -842,6 +983,119 @@ export const createRetryHandler = (opts: RetryOptions, logger: Logger) => {
}
}
// Builds a log message from the caught error (used when a fixed string is not enough)
type StringReturn = (err:any) => string;

export interface LogMatch {
    // key: an HTTP status code (e.g. 429) or a named category such as 'ratelimit' | 'scope' | 'any'
    // (see parseMatchMessage/getExceptionMessage); value: a fixed message or a message-builder function
    [key: string | number]: string | StringReturn
}
// Options controlling how a caught exception is logged.
// NOTE(review): consuming function is not visible in this chunk -- field semantics inferred from names, confirm at call sites.
export interface logExceptionOptions {
    context?: string
    // presumably: still log the error even when no LogMatch entry matched it -- TODO confirm
    logIfNotMatched?: boolean
    logStackTrace?: boolean
    // custom message overrides per error category/status code
    match?: LogMatch
}
/**
 * Resolve a log message for an error by checking each candidate match type, in order, against
 * the user-supplied LogMatch map.
 *
 * @returns [message, matched] -- the first match's message (fixed string, or the result of a
 *          message-builder called with the error), or [defaultMatch, false] when nothing matched
 */
export const parseMatchMessage = (err: any, match: LogMatch, matchTypes: (string | number)[], defaultMatch: string): [string, boolean] => {
    for (const matchType of matchTypes) {
        const candidate = match[matchType];
        if (candidate === undefined) {
            continue;
        }
        if (typeof candidate === 'string') {
            return [candidate, true];
        }
        // message-builder function: derive the message from the error itself
        return [(candidate as Function)(err), true];
    }
    return [defaultMatch, false];
}
/**
 * Produce a friendly message for a known error shape, honoring any user-supplied LogMatch overrides.
 *
 * Reddit request errors are checked for ratelimit/scope specializations first; anything else falls
 * through to the generic 'any' category. Returns undefined when no match entry (or known default) applied.
 */
export const getExceptionMessage = (err: any, match: LogMatch = {}): string | undefined => {
    let result: [string, boolean];

    if (isRequestError(err)) {
        if (isRateLimitError(err)) {
            result = parseMatchMessage(err, match, ['ratelimit', err.statusCode], 'Ratelimit Exhausted');
        } else if (isScopeError(err)) {
            result = parseMatchMessage(err, match, ['scope', err.statusCode], 'Missing OAUTH scope required for this request');
        } else {
            result = parseMatchMessage(err, match, [err.statusCode], err.message);
        }
    } else {
        result = parseMatchMessage(err, match, ['any'], err.message);
    }

    const [msg, matched] = result;
    return matched ? msg : undefined;
}
/**
 * Recursively normalize an error (and its cause chain) for logging.
 *
 * Reddit HTTP errors get their noisy html message/stack replaced with a short "translated" message
 * and are wrapped in an ErrorWithCause; other errors are returned as-is with their causes transformed.
 *
 * @param err caught error (returns '' for non-error values)
 * @param seen guards against cyclic cause chains
 * @param matchOptions optional LogMatch overrides, inherited down the cause chain
 */
const _transformError = (err: Error, seen: Set<Error>, matchOptions?: LogMatch) => {
    if (!err || !isProbablyError(err)) {
        return '';
    }
    if (seen.has(err)) {
        return err;
    }
    // FIX: record this error before descending -- previously nothing was ever added to `seen`,
    // so a cyclic cause chain (e.g. err.cause === err) recursed forever
    seen.add(err);

    // @ts-ignore
    let mOpts = err.matchOptions ?? matchOptions;

    if (isRequestError(err)) {
        const errMsgParts = [`Reddit responded with a NOT OK status (${err.statusCode})`];

        // FIX: content-type header may be absent on some responses; default to '' instead of crashing
        if ((err.response.headers['content-type'] ?? '').includes('html')) {
            // reddit returns html even when we specify raw_json in the querystring (via snoowrap)
            // which means the html gets set as the message for the error AND gets added to the stack as the message
            // and we end up with a h u g e log statement full of noisy html >:(
            const {error, statusCode, message, stack: errStack} = err;
            const errorSample = (error as unknown as string).slice(0, 10);
            const messageBeforeIndex = message.indexOf(errorSample);
            let newMessage = `Status Error ${statusCode} from Reddit`;
            if (messageBeforeIndex > 0) {
                newMessage = `${message.slice(0, messageBeforeIndex)} - ${newMessage}`;
            }
            let cleanStack = errStack;

            // try to get just stacktrace by finding beginning of what we assume is the actual trace
            if (errStack) {
                cleanStack = `${newMessage}\n${errStack.slice(errStack.indexOf('at new StatusCodeError'))}`;
            }
            // now put it all together so its nice and clean
            err.message = newMessage;
            err.stack = cleanStack;
        }

        const msg = getExceptionMessage(err, mOpts);
        if (msg !== undefined) {
            errMsgParts.push(msg);
        }

        // we don't care about stack trace for this error because we know where it came from so truncate to two lines for now...maybe remove all together later
        if (err.stack !== undefined) {
            err.stack = err.stack.split('\n').slice(0, 2).join('\n');
        }

        const normalizedError = new ErrorWithCause(errMsgParts.join(' => '), {cause: err});
        normalizedError.stack = normalizedError.message;
        return normalizedError;
    }

    // @ts-ignore
    const cause = err.cause as unknown;

    if (cause !== undefined && cause instanceof Error) {
        // @ts-ignore
        err.cause = _transformError(cause, seen, mOpts);
    }

    return err;
}
/**
 * Public entry point for error normalization: walks the full cause chain with a fresh visited set.
 */
export const transformError = (err: Error): any => {
    return _transformError(err, new Set());
}
const LABELS_REGEX: RegExp = /(\[.+?])*/g;
export const parseLabels = (log: string): string[] => {
return Array.from(log.matchAll(LABELS_REGEX), m => m[0]).map(x => x.substring(1, x.length - 1));
@@ -887,7 +1141,8 @@ export const isLogLineMinLevel = (log: string | LogInfo, minLevelText: string):
// https://regexr.com/3e6m0
const HYPERLINK_REGEX: RegExp = /(http(s)?:\/\/.)?(www\.)?[-a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-z]{2,6}\b([-a-zA-Z0-9@:%_\+.~#?&//=]*)/;
export const formatLogLineToHtml = (log: string | LogInfo) => {
const formattedTime = (short: string, full: string) => `<span class="has-tooltip"><span style="margin-top:35px" class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black space-y-3 p-2 text-left'>${full}</span><span>${short}</span></span>`;
export const formatLogLineToHtml = (log: string | LogInfo, timestamp?: string) => {
const val = typeof log === 'string' ? log : log[MESSAGE];
const logContent = Autolinker.link(val, {
email: false,
@@ -904,7 +1159,16 @@ export const formatLogLineToHtml = (log: string | LogInfo) => {
.replace(/(\s*verbose\s*):/i, '<span class="error purple">$1</span>:')
.replaceAll('\n', '<br />');
//.replace(HYPERLINK_REGEX, '<a target="_blank" href="$&">$&</a>');
return `<div class="logLine">${logContent}</div>`
let line = '';
if(timestamp !== undefined) {
const timeStampReplacement = formattedTime(dayjs(timestamp).format('HH:mm:ss z'), timestamp);
const splitLine = logContent.split(timestamp);
line = `<div class="logLine">${splitLine[0]}${timeStampReplacement}<span style="white-space: pre-wrap">${splitLine[1]}</span></div>`;
} else {
line = `<div style="white-space: pre-wrap" class="logLine">${logContent}</div>`
}
return line;
}
// Tuple of [numeric timestamp used as a sort key, structured log record]
export type LogEntry = [number, LogInfo];
@@ -915,10 +1179,11 @@ export interface LogOptions {
operator?: boolean,
user?: string,
allLogsParser?: Function
allLogName?: string
allLogName?: string,
returnType?: 'string' | 'object'
}
export const filterLogBySubreddit = (logs: Map<string, LogEntry[]>, validLogCategories: string[] = [], options: LogOptions): Map<string, string[]> => {
export const filterLogBySubreddit = (logs: Map<string, LogEntry[]>, validLogCategories: string[] = [], options: LogOptions): Map<string, (string|LogInfo)[]> => {
const {
limit,
level,
@@ -926,7 +1191,8 @@ export const filterLogBySubreddit = (logs: Map<string, LogEntry[]>, validLogCate
operator = false,
user,
allLogsParser = parseSubredditLogInfoName,
allLogName = 'app'
allLogName = 'app',
returnType = 'string',
} = options;
// get map of valid logs categories
@@ -960,13 +1226,18 @@ export const filterLogBySubreddit = (logs: Map<string, LogEntry[]>, validLogCate
const sortFunc = sort === 'ascending' ? (a: LogEntry, b: LogEntry) => a[0] - b[0] : (a: LogEntry, b: LogEntry) => b[0] - a[0];
const preparedMap: Map<string, string[]> = new Map();
const preparedMap: Map<string, (string|LogInfo)[]> = new Map();
// iterate each entry and
// sort, filter by level, slice to limit, then map to html string
for(const [k,v] of validSubMap.entries()) {
let preparedEntries = v.filter(([time, l]) => isLogLineMinLevel(l, level));
preparedEntries.sort(sortFunc);
preparedMap.set(k, preparedEntries.slice(0, limit + 1).map(([time, l]) => formatLogLineToHtml(l)));
const entriesSlice = preparedEntries.slice(0, limit + 1);
if(returnType === 'string') {
preparedMap.set(k, entriesSlice.map(([time, l]) => formatLogLineToHtml(l)));
} else {
preparedMap.set(k, entriesSlice.map(([time, l]) => l));
}
}
@@ -1130,19 +1401,25 @@ export const convertSubredditsRawToStrong = (x: (SubredditState | string), opts:
return toStrongSubredditState(x, opts);
}
export async function readConfigFile(path: string, opts: any) {
export async function readConfigFile(path: string, opts: any): Promise<[string?, ConfigFormat?]> {
const {log, throwOnNotFound = true} = opts;
let extensionHint: ConfigFormat | undefined;
const fileInfo = parse(path);
if(fileInfo.ext !== undefined) {
switch(fileInfo.ext) {
case '.json':
case '.json5':
extensionHint = 'json';
break;
case '.yaml':
extensionHint = 'yaml';
break;
}
}
try {
await promises.access(path, constants.R_OK);
const data = await promises.readFile(path);
const [configObj, jsonErr, yamlErr] = parseFromJsonOrYamlToObject(data as unknown as string);
if(configObj !== undefined) {
return configObj as object;
}
log.error(`Could not parse file contents at ${path} as JSON or YAML:`);
log.error(jsonErr);
log.error(yamlErr);
throw new SimpleError(`Could not parse file contents at ${path} as JSON or YAML`);
return [(data as any).toString(), extensionHint]
} catch (e: any) {
const {code} = e;
if (code === 'ENOENT') {
@@ -1150,14 +1427,16 @@ export async function readConfigFile(path: string, opts: any) {
if (log) {
log.warn('No file found at given path', {filePath: path});
}
e.extension = extensionHint;
throw e;
} else {
return;
return [];
}
} else if (log) {
log.warn(`Encountered error while parsing file`, {filePath: path});
log.error(e);
}
e.extension = extensionHint;
throw e;
}
}
@@ -1166,6 +1445,29 @@ export async function readConfigFile(path: string, opts: any) {
// return (item && typeof item === 'object' && !Array.isArray(item));
// }
/**
 * Verify the application can write to the given file location.
 *
 * Resolves true when either the file exists and is readable+writeable, or the file does not exist
 * but its parent directory is readable+writeable (so the file could be created).
 *
 * @throws SimpleError when the parent directory is inaccessible, or the existing file is not writeable
 */
export const fileOrDirectoryIsWriteable = async (location: string) => {
    const pathInfo = parse(location);
    try {
        await promises.access(location, constants.R_OK | constants.W_OK);
        return true;
    } catch (err: any) {
        const {code} = err;
        if (code === 'ENOENT') {
            // file doesn't exist, see if we can write to directory in which case we are good
            try {
                await promises.access(pathInfo.dir, constants.R_OK | constants.W_OK)
                // we can write to dir
                return true;
            } catch (accessError: any) {
                // also can't access directory :(
                throw new SimpleError(`No file exists at ${location} and application does not have permission to write to that directory`);
            }
        } else {
            // FIX: message previously read "does have permission" -- access() failing here means we do NOT have permission
            throw new SimpleError(`File exists at ${location} but application does not have permission to write to it.`);
        }
    }
}
// Array merge strategy where the source array fully replaces the destination array
// NOTE(review): signature matches a deepmerge `arrayMerge` option -- confirm at call sites
export const overwriteMerge = (destinationArray: any[], sourceArray: any[], options: any): any[] => {
    return sourceArray;
}
export const removeUndefinedKeys = (obj: any) => {
@@ -1282,6 +1584,18 @@ export const asSubmission = (value: any): value is Submission => {
return isSubmission(value);
}
// Loose structural check: a UserNoteCriteria is any non-null object carrying a `type` field
export const isUserNoteCriteria = (value: any) => {
    if (value === null || typeof value !== 'object') {
        return false;
    }
    return value.type !== undefined;
}

// Type-guard wrapper so callers get narrowing to UserNoteCriteria
export const asUserNoteCriteria = (value: any): value is UserNoteCriteria => {
    return isUserNoteCriteria(value);
}
/**
 * One-line human-readable description of a UserNoteCriteria,
 * e.g. ">= 1 of current notes is spam" or "2 of total notes is good".
 */
export const userNoteCriteriaSummary = (val: UserNoteCriteria): string => {
    const countPart = val.count === undefined ? '>= 1' : val.count;
    const searchPart = val.search === undefined ? 'current' : val.search;
    return `${countPart} of ${searchPart} notes is ${val.type}`;
}
/**
* Serialized activities store subreddit and user properties as their string representations (instead of proxy)
* */

View File

@@ -5,7 +5,8 @@
"resolveJsonModule": true,
"typeRoots": [
"./node_modules/@types",
"./src/Web/types"
"./src/Web/types",
"./src/Common/typings"
]
},
// "compilerOptions": {