Compare commits

...

13 Commits
0.8.0 ... 0.8.1

Author SHA1 Message Date
FoxxMD
a11b667d5e Merge branch 'edge' 2021-09-13 16:16:55 -04:00
FoxxMD
269b1620b9 fix(regex): fix when to add match sample to summary
Only include summary if there were non-zero matches
2021-09-07 15:04:30 -04:00
FoxxMD
6dee734440 fix(recent activity): fix subreddit summary in result
* Only include if any subreddits with activity found
* Use correct subreddit array
2021-09-07 15:03:56 -04:00
FoxxMD
3aea422eff fix(cache): check manager has cache (valid config) before trying to get actioned events 2021-09-07 14:56:03 -04:00
FoxxMD
e707e5a9a8 fix(manager): revert commented notification line from debugging 2021-09-07 14:07:00 -04:00
FoxxMD
2a24eea3a5 fix(rule): fix regex rule matching default behavior and improve log message
* Default to global flag if none specified so that all matches per activity are found
* Improve result message section ordering and display a sample of up to 4 found matches
2021-09-07 14:06:30 -04:00
FoxxMD
8ad8297c0e feat(rule): improve recent activity result (log message) by listing only subreddits with found activity 2021-09-07 14:04:26 -04:00
FoxxMD
0b94a14ac1 feat(ui): improve actioned events interactions
* Refactor api to get all accessible events, sorted by time, when subreddit is not specified
* Add subreddit name to actioned event data to differentiate between events
* Show actioned events link in "All" subreddit view
* Remove user-select css style (left over from config template)
* Format timestamp to be more human friendly
* Remove success/triggered text and just use checkmarks (same as log)
2021-09-07 13:33:35 -04:00
FoxxMD
a04e0d2a9b fix(cache): Set actioned events not to expire in cache 2021-09-07 13:26:30 -04:00
FoxxMD
3a1348c370 feat(ui): move actioned events link to "Actions Run" statistic
More intuitive location
2021-09-07 12:59:51 -04:00
FoxxMD
507818037f feat(cache): refactor actioned events into cache for persistence and make number stored configurable
* refactor actioned events into bot-configured cache so they can be persisted between restarts
* add config params for actionedEventsMax and actionedEventsDefault to allow defining defaults at operator/bot/subreddit level
2021-09-07 12:55:19 -04:00
FoxxMD
2c1f6daf4f Implement load config from URL for editor 2021-09-01 10:15:46 -04:00
FoxxMD
fef79472fe re-add missing heartbeat and improve bot exception handling
* Missed heartbeat during client-server refactor somehow...oops. Re-add heartbeat behavior
* Refactor nanny functionality to use date check rather than loop -- behaves same as heartbeat now
* use http retry handling in nanny to handle reddit outages
* try-catch on nanny and heartbeat for better exception handling at bot-level
* await health loop so we can catch bot-level exceptions in app to prevent entire app from crashing
2021-08-31 11:02:03 -04:00
18 changed files with 605 additions and 376 deletions

View File

@@ -3,7 +3,6 @@ import dayjs, {Dayjs} from "dayjs";
import {getLogger} from "./Utils/loggerFactory";
import {Invokee, OperatorConfig} from "./Common/interfaces";
import Bot from "./Bot";
import {castArray} from "lodash";
import LoggedError from "./Utils/LoggedError";
export class App {
@@ -53,8 +52,11 @@ export class App {
}
async onTerminate(reason = 'The application was shutdown') {
for(const m of this.bots) {
//await m.notificationManager.handle('runStateChanged', 'Application Shutdown', reason);
for(const b of this.bots) {
for(const m of b.subManagers) {
await m.notificationManager.handle('runStateChanged', 'Application Shutdown', reason);
}
//await b.notificationManager.handle('runStateChanged', 'Application Shutdown', reason);
}
}

View File

@@ -3,7 +3,7 @@ import {Logger} from "winston";
import dayjs, {Dayjs} from "dayjs";
import {Duration} from "dayjs/plugin/duration";
import EventEmitter from "events";
import {BotInstanceConfig, Invokee, PAUSED, RUNNING, SYSTEM} from "../Common/interfaces";
import {BotInstanceConfig, Invokee, PAUSED, RUNNING, STOPPED, SYSTEM, USER} from "../Common/interfaces";
import {
createRetryHandler,
formatNumber,
@@ -33,12 +33,15 @@ class Bot {
excludeSubreddits: string[];
subManagers: Manager[] = [];
heartbeatInterval: number;
nextHeartbeat?: Dayjs;
nextHeartbeat: Dayjs = dayjs();
heartBeating: boolean = false;
softLimit: number | string = 250;
hardLimit: number | string = 50;
nannyMode?: 'soft' | 'hard';
nannyRunning: boolean = false;
nextNannyCheck: Dayjs = dayjs().add(10, 'second');
nannyRetryHandler: Function;
nextExpiration: Dayjs = dayjs();
botName?: string;
botLink?: string;
@@ -179,6 +182,7 @@ class Bot {
}
const retryHandler = createRetryHandler({maxRequestRetry: 8, maxOtherRetry: 1}, this.logger);
this.nannyRetryHandler = createRetryHandler({maxRequestRetry: 5, maxOtherRetry: 1}, this.logger);
const modStreamErrorListener = (name: string) => async (err: any) => {
this.logger.error('Polling error occurred', err);
@@ -322,7 +326,7 @@ class Bot {
async destroy(causedBy: Invokee) {
this.logger.info('Stopping heartbeat and nanny processes, may take up to 5 seconds...');
const processWait = Promise.all([pEvent(this.emitter, 'heartbeatStopped'), pEvent(this.emitter, 'nannyStopped')]);
const processWait = pEvent(this.emitter, 'healthStopped');
this.running = false;
await processWait;
for (const manager of this.subManagers) {
@@ -361,144 +365,210 @@ class Bot {
await this.runModStreams();
this.running = true;
this.runApiNanny();
this.nextNannyCheck = dayjs().add(10, 'second');
this.nextHeartbeat = dayjs().add(this.heartbeatInterval, 'second');
await this.healthLoop();
}
async healthLoop() {
while (this.running) {
await sleep(5000);
if (!this.running) {
break;
}
if (dayjs().isSameOrAfter(this.nextNannyCheck)) {
try {
await this.runApiNanny();
this.nextNannyCheck = dayjs().add(10, 'second');
} catch (err) {
this.logger.info('Delaying next nanny check for 1 minute due to emitted error');
this.nextNannyCheck = dayjs().add(120, 'second');
}
}
if(dayjs().isSameOrAfter(this.nextHeartbeat)) {
try {
await this.heartbeat();
} catch (err) {
this.logger.error(`Error occurred during heartbeat check: ${err.message}`);
}
this.nextHeartbeat = dayjs().add(this.heartbeatInterval, 'second');
}
}
this.emitter.emit('healthStopped');
}
async heartbeat() {
const heartbeat = `HEARTBEAT -- API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ~${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion === undefined ? 'N/A' : this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`
this.logger.info(heartbeat);
for (const s of this.subManagers) {
if(s.botState.state === STOPPED && s.botState.causedBy === USER) {
this.logger.debug('Skipping config check/restart on heartbeat due to previously being stopped by user', {subreddit: s.displayLabel});
continue;
}
try {
const newConfig = await s.parseConfiguration();
if(newConfig || (s.queueState.state !== RUNNING && s.queueState.causedBy === SYSTEM))
{
await s.startQueue('system', {reason: newConfig ? 'Config updated on heartbeat triggered reload' : 'Heartbeat detected non-running queue'});
}
if(newConfig || (s.eventsState.state !== RUNNING && s.eventsState.causedBy === SYSTEM))
{
await s.startEvents('system', {reason: newConfig ? 'Config updated on heartbeat triggered reload' : 'Heartbeat detected non-running events'});
}
if(s.botState.state !== RUNNING && s.eventsState.state === RUNNING && s.queueState.state === RUNNING) {
s.botState = {
state: RUNNING,
causedBy: 'system',
}
}
} catch (err) {
this.logger.info('Stopping event polling to prevent activity processing queue from backing up. Will be restarted when config update succeeds.')
await s.stopEvents('system', {reason: 'Invalid config will cause events to pile up in queue. Will be restarted when config update succeeds (next heartbeat).'});
if(!(err instanceof LoggedError)) {
this.logger.error(err, {subreddit: s.displayLabel});
}
if(this.nextHeartbeat !== undefined) {
this.logger.info(`Will retry parsing config on next heartbeat (in ${dayjs.duration(this.nextHeartbeat.diff(dayjs())).humanize()})`, {subreddit: s.displayLabel});
}
}
}
await this.runModStreams(true);
}
async runApiNanny() {
try {
mainLoop:
while (this.running) {
for(let i = 0; i < 2; i++) {
await sleep(5000);
if (!this.running) {
break mainLoop;
}
}
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
const nowish = dayjs().add(10, 'second');
if (nowish.isAfter(this.nextExpiration)) {
// it's possible no api calls are being made because of a hard limit
// need to make an api call to update this
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
const nowish = dayjs().add(10, 'second');
if (nowish.isAfter(this.nextExpiration)) {
// it's possible no api calls are being made because of a hard limit
// need to make an api call to update this
let shouldRetry = true;
while (shouldRetry) {
try {
// @ts-ignore
await this.client.getMe();
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
}
const rollingSample = this.apiSample.slice(0, 7)
rollingSample.unshift(this.client.ratelimitRemaining);
this.apiSample = rollingSample;
const diff = this.apiSample.reduceRight((acc: number[], curr, index) => {
if (this.apiSample[index + 1] !== undefined) {
const d = Math.abs(curr - this.apiSample[index + 1]);
if (d === 0) {
return [...acc, 0];
}
return [...acc, d / 10];
shouldRetry = false;
} catch (err) {
shouldRetry = await this.nannyRetryHandler(err);
if (!shouldRetry) {
throw err;
}
return acc;
}, []);
this.apiRollingAvg = diff.reduce((acc, curr) => acc + curr, 0) / diff.length; // api requests per second
this.depletedInSecs = this.client.ratelimitRemaining / this.apiRollingAvg; // number of seconds until current remaining limit is 0
this.apiEstDepletion = dayjs.duration({seconds: this.depletedInSecs});
this.logger.debug(`API Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`);
let hardLimitHit = false;
if (typeof this.hardLimit === 'string') {
const hardDur = parseDuration(this.hardLimit);
hardLimitHit = hardDur.asSeconds() > this.apiEstDepletion.asSeconds();
} else {
hardLimitHit = this.hardLimit > this.client.ratelimitRemaining;
}
if (hardLimitHit) {
if (this.nannyMode === 'hard') {
continue;
}
this.logger.info(`Detected HARD LIMIT of ${this.hardLimit} remaining`, {leaf: 'Api Nanny'});
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${this.apiRollingAvg}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
this.logger.info(`All subreddit event polling has been paused`, {leaf: 'Api Nanny'});
for (const m of this.subManagers) {
m.pauseEvents('system');
m.notificationManager.handle('runStateChanged', 'Hard Limit Triggered', `Hard Limit of ${this.hardLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit event polling has been paused.`, 'system', 'warn');
}
this.nannyMode = 'hard';
continue;
}
let softLimitHit = false;
if (typeof this.softLimit === 'string') {
const softDur = parseDuration(this.softLimit);
softLimitHit = softDur.asSeconds() > this.apiEstDepletion.asSeconds();
} else {
softLimitHit = this.softLimit > this.client.ratelimitRemaining;
}
if (softLimitHit) {
if (this.nannyMode === 'soft') {
continue;
}
this.logger.info(`Detected SOFT LIMIT of ${this.softLimit} remaining`, {leaf: 'Api Nanny'});
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
this.logger.info('Trying to detect heavy usage subreddits...', {leaf: 'Api Nanny'});
let threshold = 0.5;
let offenders = this.subManagers.filter(x => {
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
return combinedPerSec > threshold;
});
if (offenders.length === 0) {
threshold = 0.25;
// reduce threshold
offenders = this.subManagers.filter(x => {
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
return combinedPerSec > threshold;
});
}
if (offenders.length > 0) {
this.logger.info(`Slowing subreddits using >- ${threshold}req/s:`, {leaf: 'Api Nanny'});
for (const m of offenders) {
m.delayBy = 1.5;
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
}
} else {
this.logger.info(`Couldn't detect specific offenders, slowing all...`, {leaf: 'Api Nanny'});
for (const m of this.subManagers) {
m.delayBy = 1.5;
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
}
}
this.nannyMode = 'soft';
continue;
}
if (this.nannyMode !== undefined) {
this.logger.info('Turning off due to better conditions...', {leaf: 'Api Nanny'});
for (const m of this.subManagers) {
if (m.delayBy !== undefined) {
m.delayBy = undefined;
m.notificationManager.handle('runStateChanged', 'Normal Processing Resumed', 'Slow Mode has been turned off due to better API conditions', 'system');
}
if (m.queueState.state === PAUSED && m.queueState.causedBy === SYSTEM) {
m.startQueue('system', {reason: 'API Nanny has been turned off due to better API conditions'});
}
if (m.eventsState.state === PAUSED && m.eventsState.causedBy === SYSTEM) {
await m.startEvents('system', {reason: 'API Nanny has been turned off due to better API conditions'});
}
}
this.nannyMode = undefined;
}
}
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
}
const rollingSample = this.apiSample.slice(0, 7)
rollingSample.unshift(this.client.ratelimitRemaining);
this.apiSample = rollingSample;
const diff = this.apiSample.reduceRight((acc: number[], curr, index) => {
if (this.apiSample[index + 1] !== undefined) {
const d = Math.abs(curr - this.apiSample[index + 1]);
if (d === 0) {
return [...acc, 0];
}
return [...acc, d / 10];
}
return acc;
}, []);
this.apiRollingAvg = diff.reduce((acc, curr) => acc + curr, 0) / diff.length; // api requests per second
this.depletedInSecs = this.client.ratelimitRemaining / this.apiRollingAvg; // number of seconds until current remaining limit is 0
this.apiEstDepletion = dayjs.duration({seconds: this.depletedInSecs});
this.logger.debug(`API Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`);
let hardLimitHit = false;
if (typeof this.hardLimit === 'string') {
const hardDur = parseDuration(this.hardLimit);
hardLimitHit = hardDur.asSeconds() > this.apiEstDepletion.asSeconds();
} else {
hardLimitHit = this.hardLimit > this.client.ratelimitRemaining;
}
if (hardLimitHit) {
if (this.nannyMode === 'hard') {
return;
}
this.logger.info(`Detected HARD LIMIT of ${this.hardLimit} remaining`, {leaf: 'Api Nanny'});
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${this.apiRollingAvg}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
this.logger.info(`All subreddit event polling has been paused`, {leaf: 'Api Nanny'});
for (const m of this.subManagers) {
m.pauseEvents('system');
m.notificationManager.handle('runStateChanged', 'Hard Limit Triggered', `Hard Limit of ${this.hardLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit event polling has been paused.`, 'system', 'warn');
}
this.nannyMode = 'hard';
return;
}
let softLimitHit = false;
if (typeof this.softLimit === 'string') {
const softDur = parseDuration(this.softLimit);
softLimitHit = softDur.asSeconds() > this.apiEstDepletion.asSeconds();
} else {
softLimitHit = this.softLimit > this.client.ratelimitRemaining;
}
if (softLimitHit) {
if (this.nannyMode === 'soft') {
return;
}
this.logger.info(`Detected SOFT LIMIT of ${this.softLimit} remaining`, {leaf: 'Api Nanny'});
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
this.logger.info('Trying to detect heavy usage subreddits...', {leaf: 'Api Nanny'});
let threshold = 0.5;
let offenders = this.subManagers.filter(x => {
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
return combinedPerSec > threshold;
});
if (offenders.length === 0) {
threshold = 0.25;
// reduce threshold
offenders = this.subManagers.filter(x => {
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
return combinedPerSec > threshold;
});
}
if (offenders.length > 0) {
this.logger.info(`Slowing subreddits using >- ${threshold}req/s:`, {leaf: 'Api Nanny'});
for (const m of offenders) {
m.delayBy = 1.5;
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
}
} else {
this.logger.info(`Couldn't detect specific offenders, slowing all...`, {leaf: 'Api Nanny'});
for (const m of this.subManagers) {
m.delayBy = 1.5;
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
}
}
this.nannyMode = 'soft';
return
}
if (this.nannyMode !== undefined) {
this.logger.info('Turning off due to better conditions...', {leaf: 'Api Nanny'});
for (const m of this.subManagers) {
if (m.delayBy !== undefined) {
m.delayBy = undefined;
m.notificationManager.handle('runStateChanged', 'Normal Processing Resumed', 'Slow Mode has been turned off due to better API conditions', 'system');
}
if (m.queueState.state === PAUSED && m.queueState.causedBy === SYSTEM) {
m.startQueue('system', {reason: 'API Nanny has been turned off due to better API conditions'});
}
if (m.eventsState.state === PAUSED && m.eventsState.causedBy === SYSTEM) {
await m.startEvents('system', {reason: 'API Nanny has been turned off due to better API conditions'});
}
}
this.nannyMode = undefined;
}
} catch (err) {
this.logger.error('Error occurred during nanny loop', err);
this.logger.error(`Error occurred during nanny loop: ${err.message}`);
throw err;
} finally {
this.logger.info('Nanny stopped');
this.emitter.emit('nannyStopped');
}
}
}

View File

@@ -456,6 +456,32 @@ export interface CacheConfig extends TTLConfig {
* To specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`
* */
provider?: CacheProvider | CacheOptions
/**
* The **maximum** number of Events that the cache should store triggered result summaries for
*
* These summaries are viewable through the Web UI.
*
* The value specified by a subreddit cannot be larger than the value set by the Operator for the global/bot config (if set)
*
* @default 25
* @example [25]
* */
actionedEventsMax?: number
}
export interface OperatorCacheConfig extends CacheConfig {
/**
* The **default** number of Events that the cache will store triggered result summaries for
*
* These summaries are viewable through the Web UI.
*
* The value specified cannot be larger than `actionedEventsMax` for the global/bot config (if set)
*
* @default 25
* @example [25]
* */
actionedEventsDefault?: number
}
export interface Footer {
@@ -731,6 +757,8 @@ export type StrongCache = {
commentTTL: number | boolean,
filterCriteriaTTL: number | boolean,
provider: CacheOptions
actionedEventsMax?: number,
actionedEventsDefault: number,
}
/**
@@ -1091,7 +1119,7 @@ export interface BotInstanceJsonConfig {
*
* Every setting not specified will default to what is specified by the global operator caching config
* */
caching?: CacheConfig
caching?: OperatorCacheConfig
/**
* Settings related to managing heavy API usage.
* */
@@ -1204,7 +1232,7 @@ export interface OperatorJsonConfig {
*
* These settings will be used by each bot, and subreddit, that does not specify their own
* */
caching?: CacheConfig
caching?: OperatorCacheConfig
bots?: BotInstanceJsonConfig[]
@@ -1455,6 +1483,7 @@ export interface ActionedEvent {
timestamp: number
check: string
ruleSummary: string,
subreddit: string,
ruleResults: RuleResult[]
actionResults: ActionResult[]
}

View File

@@ -348,9 +348,9 @@ export const parseOpConfigFromArgs = (args: any): OperatorJsonConfig => {
return removeUndefinedKeys(data) as OperatorJsonConfig;
}
const parseListFromEnv = (val: string|undefined) => {
const parseListFromEnv = (val: string | undefined) => {
let listVals: undefined | string[];
if(val === undefined) {
if (val === undefined) {
return listVals;
}
const trimmedVal = val.trim();
@@ -401,7 +401,7 @@ export const parseDefaultBotInstanceFromEnv = (): BotInstanceJsonConfig => {
export const parseOpConfigFromEnv = (): OperatorJsonConfig => {
const data = {
mode: process.env.MODE !== undefined ? process.env.MODE as ('all' | 'server' | 'client') : undefined,
operator: {
operator: {
name: parseListFromEnv(process.env.OPERATOR),
display: process.env.OPERATOR_DISPLAY
},
@@ -504,12 +504,12 @@ export const parseOperatorConfigFromSources = async (args: any): Promise<Operato
arrayMerge: overwriteMerge,
}) as BotInstanceJsonConfig;
if(configFromFile.caching !== undefined) {
if (configFromFile.caching !== undefined) {
defaultBotInstance.caching = configFromFile.caching;
}
let botInstances = [];
if(botInstancesFromFile.length === 0) {
if (botInstancesFromFile.length === 0) {
botInstances = [defaultBotInstance];
} else {
botInstances = botInstancesFromFile.map(x => merge.all([defaultBotInstance, x], {arrayMerge: overwriteMerge}));
@@ -555,20 +555,29 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
let cache: StrongCache;
let defaultProvider: CacheOptions;
let opActionedEventsMax: number | undefined;
let opActionedEventsDefault: number = 25;
if(opCache === undefined) {
defaultProvider = {
if (opCache === undefined) {
defaultProvider = {
store: 'memory',
...cacheOptDefaults
};
cache = {
...cacheTTLDefaults,
provider: defaultProvider
provider: defaultProvider,
actionedEventsDefault: opActionedEventsDefault,
};
} else {
const {provider, ...restConfig} = opCache;
if(typeof provider === 'string') {
const {provider, actionedEventsMax, actionedEventsDefault = opActionedEventsDefault, ...restConfig} = opCache;
if (actionedEventsMax !== undefined && actionedEventsMax !== null) {
opActionedEventsMax = actionedEventsMax;
opActionedEventsDefault = Math.min(actionedEventsDefault, actionedEventsMax);
}
if (typeof provider === 'string') {
defaultProvider = {
store: provider as CacheProvider,
...cacheOptDefaults
@@ -584,112 +593,132 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
cache = {
...cacheTTLDefaults,
...restConfig,
actionedEventsMax: opActionedEventsMax,
actionedEventsDefault: opActionedEventsDefault,
provider: defaultProvider,
}
}
let hydratedBots: BotInstanceConfig[] = bots.map(x => {
const {
name: botName,
polling: {
sharedMod = false,
limit = 100,
interval = 30,
} = {},
queue: {
maxWorkers = 1,
} = {},
caching,
nanny: {
softLimit = 250,
hardLimit = 50
} = {},
snoowrap = {},
credentials: {
clientId: ci,
clientSecret: cs,
...restCred
} = {},
subreddits: {
names = [],
exclude = [],
wikiConfig = 'botconfig/contextbot',
dryRun,
heartbeatInterval = 300,
} = {},
} = x;
let hydratedBots: BotInstanceConfig[] = bots.map(x => {
const {
name: botName,
polling: {
sharedMod = false,
limit = 100,
interval = 30,
} = {},
queue: {
maxWorkers = 1,
} = {},
caching,
nanny: {
softLimit = 250,
hardLimit = 50
} = {},
snoowrap = {},
credentials: {
clientId: ci,
clientSecret: cs,
...restCred
} = {},
subreddits: {
names = [],
exclude = [],
wikiConfig = 'botconfig/contextbot',
dryRun,
heartbeatInterval = 300,
} = {},
} = x;
let botCache: StrongCache;
let botCache: StrongCache;
let botActionedEventsDefault: number;
if (caching === undefined) {
if(caching === undefined) {
botCache = {
...cacheTTLDefaults,
provider: {
store: 'memory',
...cacheOptDefaults
}
};
} else {
const {provider, ...restConfig} = caching;
if (typeof provider === 'string') {
botCache = {
...cacheTTLDefaults,
...restConfig,
actionedEventsDefault: opActionedEventsDefault,
actionedEventsMax: opActionedEventsMax,
provider: {
store: provider as CacheProvider,
store: 'memory',
...cacheOptDefaults
}
}
};
} else {
const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
botCache = {
...cacheTTLDefaults,
...restConfig,
provider: {
store,
...cacheOptDefaults,
...rest,
},
const {
provider,
actionedEventsMax = opActionedEventsMax,
actionedEventsDefault = opActionedEventsDefault,
...restConfig
} = caching;
botActionedEventsDefault = actionedEventsDefault;
if(actionedEventsMax !== undefined) {
botActionedEventsDefault = Math.min(actionedEventsDefault, actionedEventsMax);
}
if (typeof provider === 'string') {
botCache = {
...cacheTTLDefaults,
...restConfig,
actionedEventsDefault: botActionedEventsDefault,
provider: {
store: provider as CacheProvider,
...cacheOptDefaults
}
}
} else {
const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
botCache = {
...cacheTTLDefaults,
...restConfig,
actionedEventsDefault: botActionedEventsDefault,
actionedEventsMax,
provider: {
store,
...cacheOptDefaults,
...rest,
},
}
}
}
}
const botCreds = {
const botCreds = {
clientId: (ci as string),
clientSecret: (cs as string),
...restCred,
clientSecret: (cs as string),
...restCred,
};
if (botCache.provider.prefix === undefined || botCache.provider.prefix === defaultProvider.prefix) {
// need to provide unique prefix to bot
botCache.provider.prefix = buildCachePrefix([botCache.provider.prefix, 'bot', (botName || objectHash.sha1(botCreds))]);
}
return {
name: botName,
snoowrap,
subreddits: {
names,
exclude,
wikiConfig,
heartbeatInterval,
dryRun,
},
credentials: botCreds,
caching: botCache,
polling: {
sharedMod,
limit,
interval,
},
queue: {
maxWorkers,
},
nanny: {
softLimit,
hardLimit
return {
name: botName,
snoowrap,
subreddits: {
names,
exclude,
wikiConfig,
heartbeatInterval,
dryRun,
},
credentials: botCreds,
caching: botCache,
polling: {
sharedMod,
limit,
interval,
},
queue: {
maxWorkers,
},
nanny: {
softLimit,
hardLimit
}
}
}
});

View File

@@ -168,7 +168,10 @@ export class RecentActivityRule extends Rule {
karmaThreshold,
} = summary;
const relevantSubs = subsWithActivity.length === 0 ? subreddits : subsWithActivity;
const totalSummary = `${testValue} activities over ${relevantSubs.length} subreddits${karmaThreshold !== undefined ? ` with ${combinedKarma} combined karma` : ''} ${triggered ? 'met' : 'did not meet'} threshold of ${threshold}${karmaThreshold !== undefined ? ` and ${karmaThreshold} combined karma` : ''}`;
let totalSummary = `${testValue} activities over ${relevantSubs.length} subreddits${karmaThreshold !== undefined ? ` with ${combinedKarma} combined karma` : ''} ${triggered ? 'met' : 'did not meet'} threshold of ${threshold}${karmaThreshold !== undefined ? ` and ${karmaThreshold} combined karma` : ''}`;
if(triggered && subsWithActivity.length > 0) {
totalSummary = `${totalSummary} -- subreddits: ${subsWithActivity.join(', ')}`;
}
return {
result: totalSummary,
data: {

View File

@@ -5,7 +5,7 @@ import {
asSubmission,
comparisonTextOp, FAIL, isExternalUrlSubmission, isSubmission, parseGenericValueComparison,
parseGenericValueOrPercentComparison, parseRegex,
PASS
PASS, triggeredIndicator
} from "../util";
import {
ActivityWindowType, JoinOperands,
@@ -140,7 +140,7 @@ export class RegexRule extends Rule {
const {
name,
regex,
regexFlags,
regexFlags = 'g',
testOn: testOnVals = ['title', 'body'],
lookAt = 'all',
matchThreshold = '> 0',
@@ -158,7 +158,7 @@ export class RegexRule extends Rule {
}, []);
// check regex
const reg = new RegExp(regex);
const reg = new RegExp(regex, regexFlags);
// ok cool its a valid regex
const matchComparison = parseGenericValueComparison(matchThreshold);
@@ -301,20 +301,25 @@ export class RegexRule extends Rule {
let index = 0;
for (const c of criteriaResults) {
index++;
let msg = `Crit ${c.criteria.name || index} ${c.triggered ? PASS : FAIL}`;
let msg = `Criteria ${c.criteria.name || `#${index}`} ${triggeredIndicator(c.triggered)}`;
if (c.activityThresholdMet !== undefined) {
msg = `${msg} -- Activity Match=> ${c.activityThresholdMet ? PASS : FAIL} ${c.activitiesMatchedCount} ${c.criteria.activityMatchThreshold} (Threshold ${c.criteria.matchThreshold})`;
msg = `${msg} -- Activity Match ${triggeredIndicator(c.activityThresholdMet)} => ${c.activitiesMatchedCount} ${c.criteria.activityMatchThreshold} (Threshold ${c.criteria.matchThreshold})`;
}
if (c.totalThresholdMet !== undefined) {
msg = `${msg} -- Total Matches=> ${c.totalThresholdMet ? PASS : FAIL} ${c.matchCount} ${c.criteria.totalMatchThreshold}`;
msg = `${msg} -- Total Matches ${triggeredIndicator(c.totalThresholdMet)} => ${c.matchCount} ${c.criteria.totalMatchThreshold}`;
} else {
msg = `${msg} and ${c.matchCount} Total Matches`;
}
msg = `${msg} (Window: ${c.criteria.window})`;
logSummary.push(msg);
if(c.matches.length > 0) {
let matchSample = `-- Matched Values: ${c.matches.slice(0, 3).map(x => `"${x}"`).join(', ')}${c.matches.length > 3 ? `, and ${c.matches.length - 3} more...` : ''}`;
logSummary.push(`${msg} ${matchSample}`);
} else {
logSummary.push(msg);
}
}
const result = `${criteriaMet ? PASS : FAIL} ${logSummary.join(' || ')}`;
const result = `${triggeredIndicator(criteriaMet)} ${logSummary.join(' || ')}`;
this.logger.verbose(result);
return Promise.resolve([criteriaMet, this.getResult(criteriaMet, {result, data: criteriaResults})]);

View File

@@ -688,6 +688,11 @@
},
"CacheConfig": {
"properties": {
"actionedEventsMax": {
"default": 25,
"description": "The **maximum** number of Events that the cache should store triggered result summaries for\n\nThese summaries are viewable through the Web UI.\n\nThe value specified by a subreddit cannot be larger than the value set by the Operator for the global/bot config (if set)",
"type": "number"
},
"authorTTL": {
"default": 60,
"description": "Amount of time, in seconds, author activity history (Comments/Submission) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache\n\n* ENV => `AUTHOR_TTL`\n* ARG => `--authorTTL <sec>`",

View File

@@ -23,7 +23,7 @@
"description": "The configuration for an **individual reddit account** ContextMod will run as a bot.\n\nMultiple bot configs may be specified (one per reddit account).\n\n**NOTE:** If `bots` is not specified in a `FILE` then a default `bot` is generated using `ENV/ARG` values IE `CLIENT_ID`, etc...but if `bots` IS specified the default is not generated.",
"properties": {
"caching": {
"$ref": "#/definitions/CacheConfig",
"$ref": "#/definitions/OperatorCacheConfig",
"description": "Settings to configure the default caching behavior for this bot\n\nEvery setting not specified will default to what is specified by the global operator caching config"
},
"credentials": {
@@ -176,93 +176,6 @@
},
"type": "object"
},
"CacheConfig": {
"properties": {
"authorTTL": {
"default": 60,
"description": "Amount of time, in seconds, author activity history (Comments/Submission) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache\n\n* ENV => `AUTHOR_TTL`\n* ARG => `--authorTTL <sec>`",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"commentTTL": {
"default": 60,
"description": "Amount of time, in seconds, a comment should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"filterCriteriaTTL": {
"default": 60,
"description": "Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)\n\nThis is especially useful if when polling high-volume comments and your checks rely on author/item filters\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"provider": {
"anyOf": [
{
"$ref": "#/definitions/CacheOptions"
},
{
"enum": [
"memory",
"none",
"redis"
],
"type": "string"
}
],
"description": "The cache provider and, optionally, a custom configuration for that provider\n\nIf not present or `null` provider will be `memory`.\n\nTo specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`"
},
"submissionTTL": {
"default": 60,
"description": "Amount of time, in seconds, a submission should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"userNotesTTL": {
"default": 300,
"description": "Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
300
],
"type": [
"number",
"boolean"
]
},
"wikiTTL": {
"default": 300,
"description": "Amount of time, in seconds, wiki content pages should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
300
],
"type": [
"number",
"boolean"
]
}
},
"type": "object"
},
"CacheOptions": {
"additionalProperties": {
},
@@ -417,6 +330,103 @@
],
"type": "object"
},
"OperatorCacheConfig": {
"properties": {
"actionedEventsDefault": {
"default": 25,
"description": "The **default** number of Events that the cache will store triggered result summaries for\n\nThese summaries are viewable through the Web UI.\n\nThe value specified cannot be larger than `actionedEventsMax` for the global/bot config (if set)",
"type": "number"
},
"actionedEventsMax": {
"default": 25,
"description": "The **maximum** number of Events that the cache should store triggered result summaries for\n\nThese summaries are viewable through the Web UI.\n\nThe value specified by a subreddit cannot be larger than the value set by the Operator for the global/bot config (if set)",
"type": "number"
},
"authorTTL": {
"default": 60,
"description": "Amount of time, in seconds, author activity history (Comments/Submission) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache\n\n* ENV => `AUTHOR_TTL`\n* ARG => `--authorTTL <sec>`",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"commentTTL": {
"default": 60,
"description": "Amount of time, in seconds, a comment should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"filterCriteriaTTL": {
"default": 60,
"description": "Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)\n\nThis is especially useful if when polling high-volume comments and your checks rely on author/item filters\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"provider": {
"anyOf": [
{
"$ref": "#/definitions/CacheOptions"
},
{
"enum": [
"memory",
"none",
"redis"
],
"type": "string"
}
],
"description": "The cache provider and, optionally, a custom configuration for that provider\n\nIf not present or `null` provider will be `memory`.\n\nTo specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`"
},
"submissionTTL": {
"default": 60,
"description": "Amount of time, in seconds, a submission should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"userNotesTTL": {
"default": 300,
"description": "Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
300
],
"type": [
"number",
"boolean"
]
},
"wikiTTL": {
"default": 300,
"description": "Amount of time, in seconds, wiki content pages should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
300
],
"type": [
"number",
"boolean"
]
}
},
"type": "object"
},
"PollingDefaults": {
"properties": {
"delayUntil": {
@@ -551,7 +561,7 @@
"type": "array"
},
"caching": {
"$ref": "#/definitions/CacheConfig",
"$ref": "#/definitions/OperatorCacheConfig",
"description": "Settings to configure the default caching behavior globally\n\nThese settings will be used by each bot, and subreddit, that does not specify their own"
},
"logging": {

View File

@@ -85,7 +85,6 @@ export interface ManagerStats {
actionsRunTotal: number
actionsRunSinceStart: number,
actionsRunSinceStartTotal: number
actionedEvents: number
cache: {
provider: string,
currentKeyCount: number,
@@ -193,7 +192,6 @@ export class Manager {
actionsRunTotal: totalFromMapStats(this.actionsRun),
actionsRunSinceStart: this.actionsRunSinceStart,
actionsRunSinceStartTotal: totalFromMapStats(this.actionsRunSinceStart),
actionedEvents: this.actionedEvents.length,
cache: {
provider: 'none',
currentKeyCount: 0,
@@ -548,6 +546,7 @@ export class Manager {
let totalRulesRun = 0;
let runActions: ActionResult[] = [];
let actionedEvent: ActionedEvent = {
subreddit: this.subreddit.display_name_prefixed,
activity: {
peek: ePeek,
link: item.permalink
@@ -645,9 +644,7 @@ export class Manager {
}
actionedEvent.actionResults = runActions;
if(triggered) {
this.actionedEvents.unshift(actionedEvent);
// save last 25 triggered events
this.actionedEvents = this.actionedEvents.slice(0, 25);
await this.resources.addActionedEvent(actionedEvent);
}
this.logger.verbose(`Run Stats: Checks ${checksRun} | Rules => Total: ${totalRulesRun} Unique: ${allRuleResults.length} Cached: ${totalRulesRun - allRuleResults.length} Rolling Avg: ~${formatNumber(this.rulesUniqueRollingAvg)}/s | Actions ${actionsRun}`);

View File

@@ -25,7 +25,7 @@ import {
BotInstanceConfig,
CacheOptions, CommentState,
Footer, OperatorConfig, ResourceStats, StrongCache, SubmissionState,
CacheConfig, TTLConfig, TypedActivityStates, UserResultCache
CacheConfig, TTLConfig, TypedActivityStates, UserResultCache, ActionedEvent
} from "../Common/interfaces";
import UserNotes from "./UserNotes";
import Mustache from "mustache";
@@ -54,7 +54,8 @@ interface SubredditResourceOptions extends Footer {
subreddit: Subreddit,
logger: Logger;
client: Snoowrap;
prefix? :string;
prefix?: string;
actionedEventsMax: number;
}
export interface SubredditResourceSetOptions extends CacheConfig, Footer {
@@ -79,6 +80,7 @@ export class SubredditResources {
cacheSettingsHash?: string;
pruneInterval?: any;
prefix?: string
actionedEventsMax: number;
stats: { cache: ResourceStats };
@@ -97,6 +99,7 @@ export class SubredditResources {
cache,
prefix,
cacheType,
actionedEventsMax,
cacheSettingsHash,
client,
} = options || {};
@@ -106,6 +109,7 @@ export class SubredditResources {
this.prefix = prefix;
this.client = client;
this.cacheType = cacheType;
this.actionedEventsMax = actionedEventsMax;
this.authorTTL = authorTTL === true ? 0 : authorTTL;
this.submissionTTL = submissionTTL === true ? 0 : submissionTTL;
this.commentTTL = commentTTL === true ? 0 : commentTTL;
@@ -213,6 +217,16 @@ export class SubredditResources {
this.logger = logger.child({labels: ['Resource Cache']}, mergeArr);
}
async getActionedEvents(): Promise<ActionedEvent[]> {
return await this.cache.wrap(`actionedEvents-${this.subreddit.display_name}`, () => []);
}
async addActionedEvent(ae: ActionedEvent) {
const events = await this.cache.wrap(`actionedEvents-${this.subreddit.display_name}`, () => []) as ActionedEvent[];
events.unshift(ae);
await this.cache.set(`actionedEvents-${this.subreddit.display_name}`, events.slice(0, this.actionedEventsMax), {ttl: 0});
}
async getActivity(item: Submission | Comment) {
try {
let hash = '';
@@ -586,6 +600,8 @@ export class BotResourcesManager {
cacheType: string = 'none';
cacheHash: string;
ttlDefaults: Required<TTLConfig>;
actionedEventsMaxDefault?: number;
actionedEventsDefault: number;
pruneInterval: any;
constructor(config: BotInstanceConfig) {
@@ -598,18 +614,23 @@ export class BotResourcesManager {
submissionTTL,
filterCriteriaTTL,
provider,
actionedEventsMax,
actionedEventsDefault,
},
name,
credentials,
caching,
} = config;
caching.provider.prefix = buildCachePrefix([caching.provider.prefix, 'SHARED']);
this.cacheHash = objectHash.sha1(caching);
const {actionedEventsMax: eMax, actionedEventsDefault: eDef, ...relevantCacheSettings} = caching;
this.cacheHash = objectHash.sha1(relevantCacheSettings);
this.defaultCacheConfig = caching;
this.ttlDefaults = {authorTTL, userNotesTTL, wikiTTL, commentTTL, submissionTTL, filterCriteriaTTL};
const options = provider;
this.cacheType = options.store;
this.actionedEventsMaxDefault = actionedEventsMax;
this.actionedEventsDefault = actionedEventsDefault;
this.defaultCache = createCacheManager(options);
if (this.cacheType === 'memory') {
const min = Math.min(...([this.ttlDefaults.wikiTTL, this.ttlDefaults.authorTTL, this.ttlDefaults.userNotesTTL].filter(x => typeof x === 'number' && x !== 0) as number[]));
@@ -644,11 +665,12 @@ export class BotResourcesManager {
cacheSettingsHash: hash,
ttl: this.ttlDefaults,
prefix: this.defaultCacheConfig.provider.prefix,
actionedEventsMax: this.actionedEventsMaxDefault !== undefined ? Math.min(this.actionedEventsDefault, this.actionedEventsMaxDefault) : this.actionedEventsDefault,
...init,
};
if(caching !== undefined) {
const {provider = this.defaultCacheConfig.provider, ...rest} = caching;
const {provider = this.defaultCacheConfig.provider, actionedEventsMax = this.actionedEventsDefault, ...rest} = caching;
let cacheConfig = {
provider: buildCacheOptionsFromProvider(provider),
ttl: {
@@ -663,8 +685,10 @@ export class BotResourcesManager {
const defaultPrefix = trueProvider.prefix;
const subPrefix = defaultPrefix === this.defaultCacheConfig.provider.prefix ? buildCachePrefix([(defaultPrefix !== undefined ? defaultPrefix.replace('SHARED', '') : defaultPrefix), subName]) : trueProvider.prefix;
trueProvider.prefix = subPrefix;
const eventsMax = this.actionedEventsMaxDefault !== undefined ? Math.min(actionedEventsMax, this.actionedEventsMaxDefault) : actionedEventsMax;
opts = {
cache: createCacheManager(trueProvider),
actionedEventsMax: eventsMax,
cacheType: trueProvider.store,
cacheSettingsHash: hash,
prefix: subPrefix,

View File

@@ -847,7 +847,7 @@ const webClient = async (options: OperatorConfig) => {
return res.render('events', {
data: resp.map((x) => {
const {timestamp, activity: {peek, link}, ruleResults = [], actionResults = [], ...rest} = x;
const time = dayjs(timestamp).local().format();
const time = dayjs(timestamp).local().format('YY-MM-DD HH:mm:ss z');
const formattedPeek = Autolinker.link(peek, {
email: false,
phone: false,
@@ -858,11 +858,11 @@ const webClient = async (options: OperatorConfig) => {
});
const formattedRuleResults = ruleResults.map((y: any) => {
const {triggered, result, ...restY} = y;
let t = 'Not Triggered';
let t = triggeredIndicator(false);
if(triggered === null) {
t = 'Skipped';
} else if(triggered === true) {
t = 'Triggered';
t = triggeredIndicator(true);
}
return {
...restY,
@@ -876,7 +876,7 @@ const webClient = async (options: OperatorConfig) => {
if(!run) {
res = `Not Run - ${runReason === undefined ? '(No Reason)' : runReason}`;
} else {
res = `Success: ${triggeredIndicator(success)}${result !== undefined ? ` - ${result}` : ''}`;
res = `${triggeredIndicator(success)}${result !== undefined ? ` - ${result}` : ''}`;
}
return {
...restA,
@@ -895,7 +895,7 @@ const webClient = async (options: OperatorConfig) => {
actionResults: formattedActionResults
}
}),
title: `${subreddit} Actioned Events`
title: `${subreddit !== undefined ? `${subreddit} ` : ''}Actioned Events`
});
});

View File

@@ -34,7 +34,6 @@ const managerStats: ManagerStats = {
rulesRunTotal: 0,
rulesTriggeredSinceStartTotal: 0,
rulesTriggeredTotal: 0,
actionedEvents: 0,
};
const botStats: BotStats = {
apiAvg: '-',

View File

@@ -37,16 +37,20 @@ export const subredditRoute = (required = true) => async (req: Request, res: Res
const bot = req.serverBot;
const {subreddit} = req.query as any;
const {name: userName, realManagers = [], isOperator} = req.user as Express.User;
if (!isOperator && !realManagers.includes(subreddit)) {
return res.status(400).send('Cannot access route for subreddit you do not manage or is not run by the bot')
}
const manager = bot.subManagers.find(x => x.displayLabel === subreddit);
if (manager === undefined) {
return res.status(400).send('Cannot access route for subreddit you do not manage or is not run by the bot')
}
if(subreddit === undefined && required === false) {
next();
} else {
const {name: userName, realManagers = [], isOperator} = req.user as Express.User;
if (!isOperator && !realManagers.includes(subreddit)) {
return res.status(400).send('Cannot access route for subreddit you do not manage or is not run by the bot')
}
const manager = bot.subManagers.find(x => x.displayLabel === subreddit);
if (manager === undefined) {
return res.status(400).send('Cannot access route for subreddit you do not manage or is not run by the bot')
}
req.manager = manager;
req.manager = manager;
next();
next();
}
}

View File

@@ -5,6 +5,7 @@ import winston from 'winston';
import {COMMENT_URL_ID, parseLinkIdentifier, SUBMISSION_URL_ID} from "../../../../../util";
import {booleanMiddle} from "../../../../Common/middleware";
import {Manager} from "../../../../../Subreddit/Manager";
import {ActionedEvent} from "../../../../../Common/interfaces";
const commentReg = parseLinkIdentifier([COMMENT_URL_ID]);
const submissionReg = parseLinkIdentifier([SUBMISSION_URL_ID]);
@@ -21,11 +22,30 @@ export const configRoute = [authUserCheck(), botRoute(), subredditRoute(), confi
const actionedEvents = async (req: Request, res: Response) => {
const manager = req.manager as Manager;
let managers: Manager[] = [];
const manager = req.manager as Manager | undefined;
if(manager !== undefined) {
managers.push(manager);
} else {
for(const manager of req.serverBot.subManagers) {
if((req.user?.realManagers as string[]).includes(manager.displayLabel)) {
managers.push(manager);
}
}
}
return res.json(manager.actionedEvents);
let events: ActionedEvent[] = [];
for(const m of managers) {
if(m.resources !== undefined) {
events = events.concat(await m.resources.getActionedEvents());
}
}
events.sort((a, b) => b.timestamp - a.timestamp);
return res.json(events);
};
export const actionedEventsRoute = [authUserCheck(), botRoute(), subredditRoute(), actionedEvents];
export const actionedEventsRoute = [authUserCheck(), botRoute(), subredditRoute(false), actionedEvents];
const action = async (req: Request, res: Response) => {
const bot = req.serverBot;

View File

@@ -22,6 +22,7 @@
<div class="container mx-auto">
<div class="grid">
<div class="dark:text-white mb-3 pl-2">
Schema <a href="/config?schema=subreddit" id="subredditSchemaType">Subreddit</a> / <a href="/config?schema=operator" id="operatorSchemaType">Operator</a> |
<span class="has-tooltip">
<span style="z-index:999; margin-top: 30px;" class='tooltip rounded shadow-lg p-3 bg-gray-100 text-black space-y-2'>
<div>Copy + paste your configuration here to get:</div>
@@ -39,7 +40,6 @@
</ul>
<div>When done editing hit Ctrl+A (Command+A on macOS) to select all text, then copy + paste back into your wiki/file</div>
</span>
<span id="schemaType"></span> |
<span class="cursor-help">
How To Use
<span>
@@ -55,7 +55,7 @@
</span>
</span>
</span>
| <a id="schemaOpen" href="">Open With Operator Schema</a>
| <input id="configUrl" class="text-black placeholder-gray-500 rounded mx-2" style="min-width:400px;" placeholder="URL of a config to load"/> <a href="#" id="loadConfig">Load</a>
<div id="error" class="font-semibold"></div>
</div>
<div style="min-height: 80vh" id="editor"></div>
@@ -104,22 +104,22 @@
var searchParams = new URLSearchParams(window.location.search);
let schemaType;
let schemaFile;
if(searchParams.get('schema') === 'operator') {
schemaType = 'OperatorConfig.json';
schemaType = 'operator';
schemaFile = 'OperatorConfig.json';
preamble.push('// automatic validation of your OPERATOR configuration');
document.querySelector('#schemaTypeList').innerHTML = 'automatic validation of your OPERATOR configuration (yellow squiggly)';
document.querySelector('#schemaType').innerHTML = 'Operator Configuration';
document.querySelector('#schemaOpen').href = '/config?schema=subreddit';
document.querySelector('#schemaOpen').innerHTML = 'Open with Subreddit Schema';
document.querySelector('#operatorSchemaType').classList.add('font-bold', 'no-underline', 'pointer-events-none');
} else {
schemaType = 'App.json';
schemaType = 'subreddit';
schemaFile = 'App.json';
preamble.push('// automatic validation of your SUBREDDIT configuration');
document.querySelector('#schemaTypeList').innerHTML = 'automatic validation of your SUBREDDIT configuration (yellow squiggly)'
document.querySelector('#schemaType').innerHTML = 'Subreddit Configuration';
document.querySelector('#schemaOpen').href = '/config?schema=operator';
document.querySelector('#subredditSchemaType').classList.add('font-bold', 'no-underline', 'pointer-events-none');
}
const schemaUri = `${document.location.origin}/schemas/${schemaType}`;
const schemaUri = `${document.location.origin}/schemas/${schemaFile}`;
require(['vs/editor/editor.main'], function () {
const modelUri = monaco.Uri.parse("a://b/foo.json");
@@ -135,15 +135,44 @@
schema: schemaData
}]
});
if(searchParams.get('subreddit') !== null) {
fetch(`${document.location.origin}/config/content${document.location.search}`).then((resp) => {
var model = monaco.editor.createModel(preamble.join('\r\n'), "json", modelUri);
document.querySelector('#loadConfig').addEventListener('click', (e) => {
e.preventDefault();
const newUrl = document.querySelector('#configUrl').value;
fetch(newUrl).then((resp) => {
if(!resp.ok) {
resp.text().then(data => {
document.querySelector('#error').innerHTML = `Error occurred while fetching configuration => ${data}`
});
} else {
var sp = new URLSearchParams();
sp.append('schema', schemaType);
sp.append('url', newUrl);
history.pushState(null, '', `${window.location.pathname}?${sp.toString()}`);
resp.text().then(data => {
//model = monaco.editor.createModel(data, "json", modelUri);
model.setValue(data);
})
}
});
});
let dlUrl = searchParams.get('url');
if(dlUrl === null && searchParams.get('subreddit') !== null) {
dlUrl = `${document.location.origin}/config/content${document.location.search}`
}
if(dlUrl !== null) {
document.querySelector('#configUrl').value = dlUrl;
fetch(dlUrl).then((resp) => {
if(!resp.ok) {
resp.text().then(data => {
document.querySelector('#error').innerHTML = `Error occurred while fetching configuration => ${data}`
});
} else {
resp.text().then(data => {
var model = monaco.editor.createModel(data, "json", modelUri);
model.setValue(data);
//model = monaco.editor.createModel(data, "json", modelUri);
var editor = monaco.editor.create(document.getElementById('editor'), {
model,
theme: 'vs-dark',
@@ -154,9 +183,8 @@
editor;
})
}
})
});
} else {
var model = monaco.editor.createModel(preamble.join('\r\n'), "json", modelUri);
var editor = monaco.editor.create(document.getElementById('editor'), {
model,
theme: 'vs-dark',

View File

@@ -20,7 +20,7 @@
}
</style>
</head>
<body style="user-select: none;" class="">
<body>
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
<%- include('partials/title') %>
@@ -38,7 +38,7 @@
<span class="peek"><%- eRes.activity.peek %></span><a target="_blank" href="https://reddit.com<%= eRes.activity.link%>">(Link)</a>
</div>
<div class="flex items-center flex-end">
<%= eRes.timestamp %>
<%= eRes.subreddit %> @ <%= eRes.timestamp %>
</div>
</div>
</div>

View File

@@ -373,11 +373,7 @@
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.checksTriggeredTotal %></span> Triggered / <span><%= data.stats.checksRunTotal %></span> Run
</span>
<% if (data.name !== 'All') { %>
<a target="_blank" href="/events?instance=<%= instanceId %>&bot=<%= bot.system.name %>&subreddit=<%= data.name %>" class="underline" style="text-decoration-style: dotted"><%= data.stats.checksTriggeredTotal %> T</a>
<% } else { %>
<%= data.stats.checksTriggeredTotal %> T
<% } %>/ <span><%= data.stats.checksRunTotal %></span> R</span>
<span><%= data.stats.checksTriggeredTotal %> T / <span><%= data.stats.checksRunTotal %></span> R</span>
</span>
<label>Rules</label>
@@ -389,7 +385,11 @@
</span>
<label>Actions</label>
<span><%= data.stats.actionsRunTotal %> Run</span>
<% if (data.name !== 'All') { %>
<a target="_blank" href="/events?instance=<%= instanceId %>&bot=<%= bot.system.name %>&subreddit=<%= data.name %>" class="underline" style="text-decoration-style: dotted"><%= data.stats.actionsRunTotal %> Run</a>
<% } else { %>
<a target="_blank" href="/events?instance=<%= instanceId %>&bot=<%= bot.system.name %>" class="underline" style="text-decoration-style: dotted"><%= data.stats.actionsRunTotal %> Run</a>
<% } %>
</div>
</div>
<div>

View File

@@ -2,6 +2,8 @@ import winston from 'winston';
import 'winston-daily-rotate-file';
import dayjs from 'dayjs';
import utc from 'dayjs/plugin/utc.js';
import advancedFormat from 'dayjs/plugin/advancedFormat';
import tz from 'dayjs/plugin/timezone';
import dduration from 'dayjs/plugin/duration.js';
import relTime from 'dayjs/plugin/relativeTime.js';
import sameafter from 'dayjs/plugin/isSameOrAfter.js';
@@ -31,6 +33,8 @@ dayjs.extend(dduration);
dayjs.extend(relTime);
dayjs.extend(sameafter);
dayjs.extend(samebefore);
dayjs.extend(tz);
dayjs.extend(advancedFormat);
const commentReg = parseLinkIdentifier([COMMENT_URL_ID]);
const submissionReg = parseLinkIdentifier([SUBMISSION_URL_ID]);