Mirror of https://github.com/FoxxMD/context-mod.git (synced 2026-01-14 16:08:02 -05:00)
Compare commits

41 commits:

487f13f704, 00b9d87cdc, 2c797e0b9b, 4a2b27bfbf, 463a4dc0eb, 4b3bea661d, 976f310f51, 4d8d3dc266, ce9e678c4c, 8cf30b6b7d, 2b6d08f8a5, f8fc63991f, d96a1f677c, b14689791c, b70c877e44, 041655376a, e1eab7696b, 65d1d36d53, 120d776fc2, 425e16295b, dd7e9d72cc, 55535ddd62, 631e21452c, be6fa4dd50, 0d7a82836f, d9a59b6824, ddbf8c3189, 8393c471b2, fe66a2e8f7, 4b0284102d, 95529f14a8, 26af2c4e4d, 044c293f34, a082c9e593, 4f3685a1f5, e242c36c09, d2d945db2c, c5018183e0, c5358f196d, 1d9f8245f9, 20b37f3a40
Dockerfile (10 lines changed)

@@ -4,6 +4,12 @@ ENV TZ=Etc/GMT

RUN apk update

# required dependencies in order to compile linux-musl (node-canvas) on alpine
# https://github.com/node-gfx/node-canvas-prebuilt/issues/77#issuecomment-884365161
RUN apk add --no-cache build-base g++ cairo-dev jpeg-dev pango-dev giflib-dev
# required dependencies in order to compile linux-musl (node-canvas) on alpine
RUN apk add --update --repository http://dl-3.alpinelinux.org/alpine/edge/testing libmount ttf-dejavu ttf-droid ttf-freefont ttf-liberation ttf-ubuntu-font-family fontconfig vips

RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone

WORKDIR /usr/app

@@ -11,7 +17,9 @@ WORKDIR /usr/app

COPY package*.json ./
COPY tsconfig.json .

RUN npm install
# no prebuild support for node-canvas on alpine so need to compile
# https://github.com/Automattic/node-canvas#compiling
RUN npm install --build-from-source

ADD . /usr/app
@@ -26,6 +26,7 @@ Some feature highlights:

* Author criteria (name, css flair/text, age, karma, moderator status, and [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes))
* Activity state (removed, locked, distinguished, etc.)
* Rules and Actions support named references (write once, reference anywhere)
* [**Image Comparisons**](/docs/imageComparison.md) via fingerprinting and/or pixel differences
* Global/subreddit-level **API caching**
* Support for [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) as criteria or Actions (writing notes)
* Docker container support
cliff.toml (new file, 67 lines)

@@ -0,0 +1,67 @@
# configuration file for git-cliff (0.1.0)

[changelog]
# changelog header
header = """
# Changelog
All notable changes to this project will be documented in this file.\n
"""
# template for the changelog body
# https://tera.netlify.app/docs/#introduction
body = """
{% if version %}\
## [{{ version | replace(from="v", to="") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
{% else %}\
## [unreleased]
{% endif %}\
{% for group, commits in commits | group_by(attribute="group") %}
### {{ group | upper_first }}
{% for commit in commits
| filter(attribute="scope")
| sort(attribute="scope") %}
- *({{commit.scope}})* {{ commit.message | upper_first }}
{%- if commit.breaking %}
{% raw %} {% endraw %}- **BREAKING**: {{commit.breaking_description}}
{%- endif -%}
{%- endfor -%}
{%- for commit in commits %}
{%- if commit.scope -%}
{% else -%}
- *(No Category)* {{ commit.message | upper_first }}
{% if commit.breaking -%}
{% raw %} {% endraw %}- **BREAKING**: {{commit.breaking_description}}
{% endif -%}
{% endif -%}
{% endfor -%}
{% endfor %}
"""
# remove the leading and trailing whitespaces from the template
trim = true
# changelog footer
footer = """
<!-- generated by git-cliff -->
"""

[git]
# allow only conventional commits
# https://www.conventionalcommits.org
conventional_commits = true
# regex for parsing and grouping commits
commit_parsers = [
    { message = "^feat", group = "Features"},
    { message = "^fix", group = "Bug Fixes"},
    { message = "^doc", group = "Documentation"},
    { message = "^perf", group = "Performance"},
    { message = "^refactor", group = "Refactor"},
    { message = "^style", group = "Styling"},
    { message = "^test", group = "Testing"},
    { message = "^chore\\(release\\): prepare for", skip = true},
    { message = "^chore", group = "Miscellaneous Tasks"},
    { body = ".*security", group = "Security"},
]
# filter out the commits that are not matched by commit parsers
filter_commits = false
# glob pattern for matching git tags
tag_pattern = "[0-9]*"
# regex for skipping tags
skip_tags = "v0.1.0-beta.1"
@@ -18,6 +18,7 @@

* [Activities `window`](#activities-window)
* [Comparisons](#thresholds-and-comparisons)
* [Activity Templating](/docs/actionTemplating.md)
* [Image Comparisons](#image-comparisons)
* [Best Practices](#best-practices)
  * [Named Rules](#named-rules)
  * [Rule Order](#rule-order)
@@ -268,6 +269,12 @@ The duration value compares a time range from **now** to `duration value` time i

Refer to [duration values in activity window documentation](/docs/activitiesWindow.md#duration-values) as well as the individual rule/criteria schema to see what this duration is comparing against.

### Image Comparisons

ContextMod implements two methods for comparing **image content**: perceptual hashing and pixel-to-pixel comparison. Comparisons can be used to filter activities in some rules.

See [image comparison documentation](/docs/imageComparison.md) for a full reference.

## Best Practices

### Named Rules
docs/imageComparison.md (new file, 201 lines)

@@ -0,0 +1,201 @@
# Overview

ContextMod supports comparing image content, for the purpose of detecting duplicates, with two different but complementary systems. Image comparison behavior is available for the following rules:

* [Recent Activity](/docs/examples/recentActivity)
* Repeat Activity (in progress)

To enable comparisons, reference the example below (at the top level of your rule) and configure as needed:

```json5
{
  "name": "ruleWithImageDetection",
  "kind": "recentActivity",
  // Add block below...
  //
  "imageDetection": {
    // enables image comparison
    "enable": true,
    // The difference, in percentage, between the reference submission and the submissions being checked
    // must be less than this number to consider the images "the same"
    "threshold": 5,
    // optional
    // set the behavior for determining if image comparison should occur on a URL:
    //
    // "extension" => try image detection if URL ends in a known image extension (jpeg, gif, png, bmp, etc.)
    // "unknown"   => try image detection if URL ends in a known image extension OR there is no extension OR the extension is unknown (not video, html, doc, etc...)
    // "all"       => ALWAYS try image detection, regardless of URL extension
    //
    // if fetchBehavior is not defined then "extension" is the default
    "fetchBehavior": "extension",
  },
  //
  // And above ^^^
  ...
}
```

**Perceptual Hashing** (`hash`) and **Pixel Comparisons** (`pixel`) may be used at the same time. Refer to the documentation below to see how they interact.

**Note:** Regardless of `fetchBehavior`, if the response from the URL does not indicate it is an image then image detection will not occur, i.e. the response `Content-Type` must contain `image`.

## Prerequisites

Both image comparison systems require [Sharp](https://sharp.pixelplumbing.com/) as a dependency. Most modern operating systems running Node.js >= 12.13.0 do not require installing additional dependencies in order to use Sharp.

If you are using the docker image for ContextMod (`foxxmd/context-mod`) Sharp is built in.

If you are installing ContextMod using npm then **Sharp should be installed automatically as an optional dependency.**

**If you do not want to install it automatically**, install ContextMod with the following command:

```
npm install --no-optional
```

If you are using ContextMod as part of a larger project you may want to require Sharp in your own package:

```
npm install sharp@0.29.1 --save
```

# Comparison Systems

## Perceptual Hashing

[Perceptual hashing](https://en.wikipedia.org/wiki/Perceptual_hashing) creates a text fingerprint of an image by:

* Dividing up the image into a grid
* Using an algorithm to derive a value from the pixels in each grid cell
* Adding up all the values to create a unique string (the "fingerprint")

An example of how a perceptual hash can work [can be found here.](https://www.hackerfactor.com/blog/?/archives/432-Looks-Like-It.html)

ContextMod uses [blockhash-js](https://github.com/commonsmachinery/blockhash-js), a javascript implementation of the algorithm described in the paper [Block Mean Value Based Image Perceptual Hashing by Bian Yang, Fan Gu and Xiamu Niu.](https://ieeexplore.ieee.org/document/4041692)
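To make the fingerprint comparison concrete, here is a minimal sketch that measures the difference between two hex fingerprints with a Hamming distance (mirroring the `hammingDistance` helper in `src/Common/blockhash/blockhash.ts`, shown later in this diff) and expresses it as a percentage. The percentage conversion is an illustrative assumption, not necessarily ContextMod's exact internals.

```typescript
// Illustrative sketch: percent difference between two blockhash hex fingerprints.
// Each hex character encodes 4 bits, so a hash of length L represents L * 4 bits.

const oneBits = [0, 1, 1, 2, 1, 2, 2, 3, 1, 2, 2, 3, 2, 3, 3, 4];

// Hamming distance between two equal-length hex hashes (as in blockhash-js)
const hammingDistance = (hash1: string, hash2: string): number => {
    if (hash1.length !== hash2.length) {
        throw new Error("Can't compare hashes with different length");
    }
    let d = 0;
    for (let i = 0; i < hash1.length; i++) {
        d += oneBits[parseInt(hash1[i], 16) ^ parseInt(hash2[i], 16)];
    }
    return d;
};

// Hypothetical helper: distance as a percentage of total bits, comparable
// against a whole-number `threshold` like the one configured above
const percentDifference = (hash1: string, hash2: string): number =>
    (hammingDistance(hash1, hash2) / (hash1.length * 4)) * 100;
```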
**Advantages**

* Low memory requirements and not CPU intensive
* Does not require any image transformations
* Hash results can be stored to make future comparisons even faster and skip downloading images (cached by URL)
* Resolution-independent

**Disadvantages**

* Hash is weak when image differences are based only on color
* Hash is weak when image contains lots of text
* Higher accuracy requires a larger calculation (more bits required)

**When should I use it?**

* General duplicate detection
* Comparing many images
* Comparing the same images often

### How To Use

If `imageDetection.enable` is `true` then hashing is enabled by default and no further configuration is required.

To further configure hashing refer to this code block:

```json5
{
  "name": "ruleWithImageDetectionAndConfiguredHashing",
  "kind": "recentActivity",
  "imageDetection": {
    "enable": true,
    // Add block below...
    //
    "hash": {
      // enable or disable hash comparisons (enabled by default)
      "enable": true,
      // determines accuracy of hash and granularity of hash comparison (comparison to other hashes)
      // the higher the bits the more accurate the comparison
      //
      // NOTE: Hashes of different sizes (bits) cannot be compared. If you are caching hashes make sure all rules where results may be shared use the same bit count to ensure hashes can be compared. Otherwise hashes will be recomputed.
      "bits": 32, // default is 32 if not defined
      //
      // number of seconds to cache an image hash
      "ttl": 60, // default is 60 if not defined
      //
      // "High Confidence" Threshold
      // If the difference in comparison is equal to or less than this number the images are considered the same and pixel comparison WILL NOT occur
      //
      // Defaults to the parent-level `threshold` value if not present
      //
      // Use null if you want pixel comparison to ALWAYS occur (softThreshold must be present)
      "hardThreshold": 5,
      //
      // "Low Confidence" Threshold -- only used if `pixel` is enabled
      // If the difference in comparison is:
      //
      // 1) equal to or less than this value and
      // 2) the value is greater than `hardThreshold`
      //
      // the images will be compared using the `pixel` method
      "softThreshold": 0,
    },
    //
    // And above ^^^
    //"pixel": {...}
  },
  //...
}
```

## Pixel Comparison

This approach is as straightforward as it sounds: both images are compared, pixel by pixel, to determine the difference between the two. ContextMod uses [pixelmatch](https://github.com/mapbox/pixelmatch) to do the comparison.
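A minimal sketch of what a pixelmatch-based comparison can look like is below. It assumes both images have already been decoded to raw RGBA buffers of identical dimensions (ContextMod normalizes and resizes images first, as noted under the disadvantages below), and the percentage math is illustrative.

```typescript
import pixelmatch from 'pixelmatch';

// Illustrative sketch: compare two same-sized raw RGBA buffers and return
// the percentage of pixels that differ. The `threshold` option here is
// pixelmatch's per-pixel color sensitivity, not the rule-level percentage.
const pixelPercentDifference = (
    img1: Uint8Array, // raw RGBA pixel data
    img2: Uint8Array, // raw RGBA pixel data, same width/height as img1
    width: number,
    height: number
): number => {
    const mismatched = pixelmatch(img1, img2, null, width, height, {threshold: 0.1});
    return (mismatched / (width * height)) * 100;
};
```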
**Advantages**

* Extremely accurate; high confidence in the difference percentage
* Strong when comparing text-based images or color-only differences

**Disadvantages**

* High memory requirements (10-30MB per comparison) and CPU intensive
* Weak against similar images with different aspect ratios
* Requires image transformations (resize, crop) before comparison
* Can only store image-to-image results (no single-image fingerprints)

**When should I use it?**

* You require very high accuracy in comparison results
* Comparing mostly text-based images or subtle color/detail differences
* As a secondary, high-confidence confirmation of a comparison result after hashing

### How To Use

By default pixel comparisons **are not enabled.** They must be explicitly enabled in configuration.

Pixel comparisons will be performed in either of these scenarios (a sketch of this decision flow follows the list):

* pixel is enabled, hashing is enabled, and `hash.softThreshold` is defined
  * When a comparison occurs that is less different than `softThreshold` but more different than `hardThreshold` (or `"hardThreshold": null`), pixel comparison will occur as a high-confidence check
  * Example
    * hash comparison => 7% difference
    * `"softThreshold": 10`
    * `"hardThreshold": 4`
* `hash.enable` is `false` and `pixel.enable` is `true`
  * hashing is skipped entirely and only pixel comparisons are performed
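The interaction above can be summarized with a decision-flow sketch. The option names mirror the configuration documented above; the helpers themselves are hypothetical.

```typescript
// Illustrative decision flow for combining hash and pixel comparisons.
// `runPixelComparison` is assumed to resolve to a pixel difference percentage.
const imagesAreSame = async (
    hashDiff: number, // percent difference reported by hash comparison
    opts: {
        hardThreshold: number | null,
        softThreshold?: number,
        pixelEnabled: boolean,
        pixelThreshold: number,
    },
    runPixelComparison: () => Promise<number>
): Promise<boolean> => {
    // high confidence: at or below hardThreshold => same, pixel comparison skipped
    if (opts.hardThreshold !== null && hashDiff <= opts.hardThreshold) {
        return true;
    }
    // low confidence: above hardThreshold (or hardThreshold is null) but at or
    // below softThreshold => verify with the pixel method
    if (opts.pixelEnabled && opts.softThreshold !== undefined && hashDiff <= opts.softThreshold) {
        const pixelDiff = await runPixelComparison();
        return pixelDiff <= opts.pixelThreshold;
    }
    return false;
};
```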
To configure pixel comparisons refer to this code block:

```json5
{
  "name": "ruleWithImageDetectionAndPixelEnabled",
  "kind": "recentActivity",
  "imageDetection": {
    //"hash": {...}
    "pixel": {
      // enable or disable pixel comparisons (disabled by default)
      "enable": true,
      // if the comparison difference percentage is equal to or less than this value the images are considered the same
      //
      // if not defined the value from imageDetection.threshold will be used
      "threshold": 5
    }
  },
  //...
}
```
package-lock.json (generated, 1212 lines changed; diff suppressed because it is too large)
package.json (10 lines changed)

@@ -50,9 +50,11 @@
    "got": "^11.8.2",
    "he": "^1.2.0",
    "http-proxy": "^1.18.1",
    "image-size": "^1.0.0",
    "js-yaml": "^4.1.0",
    "json5": "^2.2.0",
    "jsonwebtoken": "^8.5.1",
    "leven": "^3.1.0",
    "lodash": "^4.17.21",
    "lru-cache": "^6.0.0",
    "monaco-editor": "^0.27.0",

@@ -61,11 +63,14 @@
    "normalize-url": "^6.1.0",
    "object-hash": "^2.2.0",
    "p-event": "^4.2.0",
    "p-map": "^4.0.0",
    "passport": "^0.4.1",
    "passport-custom": "^1.1.1",
    "passport-jwt": "^4.0.0",
    "pixelmatch": "^5.2.1",
    "pretty-print-json": "^1.0.3",
    "safe-stable-stringify": "^1.1.1",
    "set-random-interval": "^1.1.0",
    "snoostorm": "^1.5.2",
    "snoowrap": "^1.23.0",
    "socket.io": "^4.1.3",

@@ -101,10 +106,15 @@
    "@types/object-hash": "^2.1.0",
    "@types/passport": "^1.0.7",
    "@types/passport-jwt": "^3.0.6",
    "@types/pixelmatch": "^5.2.4",
    "@types/sharp": "^0.29.2",
    "@types/tcp-port-used": "^1.0.0",
    "@types/triple-beam": "^1.3.2",
    "ts-auto-guard": "*",
    "ts-json-schema-generator": "^0.93.0",
    "typescript-json-schema": "^0.50.1"
  },
  "optionalDependencies": {
    "sharp": "^0.29.1"
  }
}
@@ -4,6 +4,7 @@ import {getLogger} from "./Utils/loggerFactory";
import {Invokee, OperatorConfig} from "./Common/interfaces";
import Bot from "./Bot";
import LoggedError from "./Utils/LoggedError";
import {sleep} from "./util";

export class App {

@@ -66,6 +67,7 @@ export class App {
    try {
        await b.testClient();
        await b.buildManagers();
        await sleep(2000);
        b.runManagers(causedBy).catch((err) => {
            this.logger.error(`Unexpected error occurred while running Bot ${b.botName}. Bot must be re-built to restart`);
            if (!err.logged || !(err instanceof LoggedError)) {
@@ -1,5 +1,5 @@
import {DurationComparor, UserNoteCriteria} from "../Rule";
import {CompareValue, CompareValueOrPercent} from "../Common/interfaces";
import {UserNoteCriteria} from "../Rule";
import {CompareValue, CompareValueOrPercent, DurationComparor} from "../Common/interfaces";

/**
 * If present then these Author criteria are checked before running the rule. If criteria fails then the rule is skipped.

@@ -99,6 +99,13 @@ export interface AuthorCriteria {
     * Does Author's account have a verified email?
     * */
    verified?: boolean

    /**
     * Is the author shadowbanned?
     *
     * This is determined by trying to retrieve the author's profile. If a 404 is returned it is likely they are shadowbanned
     * */
    shadowBanned?: boolean
}

export class Author implements AuthorCriteria {

@@ -112,6 +119,7 @@ export class Author implements AuthorCriteria {
    linkKarma?: string;
    totalKarma?: string;
    verified?: boolean;
    shadowBanned?: boolean;

    constructor(options: AuthorCriteria) {
        this.name = options.name;

@@ -123,6 +131,7 @@ export class Author implements AuthorCriteria {
        this.commentKarma = options.commentKarma;
        this.linkKarma = options.linkKarma;
        this.totalKarma = options.totalKarma;
        this.shadowBanned = options.shadowBanned;
    }
}
@@ -1,4 +1,4 @@
import Snoowrap, {Subreddit} from "snoowrap";
import Snoowrap, {Comment, Submission, Subreddit} from "snoowrap";
import {Logger} from "winston";
import dayjs, {Dayjs} from "dayjs";
import {Duration} from "dayjs/plugin/duration";

@@ -15,7 +15,7 @@ import {
    snooLogWrapper
} from "../util";
import {Manager} from "../Subreddit/Manager";
import {ProxiedSnoowrap} from "../Utils/SnoowrapClients";
import {ExtendedSnoowrap, ProxiedSnoowrap} from "../Utils/SnoowrapClients";
import {ModQueueStream, UnmoderatedStream} from "../Subreddit/Streams";
import {BotResourcesManager} from "../Subreddit/SubredditResources";
import LoggedError from "../Utils/LoggedError";

@@ -24,7 +24,7 @@ import pEvent from "p-event";

class Bot {

    client!: Snoowrap;
    client!: ExtendedSnoowrap;
    logger!: Logger;
    wikiLocation: string;
    dryRun?: true | undefined;

@@ -49,6 +49,7 @@ class Bot {
    maxWorkers: number;
    startedAt: Dayjs = dayjs();
    sharedModqueue: boolean = false;
    streamListedOnce: string[] = [];

    apiSample: number[] = [];
    apiRollingAvg: number = 0;

@@ -91,6 +92,7 @@ class Bot {
        },
        polling: {
            sharedMod,
            stagger,
        },
        queue: {
            maxWorkers,

@@ -166,7 +168,7 @@ class Bot {
    }

    try {
        this.client = proxy === undefined ? new Snoowrap(creds) : new ProxiedSnoowrap({...creds, proxy});
        this.client = proxy === undefined ? new ExtendedSnoowrap(creds) : new ProxiedSnoowrap({...creds, proxy});
        this.client.config({
            warnings: true,
            maxRetryAttempts: 5,

@@ -199,12 +201,32 @@ class Bot {
        }
    }

    const defaultUnmoderatedStream = new UnmoderatedStream(this.client, {subreddit: 'mod'});
    const modStreamListingListener = (name: string) => async (listing: (Comment|Submission)[]) => {
        // dole out in order they were received
        if(!this.streamListedOnce.includes(name)) {
            this.streamListedOnce.push(name);
            return;
        }
        for(const i of listing) {
            const foundManager = this.subManagers.find(x => x.subreddit.display_name === i.subreddit.display_name && x.modStreamCallbacks.get(name) !== undefined);
            if(foundManager !== undefined) {
                foundManager.modStreamCallbacks.get(name)(i);
                if(stagger !== undefined) {
                    await sleep(stagger);
                }
            }
        }
    }

    const defaultUnmoderatedStream = new UnmoderatedStream(this.client, {subreddit: 'mod', limit: 100, clearProcessed: { size: 100, retain: 100 }});
    // @ts-ignore
    defaultUnmoderatedStream.on('error', modStreamErrorListener('unmoderated'));
    const defaultModqueueStream = new ModQueueStream(this.client, {subreddit: 'mod'});
    defaultUnmoderatedStream.on('listing', modStreamListingListener('unmoderated'));
    const defaultModqueueStream = new ModQueueStream(this.client, {subreddit: 'mod', limit: 100, clearProcessed: { size: 100, retain: 100 }});
    // @ts-ignore
    defaultModqueueStream.on('error', modStreamErrorListener('modqueue'));
    defaultModqueueStream.on('listing', modStreamListingListener('modqueue'));
    this.cacheManager.modStreams.set('unmoderated', defaultUnmoderatedStream);
    this.cacheManager.modStreams.set('modqueue', defaultModqueueStream);

@@ -337,7 +359,7 @@ class Bot {

    async runModStreams(notify = false) {
        for(const [k,v] of this.cacheManager.modStreams) {
            if(!v.running && v.listeners('item').length > 0) {
            if(!v.running && this.subManagers.some(x => x.modStreamCallbacks.get(k) !== undefined)) {
                v.startInterval();
                this.logger.info(`Starting default ${k.toUpperCase()} mod stream`);
                if(notify) {

@@ -347,6 +369,7 @@ class Bot {
                }
            }
        }
        await sleep(2000);
    }
}

@@ -359,6 +382,7 @@ class Bot {
    for (const manager of this.subManagers) {
        if (manager.validConfigLoaded && manager.botState.state !== RUNNING) {
            await manager.start(causedBy, {reason: 'Caused by application startup'});
            await sleep(2000);
        }
    }
src/Common/ImageData.ts (new file, 248 lines)

@@ -0,0 +1,248 @@
import fetch from "node-fetch";
import {Submission} from "snoowrap/dist/objects";
import {URL} from "url";
import {absPercentDifference, getSharpAsync, isValidImageURL} from "../util";
import sizeOf from "image-size";
import SimpleError from "../Utils/SimpleError";
import {Sharp} from "sharp";
import {blockhash} from "./blockhash/blockhash";

export interface ImageDataOptions {
    width?: number,
    height?: number,
    url: string,
    variants?: ImageData[]
}

class ImageData {

    width?: number
    height?: number
    url: URL
    variants: ImageData[] = []
    preferredResolution?: [number, number]
    sharpImg!: Sharp
    hashResult!: string
    actualResolution?: [number, number]

    constructor(data: ImageDataOptions, aggressive = false) {
        this.width = data.width;
        this.height = data.height;
        this.url = new URL(data.url);
        if (!aggressive && !isValidImageURL(`${this.url.origin}${this.url.pathname}`)) {
            throw new Error('URL did not end with a valid image extension');
        }
        this.variants = data.variants || [];
    }

    async data(format = 'raw'): Promise<Buffer> {
        // @ts-ignore
        return await (await this.sharp()).clone().toFormat(format).toBuffer();
    }

    async hash(bits: number, useVariantIfPossible = true): Promise<string> {
        if(this.hashResult === undefined) {
            let ref: ImageData | undefined;
            if(useVariantIfPossible && this.preferredResolution !== undefined) {
                ref = this.getSimilarResolutionVariant(this.preferredResolution[0], this.preferredResolution[1]);
            }
            if(ref === undefined) {
                ref = this;
            }
            this.hashResult = await blockhash((await ref.sharp()).clone(), bits);
        }
        return this.hashResult;
    }

    async sharp(): Promise<Sharp> {
        if (this.sharpImg === undefined) {
            try {
                const response = await fetch(this.url.toString())
                if (response.ok) {
                    const ct = response.headers.get('Content-Type');
                    if (ct !== null && ct.includes('image')) {
                        const sFunc = await getSharpAsync();
                        // if image is animated then we want to extract the first frame and convert it to a regular image
                        // so we can compare two static images later (also because sharp can't use resize() on animated images)
                        if(['gif','webp'].some(x => ct.includes(x))) {
                            this.sharpImg = await sFunc(await (await sFunc(await response.buffer(), {pages: 1, animated: false})).png().toBuffer());
                        } else {
                            this.sharpImg = await sFunc(await response.buffer());
                        }
                        const meta = await this.sharpImg.metadata();
                        if (this.width === undefined || this.height === undefined) {
                            this.width = meta.width;
                            this.height = meta.height;
                        }
                        this.actualResolution = [meta.width as number, meta.height as number];
                    } else {
                        throw new SimpleError(`Content-Type for fetched URL ${this.url} did not contain "image"`);
                    }
                } else {
                    throw new SimpleError(`URL response was not OK: (${response.status})${response.statusText}`);
                }
            } catch (err) {
                if(!(err instanceof SimpleError)) {
                    throw new Error(`Error occurred while fetching response from URL: ${err.message}`);
                } else {
                    throw err;
                }
            }
        }
        return this.sharpImg;
    }

    get pixels() {
        if (this.actualResolution !== undefined) {
            return this.actualResolution[0] * this.actualResolution[1];
        }
        if (this.width === undefined || this.height === undefined) {
            return undefined;
        }
        return this.width * this.height;
    }

    get hasDimensions() {
        return this.width !== undefined && this.height !== undefined;
    }

    get baseUrl() {
        return `${this.url.origin}${this.url.pathname}`;
    }

    setPreferredResolutionByWidth(prefWidth: number) {
        let height: number | undefined = undefined,
            width: number | undefined = undefined;
        if (this.variants.length === 0) {
            return;
        }
        for (const v of this.variants) {
            if (v.hasDimensions && (v.width as number) <= prefWidth) {
                width = v.width as number;
                height = v.height as number;
            }
        }
        if (width !== undefined) {
            this.preferredResolution = [width, (height as number)];
        }
    }

    getSimilarResolutionVariant(width: number, height: number, allowablePercentDiff = 0): ImageData | undefined {
        if (this.variants.length === 0) {
            return undefined;
        }
        return this.variants.find(x => {
            return x.hasDimensions && (absPercentDifference(width, x.width as number) <= allowablePercentDiff) && (absPercentDifference(height, x.height as number) <= allowablePercentDiff);
        });
    }

    isSameDimensions(otherImage: ImageData) {
        if (!this.hasDimensions || !otherImage.hasDimensions) {
            return false;
        }
        return this.width === otherImage.width && this.height === otherImage.height;
    }

    async sameAspectRatio(otherImage: ImageData) {
        let thisRes = this.actualResolution;
        let otherRes = otherImage.actualResolution;
        if(thisRes === undefined) {
            const tMeta = await (await this.sharp()).metadata();
            const thisMeta = {width: tMeta.width as number, height: tMeta.height as number };
            this.actualResolution = [thisMeta.width, thisMeta.height];
            thisRes = this.actualResolution;
        }
        if(otherRes === undefined) {
            const otherMeta = await (await otherImage.sharp()).metadata();
            otherRes = [otherMeta.width as number, otherMeta.height as number];
        }
        const thisRatio = thisRes[0] / thisRes[1];
        const otherRatio = otherRes[0] / otherRes[1];

        // a little leeway
        return Math.abs(thisRatio - otherRatio) < 0.1;
    }

    static async dimensionsFromMetadata(img: Sharp) {
        const {width, height, ...rest} = await img.metadata();
        return {width: width as number, height: height as number};
    }

    async normalizeImagesForComparison(compareLibrary: ('pixel' | 'resemble'), imgToCompare: ImageData): Promise<[Sharp, Sharp, number, number]> {
        const sFunc = await getSharpAsync();

        let refImage = this as ImageData;
        let compareImage = imgToCompare;
        if (this.preferredResolution !== undefined) {
            const matchingVariant = compareImage.getSimilarResolutionVariant(this.preferredResolution[0], this.preferredResolution[1]);
            if (matchingVariant !== undefined) {
                compareImage = matchingVariant;
                refImage = this.getSimilarResolutionVariant(this.preferredResolution[0], this.preferredResolution[1]) as ImageData;
            }
        }

        let refSharp = (await refImage.sharp()).clone();
        let refMeta = await ImageData.dimensionsFromMetadata(refSharp);
        let compareSharp = (await compareImage.sharp()).clone();
        let compareMeta = await ImageData.dimensionsFromMetadata(compareSharp);

        // if dimensions are not the same we need to crop or resize before final resize
        if (refMeta.width !== compareMeta.width || refMeta.height !== compareMeta.height) {
            const thisRatio = refMeta.width / (refMeta.height);
            const otherRatio = compareMeta.width / compareMeta.height;

            const sameRatio = Math.abs(thisRatio - otherRatio) < 0.04;
            if (sameRatio) {
                // then resize first since its most likely the same image
                // can be fairly sure a downscale will get pixels close to the same
                if (refMeta.width > compareMeta.width) {
                    refSharp = sFunc(await refSharp.resize(compareMeta.width, null, {fit: 'outside'}).toBuffer());
                } else {
                    compareSharp = sFunc(await compareSharp.resize(refMeta.width, null, {fit: 'outside'}).toBuffer());
                }
                refMeta = await ImageData.dimensionsFromMetadata(refSharp);
                compareMeta = await ImageData.dimensionsFromMetadata(compareSharp);
            }
            // find smallest common dimensions
            const sWidth = refMeta.width <= compareMeta.width ? refMeta.width : compareMeta.width;
            const sHeight = refMeta.height <= compareMeta.height ? refMeta.height : compareMeta.height;

            // crop if necessary
            if(sWidth !== refMeta.width || sHeight !== refMeta.height) {
                refSharp = sFunc(await refSharp.extract({left: 0, top: 0, width: sWidth, height: sHeight}).toBuffer());
            }
            if(sWidth !== compareMeta.width || sHeight !== compareMeta.height) {
                compareSharp = sFunc(await compareSharp.extract({left: 0, top: 0, width: sWidth, height: sHeight}).toBuffer());
            }
        }

        // final resize to reduce memory/cpu usage during comparison
        refSharp = sFunc(await refSharp.resize(400, null, {fit: 'outside'}).toBuffer());
        compareSharp = sFunc(await compareSharp.resize(400, null, {fit: 'outside'}).toBuffer());

        const {width, height} = await ImageData.dimensionsFromMetadata(refSharp);
        return [refSharp, compareSharp, width, height];
    }

    static fromSubmission(sub: Submission, aggressive = false): ImageData {
        const url = new URL(sub.url);
        const data: any = {
            url,
        };
        let variants = [];
        if (sub.preview !== undefined && sub.preview.enabled && sub.preview.images.length > 0) {
            const firstImg = sub.preview.images[0];
            const ref = sub.preview.images[0].source;
            data.width = ref.width;
            data.height = ref.height;

            variants = firstImg.resolutions.map(x => new ImageData(x));
            data.variants = variants;
        }
        return new ImageData(data, aggressive);
    }
}

export default ImageData;
src/Common/blockhash/blockhash.ts (new file, 234 lines)

@@ -0,0 +1,234 @@
// Perceptual image hash calculation tool based on algorithm described in
// Block Mean Value Based Image Perceptual Hashing by Bian Yang, Fan Gu and Xiamu Niu
//
// Copyright 2014 Commons Machinery http://commonsmachinery.se/
// Distributed under an MIT license, please see LICENSE in the top dir.

// https://github.com/commonsmachinery/blockhash-js/blob/master/index.js

import {Sharp} from "sharp";

interface BlockImageData {
    data: Buffer,
    width: number,
    height: number
}

var one_bits = [0, 1, 1, 2, 1, 2, 2, 3, 1, 2, 2, 3, 2, 3, 3, 4];

/* Calculate the hamming distance for two hashes in hex format */
export const hammingDistance = (hash1: string, hash2: string) => {
    var d = 0;
    var i;

    if (hash1.length !== hash2.length) {
        throw new Error("Can't compare hashes with different length");
    }

    for (i = 0; i < hash1.length; i++) {
        var n1 = parseInt(hash1[i], 16);
        var n2 = parseInt(hash2[i], 16);
        d += one_bits[n1 ^ n2];
    }
    return d;
};

var median = function(data: number[]) {
    var mdarr = data.slice(0);
    mdarr.sort(function(a, b) { return a-b; });
    if (mdarr.length % 2 === 0) {
        return (mdarr[mdarr.length/2 - 1] + mdarr[mdarr.length/2]) / 2.0;
    }
    return mdarr[Math.floor(mdarr.length/2)];
};

var translate_blocks_to_bits = function(blocks: number[], pixels_per_block: number) {
    var half_block_value = pixels_per_block * 256 * 3 / 2;
    var bandsize = blocks.length / 4;

    // Compare medians across four horizontal bands
    for (var i = 0; i < 4; i++) {
        var m = median(blocks.slice(i * bandsize, (i + 1) * bandsize));
        for (var j = i * bandsize; j < (i + 1) * bandsize; j++) {
            var v = blocks[j];

            // Output a 1 if the block is brighter than the median.
            // With images dominated by black or white, the median may
            // end up being 0 or the max value, and thus having a lot
            // of blocks of value equal to the median. To avoid
            // generating hashes of all zeros or ones, in that case output
            // 0 if the median is in the lower value space, 1 otherwise
            blocks[j] = Number(v > m || (Math.abs(v - m) < 1 && m > half_block_value));
        }
    }
};

var bits_to_hexhash = function(bitsArray: number[]) {
    var hex = [];
    for (var i = 0; i < bitsArray.length; i += 4) {
        var nibble = bitsArray.slice(i, i + 4);
        hex.push(parseInt(nibble.join(''), 2).toString(16));
    }

    return hex.join('');
};

var bmvbhash_even = function(data: BlockImageData, bits: number) {
    var blocksize_x = Math.floor(data.width / bits);
    var blocksize_y = Math.floor(data.height / bits);

    var result = [];

    for (var y = 0; y < bits; y++) {
        for (var x = 0; x < bits; x++) {
            var total = 0;

            for (var iy = 0; iy < blocksize_y; iy++) {
                for (var ix = 0; ix < blocksize_x; ix++) {
                    var cx = x * blocksize_x + ix;
                    var cy = y * blocksize_y + iy;
                    var ii = (cy * data.width + cx) * 4;

                    var alpha = data.data[ii+3];
                    if (alpha === 0) {
                        total += 765;
                    } else {
                        total += data.data[ii] + data.data[ii+1] + data.data[ii+2];
                    }
                }
            }

            result.push(total);
        }
    }

    translate_blocks_to_bits(result, blocksize_x * blocksize_y);
    return bits_to_hexhash(result);
};

var bmvbhash = function(data: BlockImageData, bits: number) {
    var result = [];

    var i, j, x, y;
    var block_width, block_height;
    var weight_top, weight_bottom, weight_left, weight_right;
    var block_top, block_bottom, block_left, block_right;
    var y_mod, y_frac, y_int;
    var x_mod, x_frac, x_int;
    var blocks: number[][] = [];

    var even_x = data.width % bits === 0;
    var even_y = data.height % bits === 0;

    if (even_x && even_y) {
        return bmvbhash_even(data, bits);
    }

    // initialize blocks array with 0s
    for (i = 0; i < bits; i++) {
        blocks.push([]);
        for (j = 0; j < bits; j++) {
            blocks[i].push(0);
        }
    }

    block_width = data.width / bits;
    block_height = data.height / bits;

    for (y = 0; y < data.height; y++) {
        if (even_y) {
            // don't bother dividing y, if the size evenly divides by bits
            block_top = block_bottom = Math.floor(y / block_height);
            weight_top = 1;
            weight_bottom = 0;
        } else {
            y_mod = (y + 1) % block_height;
            y_frac = y_mod - Math.floor(y_mod);
            y_int = y_mod - y_frac;

            weight_top = (1 - y_frac);
            weight_bottom = (y_frac);

            // y_int will be 0 on bottom/right borders and on block boundaries
            if (y_int > 0 || (y + 1) === data.height) {
                block_top = block_bottom = Math.floor(y / block_height);
            } else {
                block_top = Math.floor(y / block_height);
                block_bottom = Math.ceil(y / block_height);
            }
        }

        for (x = 0; x < data.width; x++) {
            var ii = (y * data.width + x) * 4;

            var avgvalue, alpha = data.data[ii+3];
            if (alpha === 0) {
                avgvalue = 765;
            } else {
                avgvalue = data.data[ii] + data.data[ii+1] + data.data[ii+2];
            }

            if (even_x) {
                block_left = block_right = Math.floor(x / block_width);
                weight_left = 1;
                weight_right = 0;
            } else {
                x_mod = (x + 1) % block_width;
                x_frac = x_mod - Math.floor(x_mod);
                x_int = x_mod - x_frac;

                weight_left = (1 - x_frac);
                weight_right = x_frac;

                // x_int will be 0 on bottom/right borders and on block boundaries
                if (x_int > 0 || (x + 1) === data.width) {
                    block_left = block_right = Math.floor(x / block_width);
                } else {
                    block_left = Math.floor(x / block_width);
                    block_right = Math.ceil(x / block_width);
                }
            }

            // add weighted pixel value to relevant blocks
            blocks[block_top][block_left] += avgvalue * weight_top * weight_left;
            blocks[block_top][block_right] += avgvalue * weight_top * weight_right;
            blocks[block_bottom][block_left] += avgvalue * weight_bottom * weight_left;
            blocks[block_bottom][block_right] += avgvalue * weight_bottom * weight_right;
        }
    }

    for (i = 0; i < bits; i++) {
        for (j = 0; j < bits; j++) {
            result.push(blocks[i][j]);
        }
    }

    translate_blocks_to_bits(result, block_width * block_height);
    return bits_to_hexhash(result);
};

var blockhashData = function(imgData: BlockImageData, bits: number, method: number) {
    var hash;

    if (method === 1) {
        hash = bmvbhash_even(imgData, bits);
    }
    else if (method === 2) {
        hash = bmvbhash(imgData, bits);
    }
    else {
        throw new Error("Bad hashing method");
    }

    return hash;
};

export const blockhash = async function(src: Sharp, bits: number, method: number = 2): Promise<string> {
    const {data: buff, info} = await src.ensureAlpha().raw().toBuffer({resolveWithObject: true});
    return blockhashData({
        width: info.width,
        height: info.height,
        data: buff,
    }, bits, method);
};
@@ -1,2 +1,31 @@
import {HistoricalStats} from "./interfaces";

export const cacheOptDefaults = {ttl: 60, max: 500, checkPeriod: 600};
export const cacheTTLDefaults = {authorTTL: 60, userNotesTTL: 300, wikiTTL: 300, submissionTTL: 60, commentTTL: 60, filterCriteriaTTL: 60, subredditTTL: 600};
export const historicalDefaults: HistoricalStats = {
    eventsCheckedTotal: 0,
    eventsActionedTotal: 0,
    checksRun: new Map(),
    checksFromCache: new Map(),
    checksTriggered: new Map(),
    rulesRun: new Map(),
    //rulesCached: new Map(),
    rulesCachedTotal: 0,
    rulesTriggered: new Map(),
    actionsRun: new Map(),
}

export const createHistoricalDefaults = (): HistoricalStats => {
    return {
        eventsCheckedTotal: 0,
        eventsActionedTotal: 0,
        checksRun: new Map(),
        checksFromCache: new Map(),
        checksTriggered: new Map(),
        rulesRun: new Map(),
        //rulesCached: new Map(),
        rulesCachedTotal: 0,
        rulesTriggered: new Map(),
        actionsRun: new Map(),
    };
}
@@ -4,6 +4,7 @@ import {MESSAGE} from 'triple-beam';
import Poll from "snoostorm/out/util/Poll";
import Snoowrap from "snoowrap";
import {RuleResult} from "../Rule";
import {IncomingMessage} from "http";

/**
 * An ISO 8601 Duration

@@ -223,6 +224,186 @@ export interface ReferenceSubmission {
    useSubmissionAsReference?: boolean,
}

/**
 * When comparing submissions detect if the reference submission is an image and do a pixel-comparison to other detected image submissions.
 *
 * **Note:** This is an **experimental feature**
 * */
export interface ImageDetection {
    /**
     * Is image detection enabled?
     * */
    enable?: boolean
    /**
     * Determines how and when to check if a URL is an image
     *
     * **Note:** After fetching a URL the **Content-Type** is validated to contain `image` before detection occurs
     *
     * **When `extension`:** (default)
     *
     * * Only URLs that end in known image extensions (.png, .jpg, etc...) are fetched
     *
     * **When `unknown`:**
     *
     * * URLs that end in known image extensions (.png, .jpg, etc...) are fetched
     * * URLs with no extension or an unknown extension (IE non-video, non-doc, etc...) are fetched
     *
     * **When `all`:**
     *
     * * All submissions that have URLs (non-self) will be fetched, regardless of extension
     * * **Note:** This can be bandwidth/CPU intensive if history window is large so use with care
     *
     * @default "extension"
     * */
    fetchBehavior?: 'extension' | 'unknown' | 'all',
    /**
     * The percentage, as a whole number, of difference between two images at which point they will not be considered the same.
     *
     * Will be used as `hash.hardThreshold` and `pixel.threshold` if those values are not specified
     *
     * Default is `5`
     *
     * @default 5
     * */
    threshold?: number

    /**
     * Use perceptual hashing (blockhash-js) to compare images
     *
     * Pros:
     *
     * * very fast
     * * low cpu/memory usage
     * * results can be cached
     *
     * Cons:
     *
     * * not as accurate as pixel comparison
     * * weaker for text-heavy images
     * * mostly color-blind
     *
     * Best uses:
     *
     * * Detecting (general) duplicate images
     * * Comparing a large number of images
     * */
    hash?: {
        /**
         * Enabled by default.
         *
         * If both `hash` and `pixel` are enabled then `pixel` will be used to verify image comparison when hashes match
         *
         * @default true
         * */
        enable?: boolean

        /**
         * Bit count determines accuracy of hash and granularity of hash comparison (comparison to other hashes)
         *
         * Default is `32`
         *
         * **NOTE:** Hashes of different sizes (bits) cannot be compared. If you are caching results make sure all rules where results may be shared use the same bit count to ensure hashes can be compared. Otherwise hashes will be recomputed.
         *
         * @default 32
         * */
        bits?: number

        /**
         * Number of seconds to cache image hash
         * */
        ttl?: number
        /**
         * High Confidence Threshold
         *
         * If the difference in comparison is equal to or less than this number the images are considered the same and pixel comparison WILL NOT occur
         *
         * Defaults to the parent-level `threshold` value if not present
         *
         * Use `null` if you want pixel comparison to ALWAYS occur (softThreshold must be present)
         * */
        hardThreshold?: number | null
        /**
         * Low Confidence Threshold -- only used if `pixel` is enabled
         *
         * If the difference in comparison is
         *
         * 1) equal to or less than this value and
         * 2) the value is greater than `hardThreshold`
         *
         * the images will be compared using the `pixel` method
         * */
        softThreshold?: number
    }

    /**
     * Use pixel counting to compare images
     *
     * Pros:
     *
     * * most accurate
     * * strong with text or color-only changes
     *
     * Cons:
     *
     * * much slower than hashing
     * * memory/cpu intensive
     *
     * Best uses:
     *
     * * Comparing text-only images
     * * Comparison requires a high degree of accuracy or changes are subtle
     * */
    pixel?: {
        /**
         * Disabled by default.
         *
         * @default false
         * */
        enable?: boolean
        /**
         * The percentage, as a whole number, of pixels that are **different** between the two images at which point the images are not considered the same.
         * */
        threshold?: number
    }
}

export interface StrongImageDetection {
    enable: boolean,
    fetchBehavior: 'extension' | 'unknown' | 'all'
    threshold: number,
    hash: {
        enable: boolean
        bits: number
        ttl?: number
        hardThreshold: number | null
        softThreshold?: number
    }
    pixel: {
        enable: boolean
        threshold: number
    }
}

// export interface ImageData {
//     data: Promise<Buffer>,
//     buf?: Buffer,
//     width: number,
//     height: number
//     pixels?: number
//     url: string
//     variants?: ImageData[]
// }

export interface ImageComparisonResult {
    isSameDimensions: boolean
    dimensionDifference: {
        width: number;
        height: number;
    };
    misMatchPercentage: number;
    analysisTime: number;
}

export interface RichContent {
    /**
     * The Content to submit for this Action. Content is interpreted as reddit-flavored Markdown.

@@ -305,6 +486,38 @@ export type PollOn = 'unmoderated' | 'modqueue' | 'newSub' | 'newComm';

export interface PollingOptionsStrong extends PollingOptions {
    limit: number,
    interval: number,
    clearProcessed: ClearProcessedOptions
}

/**
 * For very long-running, high-volume subreddits clearing the list of processed activities helps manage memory bloat
 *
 * All of these options have default values based on the limit and/or interval set for polling options on each subreddit stream. They only need to be modified if the defaults are not sufficient.
 *
 * If both `after` and `size` are defined, whichever is hit first will trigger the list to clear. `after` will be reset after every clear.
 * */
export interface ClearProcessedOptions {
    /**
     * An interval the processed list should be cleared after.
     *
     * * EX `9 days`
     * * EX `3 months`
     * * EX `5 minutes`
     * @pattern ^\s*(?<time>\d+)\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\s*$
     * */
    after?: string,
    /**
     * Number of activities found in processed list after which the list should be cleared.
     *
     * Defaults to the `limit` value from `PollingOptions`
     * */
    size?: number,
    /**
     * The number of activities to retain in processed list after clearing.
     *
     * Defaults to `limit` value from `PollingOptions`
     * */
    retain?: number,
}
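The clearing semantics documented above can be illustrated with a small sketch. This is an illustration of the documented behavior, not ContextMod's actual implementation; parsing of the `after` duration string is assumed to have been reduced to milliseconds beforehand.

```typescript
// Illustration of the documented clearProcessed semantics: clear when either
// `size` is reached or `after` has elapsed, then keep the most recent
// `retain` entries.
const maybeClearProcessed = (
    processed: string[],
    lastCleared: Date,
    opts: { size?: number, afterMs?: number, retain?: number }
): string[] => {
    const sizeHit = opts.size !== undefined && processed.length >= opts.size;
    const timeHit = opts.afterMs !== undefined
        && Date.now() - lastCleared.getTime() >= opts.afterMs;
    if (sizeHit || timeHit) {
        return opts.retain !== undefined ? processed.slice(-opts.retain) : [];
    }
    return processed;
};
```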
export interface PollingDefaults {

@@ -378,6 +591,8 @@ export interface PollingOptions extends PollingDefaults {
     *
     * */
    pollOn: 'unmoderated' | 'modqueue' | 'newSub' | 'newComm'

    clearProcessed?: ClearProcessedOptions
}

export interface TTLConfig {

@@ -630,6 +845,22 @@ export interface ManagerOptions {
 * */
export type CompareValue = string;

/**
 * A duration and how to compare it against a value
 *
 * The syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`
 *
 * * EX `> 100 days` => Passes if the date being compared is before 100 days ago
 * * EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months ago
 *
 * Unit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)
 *
 * See https://regexr.com/609n8 for an example
 *
 * @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\s*$
 * */
export type DurationComparor = string;
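As an illustration of how such a comparison string could be evaluated, the sketch below parses the documented pattern with dayjs (ContextMod depends on dayjs, but this helper and its exact semantics are assumptions for illustration):

```typescript
import dayjs, { Dayjs } from 'dayjs';
import duration from 'dayjs/plugin/duration';

dayjs.extend(duration);

// Matches the documented pattern, e.g. "> 100 days" or "<= 2 months"
const DURATION_COMPARE = /^\s*(>|>=|<|<=)\s*(\d+)\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\s*$/;

// Hypothetical evaluator: does `date` satisfy the comparison in `expr`?
const passesDurationComparison = (expr: string, date: Dayjs): boolean => {
    const match = expr.match(DURATION_COMPARE);
    if (match === null) {
        throw new Error(`'${expr}' is not a valid duration comparison`);
    }
    const [, op, value, unit] = match;
    // the point in time `duration value` ago
    const cutoff = dayjs().subtract(dayjs.duration(Number(value), unit as any));
    switch (op) {
        case '>': return date.isBefore(cutoff);   // older than the duration
        case '>=': return !date.isAfter(cutoff);
        case '<': return date.isAfter(cutoff);    // newer than the duration
        default: return !date.isBefore(cutoff);   // '<='
    }
};
```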
/**
 * A string containing a comparison operator and a value to compare against
 *

@@ -677,6 +908,7 @@ export interface ActivityState {
    approved?: boolean
    score?: CompareValue
    reports?: CompareValue
    age?: DurationComparor
}

/**

@@ -1144,6 +1376,13 @@ export interface BotInstanceJsonConfig {
     * @default false
     * */
    sharedMod?: boolean,

    /**
     * If sharing a mod stream, stagger pushing relevant Activities to individual subreddits.
     *
     * Useful when running many subreddits and rules are potentially cpu/memory/traffic heavy -- allows spreading out load
     * */
    stagger?: number,
},
/**
 * Settings related to default configurations for queue behavior for subreddits

@@ -1430,6 +1669,7 @@ export interface BotInstanceConfig extends BotInstanceJsonConfig {
    },
    polling: {
        sharedMod: boolean,
        stagger?: number,
        limit: number,
        interval: number,
    },

@@ -1547,3 +1787,91 @@ export interface RedditEntity {
    name: string
    type: RedditEntityType
}

export interface StatusCodeError extends Error {
    name: 'StatusCodeError',
    statusCode: number,
    message: string,
    response: IncomingMessage,
    error: Error
}

export interface HistoricalStatsDisplay extends HistoricalStats {
    checksRunTotal: number
    checksFromCacheTotal: number
    checksTriggeredTotal: number
    rulesRunTotal: number
    rulesCachedTotal: number
    rulesTriggeredTotal: number
    actionsRunTotal: number
}

export interface HistoricalStats {
    eventsCheckedTotal: number
    eventsActionedTotal: number
    checksRun: Map<string, number>
    checksFromCache: Map<string, number>
    checksTriggered: Map<string, number>
    rulesRun: Map<string, number>
    //rulesCached: Map<string, number>
    rulesCachedTotal: number
    rulesTriggered: Map<string, number>
    actionsRun: Map<string, number>
    [index: string]: any
}

export interface SubredditHistoricalStats {
    allTime: HistoricalStats
    lastReload: HistoricalStats
}

export interface SubredditHistoricalStatsDisplay {
    allTime: HistoricalStatsDisplay
    lastReload: HistoricalStatsDisplay
}

export interface ManagerStats {
    // eventsCheckedTotal: number
    // eventsCheckedSinceStartTotal: number
    eventsAvg: number
    // checksRunTotal: number
    // checksRunSinceStartTotal: number
    // checksTriggered: number
    // checksTriggeredTotal: number
    // checksTriggeredSinceStart: number
    // checksTriggeredSinceStartTotal: number
    // rulesRunTotal: number
    // rulesRunSinceStartTotal: number
    // rulesCachedTotal: number
    // rulesCachedSinceStartTotal: number
    // rulesTriggeredTotal: number
    // rulesTriggeredSinceStartTotal: number
    rulesAvg: number
    // actionsRun: number
    // actionsRunTotal: number
    // actionsRunSinceStart: number,
    // actionsRunSinceStartTotal: number
    historical: SubredditHistoricalStatsDisplay
    cache: {
        provider: string,
        currentKeyCount: number,
        isShared: boolean,
        totalRequests: number,
        totalMiss: number,
        missPercent: string,
        requestRate: number,
        types: ResourceStats
    },
}

export interface HistoricalStatUpdateData {
    eventsCheckedTotal?: number
    eventsActionedTotal?: number
    checksRun: string[] | string
    checksTriggered: string[] | string
    checksFromCache: string[] | string
    actionsRun: string[] | string
    rulesRun: string[] | string
    rulesCachedTotal: number
    rulesTriggered: string[] | string
}
@@ -142,15 +142,30 @@ export const buildPollingOptions = (values: (string | PollingOptions)[]): Pollin
let opts: PollingOptionsStrong[] = [];
for (const v of values) {
    if (typeof v === 'string') {
        opts.push({pollOn: v as PollOn, interval: DEFAULT_POLLING_INTERVAL, limit: DEFAULT_POLLING_LIMIT});
        opts.push({
            pollOn: v as PollOn,
            interval: DEFAULT_POLLING_INTERVAL,
            limit: DEFAULT_POLLING_LIMIT,
            clearProcessed: {
                size: DEFAULT_POLLING_LIMIT,
                retain: DEFAULT_POLLING_LIMIT,
            }
        });
    } else {
        const {
            pollOn: p,
            interval = DEFAULT_POLLING_INTERVAL,
            limit = DEFAULT_POLLING_LIMIT,
            delayUntil,
            clearProcessed = {size: limit, retain: limit},
        } = v;
        opts.push({pollOn: p as PollOn, interval, limit, delayUntil});
        opts.push({
            pollOn: p as PollOn,
            interval,
            limit,
            delayUntil,
            clearProcessed
        });
    }
}
return opts;

@@ -604,6 +619,7 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
    name: botName,
    polling: {
        sharedMod = false,
        stagger,
        limit = 100,
        interval = 30,
    } = {},

@@ -654,7 +670,7 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
    } = caching;

    botActionedEventsDefault = actionedEventsDefault;
    if(actionedEventsMax !== undefined) {
    if (actionedEventsMax !== undefined) {
        botActionedEventsDefault = Math.min(actionedEventsDefault, actionedEventsMax);
    }

@@ -708,6 +724,7 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
    caching: botCache,
    polling: {
        sharedMod,
        stagger,
        limit,
        interval,
    },
@@ -99,15 +99,16 @@ export interface AttributionCriteria {
     * This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`
     *
     * * If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)
     * * If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)
     * * If `link` is included then aggregate author's submission history which is external links but not media
     * * If `redditMedia` is included then aggregate on author's submission history which are media hosted on reddit: galleries, videos, and images (i.redd.it / v.redd.it)
     * * If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or domain is `reddit.com`
     * * If `link` is included then aggregate author's submission history which is external links and not recognized as `media` by reddit
     *
     * If nothing is specified or list is empty (default) all domains are aggregated
     * If nothing is specified or list is empty (default) rule will only aggregate on `link` and `media` (ignores reddit-hosted content and self-posts)
     *
     * @default undefined
     * @examples [[]]
     * */
    aggregateOn?: ('media' | 'self' | 'link')[],
    aggregateOn?: ('media' | 'redditMedia' | 'self' | 'link')[],

    /**
     * Should the criteria consolidate recognized media domains into the parent domain?
@@ -173,7 +174,7 @@ export class AttributionRule extends Rule {
            window,
            thresholdOn = 'all',
            minActivityCount = 10,
            aggregateOn = [],
            aggregateOn = ['link','media'],
            consolidateMediaDomains = false,
            domains = [],
            domainsCombined = false,
@@ -232,16 +233,23 @@ export class AttributionRule extends Rule {
            const domainInfo = getAttributionIdentifier(sub, consolidateMediaDomains)

            let domainType = 'link';
            if(sub.secure_media !== undefined && sub.secure_media !== null) {
                domainType = 'media';
            } else if((sub.is_self || sub.is_video || sub.domain === 'i.redd.it')) {
            if(sub.is_video || ['i.redd.it','v.redd.it'].includes(sub.domain)
                // @ts-ignore
                || sub.gallery_data !== undefined) {
                domainType = 'redditMedia';
            } else if(sub.is_self || sub.domain === 'reddit.com') {
                domainType = 'self';
            } else if(sub.secure_media !== undefined && sub.secure_media !== null) {
                domainType = 'media';
            }

            if(aggregateOn.length !== 0) {
                if(domainType === 'media' && !aggregateOn.includes('media')) {
                    return acc;
                }
                if(domainType === 'redditMedia' && !aggregateOn.includes('redditMedia')) {
                    return acc;
                }
                if(domainType === 'self' && !aggregateOn.includes('self')) {
                    return acc;
                }
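The reordered branches above check reddit-hosted media first, then self/`reddit.com` posts, and only then embedded external media, falling back to `link`. A few illustrative classifications under that order (example submissions only, not taken from this diff):

```typescript
// Illustrative input -> domainType under the branch order above
// {is_video: true}                          -> 'redditMedia'
// {domain: 'v.redd.it'}                     -> 'redditMedia'
// {gallery_data: {...}}                     -> 'redditMedia'
// {is_self: true} / {domain: 'reddit.com'}  -> 'self'
// {secure_media: {...}} (youtube, vimeo)    -> 'media'
// {domain: 'example.com'}                   -> 'link' (the initial value)
```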
@@ -384,7 +392,7 @@ export class AttributionRule extends Rule {

}

interface AttributionConfig extends ReferenceSubmission {
interface AttributionConfig {

    /**
     * A list of threshold-window values to test attribution against

@@ -1,21 +1,42 @@
import {Rule, RuleJSONConfig, RuleOptions, RulePremise, RuleResult} from "./index";
import {Comment, VoteableContent} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import as from 'async';
import pMap from 'p-map';
// @ts-ignore
import subImageMatch from 'matches-subimage';
import {
    activityWindowText, asSubmission,
    comparisonTextOp, FAIL, formatNumber, getActivitySubredditName, isSubmission, objectToStringSummary,
    parseGenericValueOrPercentComparison, parseStringToRegex, parseSubredditName,
    activityWindowText,
    asSubmission, bitsToHexLength,
    // blockHashImage,
    compareImages,
    comparisonTextOp,
    FAIL,
    formatNumber,
    getActivitySubredditName, imageCompareMaxConcurrencyGuess,
    //getImageDataFromUrl,
    isSubmission,
    isValidImageURL,
    objectToStringSummary,
    parseGenericValueOrPercentComparison,
    parseStringToRegex,
    parseSubredditName,
    parseUsableLinkIdentifier,
    PASS, toStrongSubredditState
    PASS, sleep,
    toStrongSubredditState
} from "../util";
import {
    ActivityWindow,
    ActivityWindowCriteria,
    ActivityWindowType, CommentState,
    ReferenceSubmission, StrongSubredditState, SubmissionState,
    //ImageData,
    ImageDetection,
    ReferenceSubmission, StrongImageDetection, StrongSubredditState, SubmissionState,
    SubredditCriteria, SubredditState
} from "../Common/interfaces";
import {SubredditResources} from "../Subreddit/SubredditResources";
import ImageData from "../Common/ImageData";
import {blockhash, hammingDistance} from "../Common/blockhash/blockhash";
import leven from "leven";

const parseLink = parseUsableLinkIdentifier();

@@ -23,6 +44,7 @@ export class RecentActivityRule extends Rule {
    window: ActivityWindowType;
    thresholds: ActivityThreshold[];
    useSubmissionAsReference: boolean;
    imageDetection: StrongImageDetection
    lookAt?: 'comments' | 'submissions';

    constructor(options: RecentActivityRuleOptions) {
@@ -30,8 +52,47 @@ export class RecentActivityRule extends Rule {
        const {
            window = 15,
            useSubmissionAsReference = true,
            imageDetection,
            lookAt,
        } = options || {};

        const {
            enable = false,
            fetchBehavior = 'extension',
            threshold = 5,
            hash = {},
            pixel = {},
        } = imageDetection || {};

        const {
            enable: hEnable = true,
            bits = 16,
            ttl = 60,
            hardThreshold = threshold,
            softThreshold
        } = hash || {};

        const {
            enable: pEnable = true,
            threshold: pThreshold = threshold,
        } = pixel || {};

        this.imageDetection = {
            enable,
            fetchBehavior,
            threshold,
            hash: {
                enable: hEnable,
                hardThreshold,
                softThreshold,
                bits,
                ttl,
            },
            pixel: {
                enable: pEnable,
                threshold: pThreshold
            }
        };
        this.lookAt = lookAt;
        this.useSubmissionAsReference = useSubmissionAsReference;
        this.window = window;
@@ -73,16 +134,138 @@ export class RecentActivityRule extends Rule {
        } else if (item.is_self) {
            this.logger.warn('Cannot use post as reference because triggered Submission is not a link type');
        } else {
            const usableUrl = parseLink(await item.url);
            viableActivity = viableActivity.filter((x) => {
                if (!asSubmission(x)) {
                    return false;
            const itemId = item.id;
            const referenceUrl = await item.url;
            const usableUrl = parseLink(referenceUrl);
            let filteredActivity: (Submission|Comment)[] = [];
            let analysisTimes: number[] = [];
            let referenceImage: ImageData | undefined;
            if (this.imageDetection.enable) {
                try {
                    referenceImage = ImageData.fromSubmission(item);
                    referenceImage.setPreferredResolutionByWidth(800);
                    if(this.imageDetection.hash.enable) {
                        let refHash: string | undefined;
                        if(this.imageDetection.hash.ttl !== undefined) {
                            refHash = await this.resources.getImageHash(referenceImage);
                            if(refHash === undefined) {
                                refHash = await referenceImage.hash(this.imageDetection.hash.bits);
                                await this.resources.setImageHash(referenceImage, refHash, this.imageDetection.hash.ttl);
                            } else if(refHash.length !== bitsToHexLength(this.imageDetection.hash.bits)) {
                                this.logger.warn('Reference image hash length did not correspond to bits specified in config. Recomputing...');
                                refHash = await referenceImage.hash(this.imageDetection.hash.bits);
                                await this.resources.setImageHash(referenceImage, refHash, this.imageDetection.hash.ttl);
                            }
                        } else {
                            refHash = await referenceImage.hash(this.imageDetection.hash.bits);
                        }
                    }
                    //await referenceImage.sharp();
                    // await referenceImage.hash();
                    // if (referenceImage.preferredResolution !== undefined) {
                    //     await (referenceImage.getSimilarResolutionVariant(...referenceImage.preferredResolution) as ImageData).sharp();
                    // }
                } catch (err) {
                    this.logger.verbose(err.message);
                }
            }
            let longRun;
            if (referenceImage !== undefined) {
                const l = this.logger;
                longRun = setTimeout(() => {
                    l.verbose('FYI: Image processing is causing rule to take longer than normal');
                }, 2500);
            }
            // @ts-ignore
            const ci = async (x: (Submission|Comment)) => {
                if (!asSubmission(x) || x.id === itemId) {
                    return null;
                }
                if (x.url === undefined) {
                    return false;
                    return null;
                }
                return parseLink(x.url) === usableUrl;
            });
                if (parseLink(x.url) === usableUrl) {
                    return x;
                }
                // only do image detection if regular URL comparison and other conditions fail first
                // to reduce CPU/bandwidth usage
                if (referenceImage !== undefined) {
                    try {
                        let imgData = ImageData.fromSubmission(x);
                        imgData.setPreferredResolutionByWidth(800);
                        if(this.imageDetection.hash.enable) {
                            let compareHash: string | undefined;
                            if(this.imageDetection.hash.ttl !== undefined) {
                                compareHash = await this.resources.getImageHash(imgData);
                            }
                            if(compareHash === undefined)
                            {
                                compareHash = await imgData.hash(this.imageDetection.hash.bits);
                                if(this.imageDetection.hash.ttl !== undefined) {
                                    await this.resources.setImageHash(imgData, compareHash, this.imageDetection.hash.ttl);
                                }
                            }
                            const refHash = await referenceImage.hash(this.imageDetection.hash.bits);
                            if(refHash.length !== compareHash.length) {
                                this.logger.debug(`Hash lengths were not the same! Will need to recompute compare hash to match reference.\n\nReference: ${referenceImage.baseUrl} hash is ${refHash.length} chars long | Comparing: ${imgData.baseUrl} hash ${compareHash} is ${compareHash.length} chars long`);
                                compareHash = await imgData.hash(this.imageDetection.hash.bits)
                            }
                            const distance = leven(refHash, compareHash);
                            const diff = (distance/refHash.length)*100;


                            // return image if hard is defined and diff is less
                            if(null !== this.imageDetection.hash.hardThreshold && diff <= this.imageDetection.hash.hardThreshold) {
                                return x;
                            }
                            // hard is either not defined or diff was greater than hard

                            // if soft is defined
                            if (this.imageDetection.hash.softThreshold !== undefined) {
                                // and diff is greater than soft allowance
                                if(diff > this.imageDetection.hash.softThreshold) {
                                    // not similar enough
                                    return null;
                                }
                                // similar enough, will continue on to pixel (if enabled!)
                            } else {
                                // only hard was defined and did not pass
                                return null;
                            }
                        }
                        // at this point either hash was not enabled or it was and we hit soft threshold but not hard
                        if(this.imageDetection.pixel.enable) {
                            try {
                                const [compareResult, sameImage] = await compareImages(referenceImage, imgData, this.imageDetection.pixel.threshold / 100);
                                analysisTimes.push(compareResult.analysisTime);
                                if (sameImage) {
                                    return x;
                                }
                            } catch (err) {
                                this.logger.warn(`Unexpected error encountered while pixel-comparing images, will skip comparison => ${err.message}`);
                            }
                        }
                    } catch (err) {
                        if(!err.message.includes('did not end with a valid image extension')) {
                            this.logger.warn(`Will not compare image from Submission ${x.id} due to error while parsing image URL => ${err.message}`);
                        }
                    }
                }
                return null;
            }
            // parallel all the things
            this.logger.profile('asyncCompare');
            const results = await pMap(viableActivity, ci, {concurrency: imageCompareMaxConcurrencyGuess});
            this.logger.profile('asyncCompare', {level: 'debug', message: 'Total time for image comparison (incl download/cache calls)'});
            const totalAnalysisTime = analysisTimes.reduce((acc, x) => acc + x,0);
            if(analysisTimes.length > 0) {
                this.logger.debug(`Reference image pixel-compared ${analysisTimes.length} times. Timings: Avg ${formatNumber(totalAnalysisTime / analysisTimes.length, {toFixed: 0})}ms | Max: ${Math.max(...analysisTimes)}ms | Min: ${Math.min(...analysisTimes)}ms | Total: ${totalAnalysisTime}ms (${formatNumber(totalAnalysisTime/1000)}s)`);
            }
            filteredActivity = filteredActivity.concat(results.filter(x => x !== null));
            if (longRun !== undefined) {
                clearTimeout(longRun);
            }
            viableActivity = filteredActivity;
        }
    }

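The comparison callback above scores similarity as the Levenshtein distance between the two hex hash strings, normalized by hash length. A worked example of that arithmetic, assuming (for illustration only) a 16-character hex hash; the real length for a given `bits` setting comes from `bitsToHexLength`, which this diff does not show:

```typescript
import leven from 'leven';

// Two stand-in hex hashes differing in 2 characters (illustrative values)
const refHash = 'ffe180ffc3a5993c';
const compareHash = 'ffe180ffc3a49b3c';

const distance = leven(refHash, compareHash);   // 2 edits
const diff = (distance / refHash.length) * 100; // (2 / 16) * 100 = 12.5

// Against the constructor default threshold of 5 this pair fails hardThreshold,
// so it only survives if softThreshold (when set) is >= 12.5, in which case the
// pixel comparison gets the final say.
```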
@@ -102,42 +285,74 @@ export class RecentActivityRule extends Rule {

        // convert subreddits array into entirely StrongSubredditState
        const subStates: StrongSubredditState[] = subreddits.map((x) => {
            if(typeof x === 'string') {
                return toStrongSubredditState({name: x, stateDescription: x}, {defaultFlags: 'i', generateDescription: true});
            if (typeof x === 'string') {
                return toStrongSubredditState({name: x, stateDescription: x}, {
                    defaultFlags: 'i',
                    generateDescription: true
                });
            }
            return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
        });

        for(const activity of viableActivity) {
            if(asSubmission(activity) && submissionState !== undefined) {
                if(!(await this.resources.testItemCriteria(activity, [submissionState]))) {
        let validActivity: (Comment | Submission)[] = await as.filter(viableActivity, async (activity) => {
            if (asSubmission(activity) && submissionState !== undefined) {
                return await this.resources.testItemCriteria(activity, [submissionState]);
            } else if (commentState !== undefined) {
                return await this.resources.testItemCriteria(activity, [commentState]);
            }
            return true;
        });

        validActivity = await this.resources.batchTestSubredditCriteria(validActivity, subStates);
        for (const activity of validActivity) {
            currCount++;
            // @ts-ignore
            combinedKarma += activity.score;
            const pSub = getActivitySubredditName(activity);
            if (!presentSubs.includes(pSub)) {
                presentSubs.push(pSub);
            }
        }

        for (const activity of viableActivity) {
            if (asSubmission(activity) && submissionState !== undefined) {
                if (!(await this.resources.testItemCriteria(activity, [submissionState]))) {
                    continue;
                }
            } else if(commentState !== undefined) {
                if(!(await this.resources.testItemCriteria(activity, [commentState]))) {
            } else if (commentState !== undefined) {
                if (!(await this.resources.testItemCriteria(activity, [commentState]))) {
                    continue;
                }
            }
            let inSubreddits = false;
            for(const ss of subStates) {
            for (const ss of subStates) {
                const res = await this.resources.testSubredditCriteria(activity, ss);
                if(res) {
                if (res) {
                    inSubreddits = true;
                    break;
                }
            }
            if(inSubreddits) {
            if (inSubreddits) {
                currCount++;
                combinedKarma += activity.score;
                const pSub = getActivitySubredditName(activity);
                if(!presentSubs.includes(pSub)) {
                if (!presentSubs.includes(pSub)) {
                    presentSubs.push(pSub);
                }
            }
        }

        const {operator, value, isPercent} = parseGenericValueOrPercentComparison(threshold);
        let sum = {subsWithActivity: presentSubs, combinedKarma, karmaThreshold, subreddits: subStates.map(x => x.stateDescription), count: currCount, threshold, triggered: false, testValue: currCount.toString()};
        let sum = {
            subsWithActivity: presentSubs,
            combinedKarma,
            karmaThreshold,
            subreddits: subStates.map(x => x.stateDescription),
            count: currCount,
            threshold,
            triggered: false,
            testValue: currCount.toString()
        };
        if (isPercent) {
            sum.testValue = `${formatNumber((currCount / viableActivity.length) * 100)}%`;
            if (comparisonTextOp(currCount / viableActivity.length, operator, value / 100)) {
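The rewrite above replaces the per-item loop with the `async` library's `filter`, so the criteria tests can run as awaited predicates, followed by a single batched subreddit test. A minimal sketch of why `as.filter` is used instead of `Array.prototype.filter`:

```typescript
import as from 'async';

// Array.prototype.filter cannot await: an async predicate returns a Promise,
// which is always truthy, so nothing would ever be filtered out.
// async's filter resolves each predicate before deciding.
const evens: number[] = await as.filter([1, 2, 3, 4], async (n: number) => n % 2 === 0);
// evens => [2, 4]
```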
@@ -149,9 +364,9 @@ export class RecentActivityRule extends Rule {
                totalTriggeredOn = sum;
            }
            // if we would trigger on threshold need to also test for karma
            if(totalTriggeredOn !== undefined && karmaThreshold !== undefined) {
            if (totalTriggeredOn !== undefined && karmaThreshold !== undefined) {
                const {operator: opKarma, value: valueKarma} = parseGenericValueOrPercentComparison(karmaThreshold);
                if(!comparisonTextOp(combinedKarma, opKarma, valueKarma)) {
                if (!comparisonTextOp(combinedKarma, opKarma, valueKarma)) {
                    sum.triggered = false;
                    totalTriggeredOn = undefined;
                }
@@ -169,7 +384,7 @@ export class RecentActivityRule extends Rule {
            result = `${PASS} ${resultData.result}`;
            this.logger.verbose(result);
            return Promise.resolve([true, this.getResult(true, resultData)]);
        } else if(summaries.length === 1) {
        } else if (summaries.length === 1) {
            // can display result if it's only one summary otherwise need to log to debug
            const res = this.generateResultData(summaries[0], viableActivity);
            result = `${FAIL} ${res.result}`;
@@ -182,7 +397,7 @@ export class RecentActivityRule extends Rule {

        return Promise.resolve([false, this.getResult(false, {result})]);
    }


    generateResultData(summary: any, activities: (Submission | Comment)[] = []) {
        const {
            count,
@@ -196,7 +411,7 @@ export class RecentActivityRule extends Rule {
        } = summary;
        const relevantSubs = subsWithActivity.length === 0 ? subreddits : subsWithActivity;
        let totalSummary = `${testValue} activities over ${relevantSubs.length} subreddits${karmaThreshold !== undefined ? ` with ${combinedKarma} combined karma` : ''} ${triggered ? 'met' : 'did not meet'} threshold of ${threshold}${karmaThreshold !== undefined ? ` and ${karmaThreshold} combined karma` : ''}`;
        if(triggered && subsWithActivity.length > 0) {
        if (triggered && subsWithActivity.length > 0) {
            totalSummary = `${totalSummary} -- subreddits: ${subsWithActivity.join(', ')}`;
        }
        return {
@@ -223,12 +438,12 @@
 * */
export interface ActivityThreshold {
    /**
    * When present, a Submission will only be counted if it meets this criteria
    * */
     * When present, a Submission will only be counted if it meets this criteria
     * */
    submissionState?: SubmissionState
    /**
    * When present, a Comment will only be counted if it meets this criteria
    * */
     * When present, a Comment will only be counted if it meets this criteria
     * */
    commentState?: CommentState

    /**
@@ -288,6 +503,8 @@ interface RecentActivityConfig extends ActivityWindow, ReferenceSubmission {
     * @minItems 1
     * */
    thresholds: ActivityThreshold[],

    imageDetection?: ImageDetection
}

export interface RecentActivityRuleOptions extends RecentActivityConfig, RuleOptions {

@@ -4,7 +4,7 @@ import {
    activityWindowText, asSubmission,
    comparisonTextOp, FAIL, getActivitySubredditName, isExternalUrlSubmission, isRedditMedia,
    parseGenericValueComparison, parseSubredditName,
    parseUsableLinkIdentifier as linkParser, PASS, toStrongSubredditState
    parseUsableLinkIdentifier as linkParser, PASS, subredditStateIsNameOnly, toStrongSubredditState
} from "../util";
import {
    ActivityWindow,
@@ -58,6 +58,7 @@ export class RepeatActivityRule extends Rule {
    lookAt: 'submissions' | 'all';
    include: (string | SubredditState)[];
    exclude: (string | SubredditState)[];
    hasFullSubredditCrits: boolean = false;
    activityFilterFunc: (x: Submission|Comment) => Promise<boolean> = async (x) => true;
    keepRemoved: boolean;
    minWordCount: number;
@@ -91,6 +92,7 @@ export class RepeatActivityRule extends Rule {
            }
            return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
        });
        this.hasFullSubredditCrits = !subStates.every(x => subredditStateIsNameOnly(x));
        this.activityFilterFunc = async (x: Submission|Comment) => {
            for(const ss of subStates) {
                if(await this.resources.testSubredditCriteria(x, ss)) {
@@ -106,6 +108,7 @@ export class RepeatActivityRule extends Rule {
            }
            return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
        });
        this.hasFullSubredditCrits = !subStates.every(x => subredditStateIsNameOnly(x));
        this.activityFilterFunc = async (x: Submission|Comment) => {
            for(const ss of subStates) {
                if(await this.resources.testSubredditCriteria(x, ss)) {
@@ -149,6 +152,12 @@ export class RepeatActivityRule extends Rule {
                break;
        }

        if(this.hasFullSubredditCrits) {
            // go ahead and cache subreddits now
            // because we can't use batch test since testing activities in order is important for this rule
            await this.resources.cacheSubreddits(activities.map(x => x.subreddit));
        }

        const condensedActivities = await activities.reduce(async (accProm: Promise<RepeatActivityReducer>, activity: (Submission | Comment), index: number) => {
            const acc = await accProm;
            const {openSets = [], allSets = []} = acc;

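`hasFullSubredditCrits` flags whether any subreddit criteria test more than the name; only then is the pre-caching above worthwhile. The real `subredditStateIsNameOnly` lives in `../util` and is not shown in this diff, so the following is only a plausible sketch of the idea:

```typescript
// Plausible sketch only (assumption, not the actual util implementation):
// a state constraining nothing but `name`/`stateDescription` can be tested
// against an activity without fetching the subreddit from the API.
const subredditStateIsNameOnlySketch = (state: StrongSubredditState): boolean => {
    const {name, stateDescription, ...rest} = state as any;
    return Object.values(rest).every(v => v === undefined);
};
```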
@@ -28,6 +28,7 @@ interface ResultContext {

export interface RuleResult extends ResultContext {
    premise: RulePremise
    kind: string
    name: string
    triggered: (boolean | null)
}
@@ -153,6 +154,7 @@ export abstract class Rule implements IRule, Triggerable {
    protected getResult(triggered: (boolean | null) = null, context: ResultContext = {}): RuleResult {
        return {
            premise: this.getPremise(),
            kind: this.getKind(),
            name: this.name,
            triggered,
            ...context,
@@ -209,22 +211,6 @@ export interface UserNoteCriteria {
    search?: 'current' | 'consecutive' | 'total'
}

/**
 * A duration and how to compare it against a value
 *
 * The syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`
 *
 * * EX `> 100 days` => Passes if the date being compared is before 100 days ago
 * * EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months
 *
 * Unit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)
 *
 * [See] https://regexr.com/609n8 for example
 *
 * @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\s*$
 * */
export type DurationComparor = string;

export interface IRule extends ChecksActivityState {
    /**
     * An optional, but highly recommended, friendly name for this rule. If not present will default to `kind`.

@@ -69,6 +69,10 @@
            },
            "type": "array"
        },
        "shadowBanned": {
            "description": "Is the author shadowbanned?\n\nThis is determined by trying to retrieve the author's profile. If a 404 is returned it is likely they are shadowbanned",
            "type": "boolean"
        },
        "totalKarma": {
            "description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
            "pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
@@ -132,6 +136,11 @@
            }
        ],
        "properties": {
            "age": {
                "description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
                "pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
                "type": "string"
            },
            "approved": {
                "type": "boolean"
            },
@@ -189,6 +198,11 @@
            }
        ],
        "properties": {
            "age": {
                "description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
                "pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
                "type": "string"
            },
            "approved": {
                "type": "boolean"
            },

@@ -233,7 +233,7 @@
        "properties": {
            "aggregateOn": {
                "default": "undefined",
                "description": "This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)\n* If `link` is included then aggregate author's submission history which is external links but not media\n\nIf nothing is specified or list is empty (default) all domains are aggregated",
                "description": "This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `redditMedia` is included then aggregate on author's submission history which are media hosted on reddit: galleries, videos, and images (i.redd.it / v.redd.it)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or domain is `reddit.com`\n* If `link` is included then aggregate author's submission history which is external links and not recognized as `media` by reddit\n\nIf nothing is specified or list is empty (default) rule will only aggregate on `link` and `media` (ignores reddit-hosted content and self-posts)",
                "examples": [
                    [
                    ]
@@ -242,6 +242,7 @@
                "enum": [
                    "link",
                    "media",
                    "redditMedia",
                    "self"
                ],
                "type": "string"
@@ -418,11 +419,6 @@
            ],
            "pattern": "^[a-zA-Z]([\\w -]*[\\w])?$",
            "type": "string"
        },
        "useSubmissionAsReference": {
            "default": true,
            "description": "If activity is a Submission and is a link (not self-post) then only look at Submissions that contain this link, otherwise consider all activities.",
            "type": "boolean"
        }
    },
    "required": [
@@ -498,6 +494,10 @@
            },
            "type": "array"
        },
        "shadowBanned": {
            "description": "Is the author shadowbanned?\n\nThis is determined by trying to retrieve the author's profile. If a 404 is returned it is likely they are shadowbanned",
            "type": "boolean"
        },
        "totalKarma": {
            "description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
            "pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
@@ -922,6 +922,25 @@
            ],
            "type": "string"
        },
"ClearProcessedOptions": {
|
||||
"description": "For very long-running, high-volume subreddits clearing the list of processed activities helps manage memory bloat\n\nAll of these options have default values based on the limit and/or interval set for polling options on each subreddit stream. They only need to modified if the defaults are not sufficient.\n\nIf both `after` and `size` are defined whichever is hit first will trigger the list to clear. `after` will be reset after ever clear.",
|
||||
"properties": {
|
||||
"after": {
|
||||
"description": "An interval the processed list should be cleared after.\n\n* EX `9 days`\n* EX `3 months`\n* EX `5 minutes`",
|
||||
"pattern": "^\\s*(?<time>\\d+)\\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"retain": {
|
||||
"description": "The number of activities to retain in processed list after clearing.\n\nDefaults to `limit` value from `PollingOptions`",
|
||||
"type": "number"
|
||||
},
|
||||
"size": {
|
||||
"description": "Number of activities found in processed list after which the list should be cleared.\n\nDefaults to the `limit` value from `PollingOptions`",
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
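A hedged example of `ClearProcessedOptions` in use under a subreddit polling entry, per the schema above (property names come from the schema; every value is illustrative):

```typescript
const pollingEntry = {
    pollOn: 'unmoderated',
    interval: 30,
    limit: 100,
    clearProcessed: {
        size: 200,         // clear once 200 processed ids have accumulated...
        after: '2 hours',  // ...or after this interval, whichever is hit first
        retain: 100,       // keep the newest 100 entries after each clear
    },
};
```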
"CommentActionJson": {
|
||||
"description": "Reply to the Activity. For a submission the reply will be a top-level comment.",
|
||||
"properties": {
|
||||
@@ -1216,6 +1235,11 @@
|
||||
}
|
||||
],
|
||||
"properties": {
|
||||
"age": {
|
||||
"description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"approved": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -1604,6 +1628,77 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"ImageDetection": {
|
||||
"description": "When comparing submissions detect if the reference submission is an image and do a pixel-comparison to other detected image submissions.\n\n**Note:** This is an **experimental feature**",
|
||||
"properties": {
|
||||
"enable": {
|
||||
"description": "Is image detection enabled?",
|
||||
"type": "boolean"
|
||||
},
|
||||
"fetchBehavior": {
|
||||
"default": "extension",
|
||||
"description": "Determines how and when to check if a URL is an image\n\n**Note:** After fetching a URL the **Content-Type** is validated to contain `image` before detection occurs\n\n**When `extension`:** (default)\n\n* Only URLs that end in known image extensions (.png, .jpg, etc...) are fetched\n\n**When `unknown`:**\n\n* URLs that end in known image extensions (.png, .jpg, etc...) are fetched\n* URLs with no extension or unknown (IE non-video, non-doc, etc...) are fetched\n\n**When `all`:**\n\n* All submissions that have URLs (non-self) will be fetched, regardless of extension\n* **Note:** This can be bandwidth/CPU intensive if history window is large so use with care",
|
||||
"enum": [
|
||||
"all",
|
||||
"extension",
|
||||
"unknown"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"hash": {
|
||||
"description": "Use perceptual hashing (blockhash-js) to compare images\n\nPros:\n\n* very fast\n* low cpu/memory usage\n* results can be cached\n\nCons:\n\n* not as accurate as pixel comparison\n* weaker for text-heavy images\n* mostly color-blind\n\nBest uses:\n\n* Detecting (general) duplicate images\n* Comparing large number of images",
|
||||
"properties": {
|
||||
"bits": {
|
||||
"default": 32,
|
||||
"description": "Bit count determines accuracy of hash and granularity of hash comparison (comparison to other hashes)\n\nDefault is `32`\n\n**NOTE:** Hashes of different sizes (bits) cannot be compared. If you are caching results make sure all rules where results may be shared use the same bit count to ensure hashes can be compared. Otherwise hashes will be recomputed.",
|
||||
"type": "number"
|
||||
},
|
||||
"enable": {
|
||||
"default": true,
|
||||
"description": "Enabled by default.\n\nIf both `hash` and `pixel` are enabled then `pixel` will be used to verify image comparison when hashes matches",
|
||||
"type": "boolean"
|
||||
},
|
||||
"hardThreshold": {
|
||||
"description": "High Confidence Threshold\n\nIf the difference in comparison is equal to or less than this number the images are considered the same and pixel comparison WILL NOT occur\n\nDefaults to the parent-level `threshold` value if not present\n\nUse `null` if you want pixel comparison to ALWAYS occur (softThreshold must be present)",
|
||||
"type": [
|
||||
"null",
|
||||
"number"
|
||||
]
|
||||
},
|
||||
"softThreshold": {
|
||||
"description": "Low Confidence Threshold -- only used if `pixel` is enabled\n\nIf the difference in comparison is\n\n1) equal to or less than this value and\n2) the value is greater than `hardThreshold`\n\nthe images will be compared using the `pixel` method",
|
||||
"type": "number"
|
||||
},
|
||||
"ttl": {
|
||||
"description": "Number of seconds to cache image hash",
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"pixel": {
|
||||
"description": "Use pixel counting to compare images\n\nPros:\n\n* most accurate\n* strong with text or color-only changes\n\nCons:\n\n* much slower than hashing\n* memory/cpu intensive\n\nBest uses:\n\n* Comparison text-only images\n* Comparison requires high degree of accuracy or changes are subtle",
|
||||
"properties": {
|
||||
"enable": {
|
||||
"default": false,
|
||||
"description": "Disabled by default.",
|
||||
"type": "boolean"
|
||||
},
|
||||
"threshold": {
|
||||
"description": "The percentage, as a whole number, of pixels that are **different** between the two images at which point the images are not considered the same.",
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"threshold": {
|
||||
"default": 5,
|
||||
"description": "The percentage, as a whole number, of difference between two images at which point they will not be considered the same.\n\nWill be used as `hash.hardThreshold` and `pixel.threshold` if those values are not specified\n\nDefault is `5`",
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
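A hedged `imageDetection` config assembled from the `ImageDetection` definition above -- hashing as a cheap first pass, pixel comparison as verification (the threshold numbers are illustrative):

```typescript
const imageDetection = {
    enable: true,
    fetchBehavior: 'extension', // only fetch URLs ending in known image extensions
    threshold: 5,               // fallback for hash.hardThreshold and pixel.threshold
    hash: {
        enable: true,
        bits: 32,
        ttl: 60,          // cache computed hashes for 60 seconds
        hardThreshold: 3, // <= 3% hash difference: same image, skip pixel check
        softThreshold: 10 // 3%-10% difference: uncertain, verify with pixel comparison
    },
    pixel: {
        enable: true,
        threshold: 5      // > 5% differing pixels: not the same image
    }
};
```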
"LockActionJson": {
|
||||
"description": "Lock the Activity",
|
||||
"properties": {
|
||||
@@ -1871,6 +1966,10 @@
|
||||
}
|
||||
],
|
||||
"properties": {
|
||||
"clearProcessed": {
|
||||
"$ref": "#/definitions/ClearProcessedOptions",
|
||||
"description": "For very long-running, high-volume subreddits clearing the list of processed activities helps manage memory bloat\n\nAll of these options have default values based on the limit and/or interval set for polling options on each subreddit stream. They only need to modified if the defaults are not sufficient.\n\nIf both `after` and `size` are defined whichever is hit first will trigger the list to clear. `after` will be reset after ever clear."
|
||||
},
|
||||
"delayUntil": {
|
||||
"description": "Delay processing Activity until it is `N` seconds old\n\nUseful if there are other bots that may process an Activity and you want this bot to run first/last/etc.\n\nIf the Activity is already `N` seconds old when it is initially retrieved no refresh of the Activity occurs (no API request is made) and it is immediately processed.",
|
||||
"type": "number"
|
||||
@@ -1929,6 +2028,10 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"imageDetection": {
|
||||
"$ref": "#/definitions/ImageDetection",
|
||||
"description": "When comparing submissions detect if the reference submission is an image and do a pixel-comparison to other detected image submissions.\n\n**Note:** This is an **experimental feature**"
|
||||
},
|
||||
"itemIs": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -2802,6 +2905,11 @@
|
||||
}
|
||||
],
|
||||
"properties": {
|
||||
"age": {
|
||||
"description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"approved": {
|
||||
"type": "boolean"
|
||||
},
|
||||
|
||||
@@ -79,6 +79,10 @@
|
||||
"default": false,
|
||||
"description": "If set to `true` all subreddits polling unmoderated/modqueue with default polling settings will share a request to \"r/mod\"\notherwise each subreddit will poll its own mod view\n\n* ENV => `SHARE_MOD`\n* ARG => `--shareMod`",
|
||||
"type": "boolean"
|
||||
},
|
||||
"stagger": {
|
||||
"description": "If sharing a mod stream stagger pushing relevant Activities to individual subreddits.\n\nUseful when running many subreddits and rules are potentially cpu/memory/traffic heavy -- allows spreading out load",
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
|
||||
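A hedged bot-level polling example using the two properties above: share one r/mod stream across subreddits and stagger delivery so heavy rule runs do not all land at once (the unit of `stagger` is assumed here, not stated by the schema):

```typescript
const polling = {
    sharedMod: true, // one shared request to r/mod instead of one per subreddit
    stagger: 2000,   // assumed milliseconds between pushing activities to each subreddit
};
```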
@@ -179,7 +179,7 @@
        "properties": {
            "aggregateOn": {
                "default": "undefined",
                "description": "This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)\n* If `link` is included then aggregate author's submission history which is external links but not media\n\nIf nothing is specified or list is empty (default) all domains are aggregated",
                "description": "This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `redditMedia` is included then aggregate on author's submission history which are media hosted on reddit: galleries, videos, and images (i.redd.it / v.redd.it)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or domain is `reddit.com`\n* If `link` is included then aggregate author's submission history which is external links and not recognized as `media` by reddit\n\nIf nothing is specified or list is empty (default) rule will only aggregate on `link` and `media` (ignores reddit-hosted content and self-posts)",
                "examples": [
                    [
                    ]
@@ -188,6 +188,7 @@
                "enum": [
                    "link",
                    "media",
                    "redditMedia",
                    "self"
                ],
                "type": "string"
@@ -364,11 +365,6 @@
            ],
            "pattern": "^[a-zA-Z]([\\w -]*[\\w])?$",
            "type": "string"
        },
        "useSubmissionAsReference": {
            "default": true,
            "description": "If activity is a Submission and is a link (not self-post) then only look at Submissions that contain this link, otherwise consider all activities.",
            "type": "boolean"
        }
    },
    "required": [
@@ -444,6 +440,10 @@
            },
            "type": "array"
        },
        "shadowBanned": {
            "description": "Is the author shadowbanned?\n\nThis is determined by trying to retrieve the author's profile. If a 404 is returned it is likely they are shadowbanned",
            "type": "boolean"
        },
        "totalKarma": {
            "description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
            "pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
@@ -582,6 +582,11 @@
            }
        ],
        "properties": {
            "age": {
                "description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
                "pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
                "type": "string"
            },
            "approved": {
                "type": "boolean"
            },
@@ -863,6 +868,77 @@
            ],
            "type": "object"
        },
        "ImageDetection": {
            "description": "When comparing submissions detect if the reference submission is an image and do a pixel-comparison to other detected image submissions.\n\n**Note:** This is an **experimental feature**",
            "properties": {
                "enable": {
                    "description": "Is image detection enabled?",
                    "type": "boolean"
                },
                "fetchBehavior": {
                    "default": "extension",
                    "description": "Determines how and when to check if a URL is an image\n\n**Note:** After fetching a URL the **Content-Type** is validated to contain `image` before detection occurs\n\n**When `extension`:** (default)\n\n* Only URLs that end in known image extensions (.png, .jpg, etc...) are fetched\n\n**When `unknown`:**\n\n* URLs that end in known image extensions (.png, .jpg, etc...) are fetched\n* URLs with no extension or unknown (IE non-video, non-doc, etc...) are fetched\n\n**When `all`:**\n\n* All submissions that have URLs (non-self) will be fetched, regardless of extension\n* **Note:** This can be bandwidth/CPU intensive if history window is large so use with care",
                    "enum": [
                        "all",
                        "extension",
                        "unknown"
                    ],
                    "type": "string"
                },
                "hash": {
                    "description": "Use perceptual hashing (blockhash-js) to compare images\n\nPros:\n\n* very fast\n* low cpu/memory usage\n* results can be cached\n\nCons:\n\n* not as accurate as pixel comparison\n* weaker for text-heavy images\n* mostly color-blind\n\nBest uses:\n\n* Detecting (general) duplicate images\n* Comparing a large number of images",
                    "properties": {
                        "bits": {
                            "default": 32,
                            "description": "Bit count determines accuracy of hash and granularity of hash comparison (comparison to other hashes)\n\nDefault is `32`\n\n**NOTE:** Hashes of different sizes (bits) cannot be compared. If you are caching results make sure all rules where results may be shared use the same bit count to ensure hashes can be compared. Otherwise hashes will be recomputed.",
                            "type": "number"
                        },
                        "enable": {
                            "default": true,
                            "description": "Enabled by default.\n\nIf both `hash` and `pixel` are enabled then `pixel` will be used to verify image comparison when hashes match",
                            "type": "boolean"
                        },
                        "hardThreshold": {
                            "description": "High Confidence Threshold\n\nIf the difference in comparison is equal to or less than this number the images are considered the same and pixel comparison WILL NOT occur\n\nDefaults to the parent-level `threshold` value if not present\n\nUse `null` if you want pixel comparison to ALWAYS occur (softThreshold must be present)",
                            "type": [
                                "null",
                                "number"
                            ]
                        },
                        "softThreshold": {
                            "description": "Low Confidence Threshold -- only used if `pixel` is enabled\n\nIf the difference in comparison is\n\n1) equal to or less than this value and\n2) the value is greater than `hardThreshold`\n\nthe images will be compared using the `pixel` method",
                            "type": "number"
                        },
                        "ttl": {
                            "description": "Number of seconds to cache image hash",
                            "type": "number"
                        }
                    },
                    "type": "object"
                },
                "pixel": {
                    "description": "Use pixel counting to compare images\n\nPros:\n\n* most accurate\n* strong with text or color-only changes\n\nCons:\n\n* much slower than hashing\n* memory/cpu intensive\n\nBest uses:\n\n* Comparing text-only images\n* Comparisons requiring a high degree of accuracy or where changes are subtle",
                    "properties": {
                        "enable": {
                            "default": false,
                            "description": "Disabled by default.",
                            "type": "boolean"
                        },
                        "threshold": {
                            "description": "The percentage, as a whole number, of pixels that are **different** between the two images at which point the images are not considered the same.",
                            "type": "number"
                        }
                    },
                    "type": "object"
                },
                "threshold": {
                    "default": 5,
                    "description": "The percentage, as a whole number, of difference between two images at which point they will not be considered the same.\n\nWill be used as `hash.hardThreshold` and `pixel.threshold` if those values are not specified\n\nDefault is `5`",
                    "type": "number"
                }
            },
            "type": "object"
        },
        "RecentActivityRuleJSONConfig": {
            "description": "Checks a user's history for any Activity (Submission/Comment) in the subreddits specified in thresholds\n\nAvailable data for [Action templating](https://github.com/FoxxMD/context-mod#action-templating):\n\n```\nsummary => comma-delimited list of subreddits that hit the threshold and their count EX subredditA(1), subredditB(4),...\nsubCount => Total number of subreddits that hit the threshold\ntotalCount => Total number of all activity occurrences in subreddits\n```",
            "properties": {
@@ -885,6 +961,10 @@
            }
        ]
    },
    "imageDetection": {
        "$ref": "#/definitions/ImageDetection",
        "description": "When comparing submissions detect if the reference submission is an image and do a pixel-comparison to other detected image submissions.\n\n**Note:** This is an **experimental feature**"
    },
    "itemIs": {
        "anyOf": [
            {
@@ -1367,6 +1447,11 @@
            }
        ],
        "properties": {
            "age": {
                "description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
                "pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
                "type": "string"
            },
            "approved": {
                "type": "boolean"
            },

@@ -156,7 +156,7 @@
|
||||
"properties": {
|
||||
"aggregateOn": {
|
||||
"default": "undefined",
|
||||
"description": "This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)\n* If `link` is included then aggregate author's submission history which is external links but not media\n\nIf nothing is specified or list is empty (default) all domains are aggregated",
|
||||
"description": "This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `redditMedia` is included then aggregate on author's submissions history which are media hosted on reddit: galleries, videos, and images (i.redd.it / v.redd.it)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or domain is `reddit.com`\n* If `link` is included then aggregate author's submission history which is external links and not recognized as `media` by reddit\n\nIf nothing is specified or list is empty (default) rule will only aggregate on `link` and `media` (ignores reddit-hosted content and self-posts)",
|
||||
"examples": [
|
||||
[
|
||||
]
|
||||
@@ -165,6 +165,7 @@
|
||||
"enum": [
|
||||
"link",
|
||||
"media",
|
||||
"redditMedia",
|
||||
"self"
|
||||
],
|
||||
"type": "string"
|
||||
@@ -341,11 +342,6 @@
|
||||
],
|
||||
"pattern": "^[a-zA-Z]([\\w -]*[\\w])?$",
|
||||
"type": "string"
|
||||
},
|
||||
"useSubmissionAsReference": {
|
||||
"default": true,
|
||||
"description": "If activity is a Submission and is a link (not self-post) then only look at Submissions that contain this link, otherwise consider all activities.",
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
@@ -421,6 +417,10 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"shadowBanned": {
|
||||
"description": "Is the author shadowbanned?\n\nThis is determined by trying to retrieve the author's profile. If a 404 is returned it is likely they are shadowbanned",
|
||||
"type": "boolean"
|
||||
},
|
||||
"totalKarma": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
@@ -559,6 +559,11 @@
|
||||
}
|
||||
],
|
||||
"properties": {
|
||||
"age": {
|
||||
"description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"approved": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -840,6 +845,77 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"ImageDetection": {
|
||||
"description": "When comparing submissions detect if the reference submission is an image and do a pixel-comparison to other detected image submissions.\n\n**Note:** This is an **experimental feature**",
|
||||
"properties": {
|
||||
"enable": {
|
||||
"description": "Is image detection enabled?",
|
||||
"type": "boolean"
|
||||
},
|
||||
"fetchBehavior": {
|
||||
"default": "extension",
|
||||
"description": "Determines how and when to check if a URL is an image\n\n**Note:** After fetching a URL the **Content-Type** is validated to contain `image` before detection occurs\n\n**When `extension`:** (default)\n\n* Only URLs that end in known image extensions (.png, .jpg, etc...) are fetched\n\n**When `unknown`:**\n\n* URLs that end in known image extensions (.png, .jpg, etc...) are fetched\n* URLs with no extension or unknown (IE non-video, non-doc, etc...) are fetched\n\n**When `all`:**\n\n* All submissions that have URLs (non-self) will be fetched, regardless of extension\n* **Note:** This can be bandwidth/CPU intensive if history window is large so use with care",
|
||||
"enum": [
|
||||
"all",
|
||||
"extension",
|
||||
"unknown"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"hash": {
|
||||
"description": "Use perceptual hashing (blockhash-js) to compare images\n\nPros:\n\n* very fast\n* low cpu/memory usage\n* results can be cached\n\nCons:\n\n* not as accurate as pixel comparison\n* weaker for text-heavy images\n* mostly color-blind\n\nBest uses:\n\n* Detecting (general) duplicate images\n* Comparing large number of images",
|
||||
"properties": {
|
||||
"bits": {
|
||||
"default": 32,
|
||||
"description": "Bit count determines accuracy of hash and granularity of hash comparison (comparison to other hashes)\n\nDefault is `32`\n\n**NOTE:** Hashes of different sizes (bits) cannot be compared. If you are caching results make sure all rules where results may be shared use the same bit count to ensure hashes can be compared. Otherwise hashes will be recomputed.",
|
||||
"type": "number"
|
||||
},
|
||||
"enable": {
|
||||
"default": true,
|
||||
"description": "Enabled by default.\n\nIf both `hash` and `pixel` are enabled then `pixel` will be used to verify image comparison when hashes matches",
|
||||
"type": "boolean"
|
||||
},
|
||||
"hardThreshold": {
|
||||
"description": "High Confidence Threshold\n\nIf the difference in comparison is equal to or less than this number the images are considered the same and pixel comparison WILL NOT occur\n\nDefaults to the parent-level `threshold` value if not present\n\nUse `null` if you want pixel comparison to ALWAYS occur (softThreshold must be present)",
|
||||
"type": [
|
||||
"null",
|
||||
"number"
|
||||
]
|
||||
},
|
||||
"softThreshold": {
|
||||
"description": "Low Confidence Threshold -- only used if `pixel` is enabled\n\nIf the difference in comparison is\n\n1) equal to or less than this value and\n2) the value is greater than `hardThreshold`\n\nthe images will be compared using the `pixel` method",
|
||||
"type": "number"
|
||||
},
|
||||
"ttl": {
|
||||
"description": "Number of seconds to cache image hash",
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"pixel": {
|
||||
"description": "Use pixel counting to compare images\n\nPros:\n\n* most accurate\n* strong with text or color-only changes\n\nCons:\n\n* much slower than hashing\n* memory/cpu intensive\n\nBest uses:\n\n* Comparison text-only images\n* Comparison requires high degree of accuracy or changes are subtle",
|
||||
"properties": {
|
||||
"enable": {
|
||||
"default": false,
|
||||
"description": "Disabled by default.",
|
||||
"type": "boolean"
|
||||
},
|
||||
"threshold": {
|
||||
"description": "The percentage, as a whole number, of pixels that are **different** between the two images at which point the images are not considered the same.",
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"threshold": {
|
||||
"default": 5,
|
||||
"description": "The percentage, as a whole number, of difference between two images at which point they will not be considered the same.\n\nWill be used as `hash.hardThreshold` and `pixel.threshold` if those values are not specified\n\nDefault is `5`",
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"RecentActivityRuleJSONConfig": {
|
||||
"description": "Checks a user's history for any Activity (Submission/Comment) in the subreddits specified in thresholds\n\nAvailable data for [Action templating](https://github.com/FoxxMD/context-mod#action-templating):\n\n```\nsummary => comma-deliminated list of subreddits that hit the threshold and their count EX subredditA(1), subredditB(4),...\nsubCount => Total number of subreddits that hit the threshold\ntotalCount => Total number of all activity occurrences in subreddits\n```",
|
||||
"properties": {
|
||||
@@ -862,6 +938,10 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"imageDetection": {
|
||||
"$ref": "#/definitions/ImageDetection",
|
||||
"description": "When comparing submissions detect if the reference submission is an image and do a pixel-comparison to other detected image submissions.\n\n**Note:** This is an **experimental feature**"
|
||||
},
|
||||
"itemIs": {
|
||||
"anyOf": [
|
||||
{
|
||||
@@ -1344,6 +1424,11 @@
|
||||
}
|
||||
],
|
||||
"properties": {
|
||||
"age": {
|
||||
"description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"approved": {
|
||||
"type": "boolean"
|
||||
},
|
||||
|
||||
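As a hypothetical illustration of the templating data listed in the `RecentActivityRuleJSONConfig` description above, a removal-comment Action body might interpolate those values roughly like this (ContextMod uses Mustache-style templating, but the exact variable paths depend on how the rule result is named and exposed, so treat this as a sketch):

```
Your submission was removed because you recently posted in {{summary}}
({{totalCount}} activities across {{subCount}} subreddits).
```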
@@ -3,7 +3,7 @@ import {Logger} from "winston";
import {SubmissionCheck} from "../Check/SubmissionCheck";
import {CommentCheck} from "../Check/CommentCheck";
import {
cacheStats,
cacheStats, createHistoricalStatsDisplay,
createRetryHandler,
determineNewResults, findLastIndex, formatNumber,
mergeArr, parseFromJsonOrYamlToObject, pollingInfo, resultsSummary, sleep, totalFromMapStats, triggeredIndicator,
@@ -17,7 +17,7 @@ import {
ActionResult,
DEFAULT_POLLING_INTERVAL,
DEFAULT_POLLING_LIMIT, Invokee,
ManagerOptions, ManagerStateChangeOption, PAUSED,
ManagerOptions, ManagerStateChangeOption, ManagerStats, PAUSED,
PollingOptionsStrong, ResourceStats, RUNNING, RunState, STOPPED, SYSTEM, USER
} from "../Common/interfaces";
import Submission from "snoowrap/dist/objects/Submission";
@@ -39,6 +39,8 @@ import {JSONConfig} from "../JsonConfig";
import {CheckStructuredJson} from "../Check";
import NotificationManager from "../Notification/NotificationManager";
import action from "../Web/Server/routes/authenticated/user/action";
import {createHistoricalDefaults, historicalDefaults} from "../Common/defaults";
import {ExtendedSnoowrap} from "../Utils/SnoowrapClients";

export interface RunningState {
state: RunState,
@@ -65,39 +67,6 @@ export interface RuntimeManagerOptions extends ManagerOptions {
maxWorkers: number;
}

export interface ManagerStats {
eventsCheckedTotal: number
eventsCheckedSinceStartTotal: number
eventsAvg: number
checksRunTotal: number
checksRunSinceStartTotal: number
checksTriggered: number
checksTriggeredTotal: number
checksTriggeredSinceStart: number
checksTriggeredSinceStartTotal: number
rulesRunTotal: number
rulesRunSinceStartTotal: number
rulesCachedTotal: number
rulesCachedSinceStartTotal: number
rulesTriggeredTotal: number
rulesTriggeredSinceStartTotal: number
rulesAvg: number
actionsRun: number
actionsRunTotal: number
actionsRunSinceStart: number,
actionsRunSinceStartTotal: number
cache: {
provider: string,
currentKeyCount: number,
isShared: boolean,
totalRequests: number,
totalMiss: number,
missPercent: string,
requestRate: number,
types: ResourceStats
},
}

interface QueuedIdentifier {
id: string,
shouldRefresh: boolean
@@ -106,7 +75,7 @@ interface QueuedIdentifier {

export class Manager {
subreddit: Subreddit;
client: Snoowrap;
client: ExtendedSnoowrap;
logger: Logger;
botName: string;
pollOptions: PollingOptionsStrong[] = [];
@@ -164,50 +133,22 @@ export class Manager {
// use by api nanny to slow event consumption
delayBy?: number;

eventsCheckedTotal: number = 0;
eventsCheckedSinceStartTotal: number = 0;
eventsSample: number[] = [];
eventsSampleInterval: any;
eventsRollingAvg: number = 0;
checksRunTotal: number = 0;
checksRunSinceStartTotal: number = 0;
checksTriggered: Map<string, number> = new Map();
checksTriggeredSinceStart: Map<string, number> = new Map();
rulesRunTotal: number = 0;
rulesRunSinceStartTotal: number = 0;
rulesCachedTotal: number = 0;
rulesCachedSinceStartTotal: number = 0;
rulesTriggeredTotal: number = 0;
rulesTriggeredSinceStartTotal: number = 0;
rulesUniqueSample: number[] = [];
rulesUniqueSampleInterval: any;
rulesUniqueRollingAvg: number = 0;
actionsRun: Map<string, number> = new Map();
actionsRunSinceStart: Map<string, number> = new Map();
actionedEvents: ActionedEvent[] = [];

getStats = async (): Promise<ManagerStats> => {
const data: any = {
eventsCheckedTotal: this.eventsCheckedTotal,
eventsCheckedSinceStartTotal: this.eventsCheckedSinceStartTotal,
eventsAvg: formatNumber(this.eventsRollingAvg),
checksRunTotal: this.checksRunTotal,
checksRunSinceStartTotal: this.checksRunSinceStartTotal,
checksTriggered: this.checksTriggered,
checksTriggeredTotal: totalFromMapStats(this.checksTriggered),
checksTriggeredSinceStart: this.checksTriggeredSinceStart,
checksTriggeredSinceStartTotal: totalFromMapStats(this.checksTriggeredSinceStart),
rulesRunTotal: this.rulesRunTotal,
rulesRunSinceStartTotal: this.rulesRunSinceStartTotal,
rulesCachedTotal: this.rulesCachedTotal,
rulesCachedSinceStartTotal: this.rulesCachedSinceStartTotal,
rulesTriggeredTotal: this.rulesTriggeredTotal,
rulesTriggeredSinceStartTotal: this.rulesTriggeredSinceStartTotal,
rulesAvg: formatNumber(this.rulesUniqueRollingAvg),
actionsRun: this.actionsRun,
actionsRunTotal: totalFromMapStats(this.actionsRun),
actionsRunSinceStart: this.actionsRunSinceStart,
actionsRunSinceStartTotal: totalFromMapStats(this.actionsRunSinceStart),
historical: {
lastReload: createHistoricalStatsDisplay(createHistoricalDefaults()),
allTime: createHistoricalStatsDisplay(createHistoricalDefaults()),
},
cache: {
provider: 'none',
currentKeyCount: 0,
@@ -223,6 +164,7 @@ export class Manager {
if (this.resources !== undefined) {
const resStats = await this.resources.getStats();

data.historical = this.resources.getHistoricalDisplayStats();
data.cache = resStats.cache;
data.cache.currentKeyCount = await this.resources.getCacheKeyCount();
data.cache.isShared = this.resources.cacheSettingsHash === 'default';
@@ -239,7 +181,7 @@ export class Manager {
return this.displayLabel;
}

constructor(sub: Subreddit, client: Snoowrap, logger: Logger, cacheManager: BotResourcesManager, opts: RuntimeManagerOptions = {botName: 'ContextMod', maxWorkers: 1}) {
constructor(sub: Subreddit, client: ExtendedSnoowrap, logger: Logger, cacheManager: BotResourcesManager, opts: RuntimeManagerOptions = {botName: 'ContextMod', maxWorkers: 1}) {
const {dryRun, sharedModqueue = false, wikiLocation = 'botconfig/contextbot', botName, maxWorkers} = opts;
this.displayLabel = opts.nickname || `${sub.display_name_prefixed}`;
const getLabels = this.getCurrentLabels;
@@ -270,8 +212,9 @@

this.eventsSampleInterval = setInterval((function(self) {
return function() {
const et = self.resources !== undefined ? self.resources.stats.historical.allTime.eventsCheckedTotal : 0;
const rollingSample = self.eventsSample.slice(0, 7)
rollingSample.unshift(self.eventsCheckedTotal)
rollingSample.unshift(et)
self.eventsSample = rollingSample;
const diff = self.eventsSample.reduceRight((acc: number[], curr, index) => {
if(self.eventsSample[index + 1] !== undefined) {
@@ -291,7 +234,8 @@
this.rulesUniqueSampleInterval = setInterval((function(self) {
return function() {
const rollingSample = self.rulesUniqueSample.slice(0, 7)
rollingSample.unshift(self.rulesRunTotal - self.rulesCachedTotal);
const rt = self.resources !== undefined ? self.resources.stats.historical.allTime.rulesRunTotal - self.resources.stats.historical.allTime.rulesCachedTotal : 0;
rollingSample.unshift(rt);
self.rulesUniqueSample = rollingSample;
const diff = self.rulesUniqueSample.reduceRight((acc: number[], curr, index) => {
if(self.rulesUniqueSample[index + 1] !== undefined) {
@@ -387,7 +331,7 @@ export class Manager {
return q;
}

protected parseConfigurationFromObject(configObj: object) {
protected async parseConfigurationFromObject(configObj: object) {
try {
const configBuilder = new ConfigBuilder({logger: this.logger});
const validJson = configBuilder.validateJson(configObj);
@@ -437,7 +381,7 @@ export class Manager {
caching,
client: this.client,
};
this.resources = this.cacheManager.set(this.subreddit.display_name, resourceConfig);
this.resources = await this.cacheManager.set(this.subreddit.display_name, resourceConfig);
this.resources.setLogger(this.logger);

this.logger.info('Subreddit-specific options updated');
@@ -532,7 +476,7 @@ export class Manager {
throw new ConfigParseError('Could not parse wiki page contents as JSON or YAML')
}

this.parseConfigurationFromObject(configObj);
await this.parseConfigurationFromObject(configObj);
this.logger.info('Checks updated');

if(!suppressNotification) {
@@ -549,8 +493,6 @@ export class Manager {
async runChecks(checkType: ('Comment' | 'Submission'), activity: (Submission | Comment), options?: runCheckOptions): Promise<void> {
const checks = checkType === 'Comment' ? this.commentChecks : this.submissionChecks;
let item = activity;
this.eventsCheckedTotal++;
this.eventsCheckedSinceStartTotal++;
const itemId = await item.id;
let allRuleResults: RuleResult[] = [];
const itemIdentifier = `${checkType === 'Submission' ? 'SUB' : 'COM'} ${itemId}`;
@@ -622,7 +564,9 @@
actionResults: [],
}
let triggered = false;

let triggeredCheckName;
const checksRunNames = [];
const cachedCheckNames = [];
try {
for (const check of checks) {
if (checkNames.length > 0 && !checkNames.map(x => x.toLowerCase()).some(x => x === check.name.toLowerCase())) {
@@ -633,6 +577,7 @@
this.logger.info(`Check ${check.name} not run because it is not enabled, skipping...`);
continue;
}
checksRunNames.push(check.name);
checksRun++;
triggered = false;
let isFromCache = false;
@@ -642,6 +587,8 @@
isFromCache = fromCache;
if(!fromCache) {
await check.setCacheResult(item, {result: checkTriggered, ruleResults: checkResults});
} else {
cachedCheckNames.push(check.name);
}
currentResults = checkResults;
totalRulesRun += checkResults.length;
@@ -658,6 +605,7 @@
}

if (triggered) {
triggeredCheckName = check.name;
actionedEvent.check = check.name;
actionedEvent.ruleResults = currentResults;
if(isFromCache) {
@@ -665,8 +613,6 @@
} else {
actionedEvent.ruleSummary = resultsSummary(currentResults, check.condition);
}
this.checksTriggered.set(check.name, (this.checksTriggered.get(check.name) || 0) + 1);
this.checksTriggeredSinceStart.set(check.name, (this.checksTriggeredSinceStart.get(check.name) || 0) + 1);
runActions = await check.runActions(item, currentResults.filter(x => x.triggered), dryRun);
actionsRun = runActions.length;

@@ -688,23 +634,6 @@
}
} finally {
try {
const cachedTotal = totalRulesRun - allRuleResults.length;
const triggeredRulesTotal = allRuleResults.filter(x => x.triggered).length;

this.checksRunTotal += checksRun;
this.checksRunSinceStartTotal += checksRun;
this.rulesRunTotal += totalRulesRun;
this.rulesRunSinceStartTotal += totalRulesRun;
this.rulesCachedTotal += cachedTotal;
this.rulesCachedSinceStartTotal += cachedTotal;
this.rulesTriggeredTotal += triggeredRulesTotal;
this.rulesTriggeredSinceStartTotal += triggeredRulesTotal;

for (const a of runActions) {
const name = a.name;
this.actionsRun.set(name, (this.actionsRun.get(name) || 0) + 1);
this.actionsRunSinceStart.set(name, (this.actionsRunSinceStart.get(name) || 0) + 1);
}
actionedEvent.actionResults = runActions;
if(triggered) {
await this.resources.addActionedEvent(actionedEvent);
@@ -715,6 +644,18 @@
this.currentLabels = [];
} catch (err) {
this.logger.error('Error occurred while cleaning up Activity check and generating stats', err);
} finally {
this.resources.updateHistoricalStats({
eventsCheckedTotal: 1,
eventsActionedTotal: triggered ? 1 : 0,
checksTriggered: triggeredCheckName !== undefined ? [triggeredCheckName] : [],
checksRun: checksRunNames,
checksFromCache: cachedCheckNames,
actionsRun: runActions.map(x => x.name),
rulesRun: allRuleResults.map(x => x.name),
rulesTriggered: allRuleResults.filter(x => x.triggered).map(x => x.name),
rulesCachedTotal: totalRulesRun - allRuleResults.length,
});
}
}
}
@@ -732,7 +673,8 @@
pollOn,
limit,
interval,
delayUntil
delayUntil,
clearProcessed,
} = pollOpt;
let stream: SPoll<Snoowrap.Submission | Snoowrap.Comment>;
let modStreamType: string | undefined;
@@ -748,6 +690,7 @@
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
clearProcessed,
});
}
break;
@@ -761,6 +704,7 @@
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
clearProcessed
});
}
break;
@@ -769,6 +713,7 @@
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
clearProcessed
});
break;
case 'newComm':
@@ -776,6 +721,7 @@
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
clearProcessed
});
break;
}
@@ -813,11 +759,10 @@
}
};

stream.on('item', onItem);

if (modStreamType !== undefined) {
this.modStreamCallbacks.set(pollOn, onItem);
} else {
stream.on('item', onItem);
// @ts-ignore
stream.on('error', async (err: any) => {

@@ -997,18 +942,12 @@
s.end();
}
this.streams = [];
for (const [k, v] of this.modStreamCallbacks) {
const stream = this.cacheManager.modStreams.get(k) as Poll<Snoowrap.Submission | Snoowrap.Comment>;
stream.removeListener('item', v);
}
// for (const [k, v] of this.modStreamCallbacks) {
// const stream = this.cacheManager.modStreams.get(k) as Poll<Snoowrap.Submission | Snoowrap.Comment>;
// stream.removeListener('item', v);
// }
this.modStreamCallbacks = new Map();
this.startedAt = undefined;
this.eventsCheckedSinceStartTotal = 0;
this.checksRunSinceStartTotal = 0;
this.rulesRunSinceStartTotal = 0;
this.rulesCachedSinceStartTotal = 0;
this.rulesTriggeredSinceStartTotal = 0;
this.checksTriggeredSinceStart = new Map();
this.actionsRunSinceStart = new Map();
this.logger.info(`Events STOPPED by ${causedBy}`);
this.eventsState = {
state: STOPPED,

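The two sample intervals in the Manager diff above are truncated by the hunk boundaries; the technique they implement is a rolling average computed from successive differences of a cumulative counter. A minimal self-contained sketch of that idea (names are illustrative, not the Manager's actual fields):

```typescript
// Keep the last 8 cumulative totals (newest first); the successive differences
// are the per-interval counts, and their mean is the rolling average.
const updateRollingAvg = (sample: number[], cumulativeTotal: number): { sample: number[], avg: number } => {
    const rolling = sample.slice(0, 7);
    rolling.unshift(cumulativeTotal);
    // walk newest -> oldest, collecting curr - next (the count seen in each interval)
    const diffs = rolling.reduceRight((acc: number[], curr, index) => {
        if (rolling[index + 1] !== undefined) {
            acc.push(curr - rolling[index + 1]);
        }
        return acc;
    }, []);
    const avg = diffs.length === 0 ? 0 : diffs.reduce((a, b) => a + b, 0) / diffs.length;
    return { sample: rolling, avg };
};
```

Reading the counter from `resources.stats.historical.allTime` (as the new code does) rather than the in-memory field lets the average survive config reloads.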
@@ -2,51 +2,107 @@ import {Poll, SnooStormOptions} from "snoostorm"
import Snoowrap from "snoowrap";
import {EventEmitter} from "events";
import {PollConfiguration} from "snoostorm/out/util/Poll";
import {DEFAULT_POLLING_INTERVAL} from "../Common/interfaces";
import {ClearProcessedOptions, DEFAULT_POLLING_INTERVAL} from "../Common/interfaces";
import dayjs, {Dayjs} from "dayjs";
import { Duration } from "dayjs/plugin/duration";
import {parseDuration, sleep} from "../util";
import setRandomInterval from 'set-random-interval';

type Awaitable<T> = Promise<T> | T;

interface RCBPollingOptions extends SnooStormOptions {
subreddit: string,
clearProcessed?: ClearProcessedOptions
}

interface RCBPollConfiguration<T> extends PollConfiguration<T> {
clearProcessed?: ClearProcessedOptions
}

export class SPoll<T extends object> extends Poll<T> {
identifier: keyof T;
getter: () => Awaitable<T[]>;
frequency;
running: boolean = false;
clearProcessedDuration?: Duration;
clearProcessedSize?: number;
clearProcessedAfter?: Dayjs;
retainProcessed: number = 0;
randInterval?: { clear: () => void };

constructor(options: PollConfiguration<T>) {
constructor(options: RCBPollConfiguration<T>) {
super(options);
this.identifier = options.identifier;
this.getter = options.get;
this.frequency = options.frequency;
const {
after,
size,
retain = 0,
} = options.clearProcessed || {};
if(after !== undefined) {
this.clearProcessedDuration = parseDuration(after);
}
this.clearProcessedSize = size;
this.retainProcessed = retain;
if (this.clearProcessedDuration !== undefined) {
this.clearProcessedAfter = dayjs().add(this.clearProcessedDuration.asSeconds(), 's');
}
clearInterval(this.interval);
}

startInterval = () => {
this.running = true;
this.interval = setInterval(async () => {
try {
const batch = await this.getter();
const newItems: T[] = [];
for (const item of batch) {
const id = item[this.identifier];
if (this.processed.has(id)) continue;
this.randInterval = setRandomInterval((function (self) {
return async () => {
try {
// clear the tracked, processed activity ids after a set period or number of activities have been processed
// because when RCB is long-running and has streams from high-volume subreddits this list never gets smaller...

// Emit for new items and add it to the list
newItems.push(item);
this.processed.add(id);
this.emit("item", item);
// so clear if after time period
if ((self.clearProcessedAfter !== undefined && dayjs().isSameOrAfter(self.clearProcessedAfter))
// or clear if processed list is larger than defined max allowable size (default setting, 2 * polling option limit)
|| (self.clearProcessedSize !== undefined && self.processed.size >= self.clearProcessedSize)) {
if (self.retainProcessed === 0) {
self.processed = new Set();
} else {
// retain some processed so we have continuity between processed list resets -- this is default behavior and retains polling option limit # of activities
// we can slice from the set here because ID order is guaranteed for Set object so list is oldest -> newest
// -- retain last LIMIT number of activities (or all if retain # is larger than list due to user config error)
self.processed = new Set(Array.from(self.processed).slice(Math.max(0, self.processed.size - self.retainProcessed)));
}
// reset time interval if there is one
if (self.clearProcessedAfter !== undefined && self.clearProcessedDuration !== undefined) {
self.clearProcessedAfter = dayjs().add(self.clearProcessedDuration.asSeconds(), 's');
}
}
const batch = await self.getter();
const newItems: T[] = [];
for (const item of batch) {
const id = item[self.identifier];
if (self.processed.has(id)) continue;

// Emit for new items and add it to the list
newItems.push(item);
self.processed.add(id);
self.emit("item", item);
}

// Emit the new listing of all new items
self.emit("listing", newItems);
} catch (err) {
self.emit('error', err);
self.end();
}

// Emit the new listing of all new items
this.emit("listing", newItems);
} catch (err) {
this.emit('error', err);
this.end();
}
}, this.frequency);
})(this), this.frequency - 1, this.frequency + 1);
}

end = () => {
this.running = false;
if(this.randInterval !== undefined) {
this.randInterval.clear();
}
super.end();
}
}
@@ -54,11 +110,12 @@ export class SPoll<T extends object> extends Poll<T> {
export class UnmoderatedStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
constructor(
client: Snoowrap,
options: SnooStormOptions & { subreddit: string }) {
options: RCBPollingOptions) {
super({
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
get: async () => client.getSubreddit(options.subreddit).getUnmoderated(options),
identifier: "id",
clearProcessed: options.clearProcessed
});
}
}
@@ -66,11 +123,12 @@ export class UnmoderatedStream extends SPoll<Snoowrap.Submission | Snoowrap.Comm
export class ModQueueStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
constructor(
client: Snoowrap,
options: SnooStormOptions & { subreddit: string }) {
options: RCBPollingOptions) {
super({
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
get: async () => client.getSubreddit(options.subreddit).getModqueue(options),
identifier: "id",
clearProcessed: options.clearProcessed
});
}
}
@@ -78,11 +136,12 @@ export class ModQueueStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment
export class SubmissionStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
constructor(
client: Snoowrap,
options: SnooStormOptions & { subreddit: string }) {
options: RCBPollingOptions) {
super({
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
get: async () => client.getNew(options.subreddit, options),
identifier: "id",
clearProcessed: options.clearProcessed
});
}
}
@@ -90,11 +149,12 @@ export class SubmissionStream extends SPoll<Snoowrap.Submission | Snoowrap.Comme
export class CommentStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
constructor(
client: Snoowrap,
options: SnooStormOptions & { subreddit: string }) {
options: RCBPollingOptions) {
super({
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
get: async () => client.getNewComments(options.subreddit, options),
identifier: "id",
clearProcessed: options.clearProcessed
});
}
}

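The `clearProcessed` option destructured in the `SPoll` constructor above takes `after` (a duration string parsed by `parseDuration`), `size` (the maximum number of tracked activity IDs before a reset; per the inline comment the default is 2x the polling limit), and `retain` (how many of the most recent IDs to keep across resets for continuity). A hypothetical polling entry using it might look like this; the surrounding field placement in a real ContextMod config may differ, so treat this as a sketch:

```json
{
  "polling": [
    {
      "pollOn": "newComm",
      "limit": 50,
      "interval": 30,
      "clearProcessed": {
        "after": "1 hour",
        "size": 100,
        "retain": 50
      }
    }
  ]
}
```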
@@ -1,4 +1,4 @@
import Snoowrap, {RedditUser, Subreddit} from "snoowrap";
import Snoowrap, {RedditUser} from "snoowrap";
import objectHash from 'object-hash';
import {
activityIsDeleted, activityIsFiltered,
@@ -9,22 +9,35 @@
testAuthorCriteria
} from "../Utils/SnoowrapUtils";
import winston, {Logger} from "winston";
import as from 'async';
import fetch from 'node-fetch';
import {
asSubmission,
buildCacheOptionsFromProvider, buildCachePrefix,
cacheStats, comparisonTextOp, createCacheManager,
cacheStats, compareDurationValue, comparisonTextOp, createCacheManager, createHistoricalStatsDisplay,
formatNumber, getActivityAuthorName, getActivitySubredditName, isStrongSubredditState,
mergeArr,
mergeArr, parseDurationComparison,
parseExternalUrl, parseGenericValueComparison,
parseWikiContext, toStrongSubredditState
parseWikiContext, shouldCacheSubredditStateCriteriaResult, subredditStateIsNameOnly, toStrongSubredditState
} from "../util";
import LoggedError from "../Utils/LoggedError";
import {
BotInstanceConfig,
CacheOptions, CommentState,
Footer, OperatorConfig, ResourceStats, StrongCache, SubmissionState,
CacheConfig, TTLConfig, TypedActivityStates, UserResultCache, ActionedEvent, SubredditState, StrongSubredditState
CacheOptions,
CommentState,
Footer,
OperatorConfig,
ResourceStats,
StrongCache,
SubmissionState,
CacheConfig,
TTLConfig,
TypedActivityStates,
UserResultCache,
ActionedEvent,
SubredditState,
StrongSubredditState,
HistoricalStats, HistoricalStatUpdateData, SubredditHistoricalStats, SubredditHistoricalStatsDisplay
} from "../Common/interfaces";
import UserNotes from "./UserNotes";
import Mustache from "mustache";
@@ -32,9 +45,12 @@ import he from "he";
import {AuthorCriteria} from "../Author/Author";
import {SPoll} from "./Streams";
import {Cache} from 'cache-manager';
import {Submission, Comment} from "snoowrap/dist/objects";
import {cacheTTLDefaults} from "../Common/defaults";
import {Submission, Comment, Subreddit} from "snoowrap/dist/objects";
import {cacheTTLDefaults, createHistoricalDefaults, historicalDefaults} from "../Common/defaults";
import {check} from "tcp-port-used";
import {ExtendedSnoowrap} from "../Utils/SnoowrapClients";
import dayjs from "dayjs";
import ImageData from "../Common/ImageData";

export const DEFAULT_FOOTER = '\r\n*****\r\nThis action was performed by [a bot.]({{botLink}}) Mention a moderator or [send a modmail]({{modmailLink}}) if you any ideas, questions, or concerns about this action.';

@@ -42,7 +58,7 @@ export interface SubredditResourceConfig extends Footer {
caching?: CacheConfig,
subreddit: Subreddit,
logger: Logger;
client: Snoowrap
client: ExtendedSnoowrap
}

interface SubredditResourceOptions extends Footer {
@@ -52,7 +68,7 @@ interface SubredditResourceOptions extends Footer {
cacheSettingsHash: string
subreddit: Subreddit,
logger: Logger;
client: Snoowrap;
client: ExtendedSnoowrap;
prefix?: string;
actionedEventsMax: number;
}
@@ -74,15 +90,19 @@ export class SubredditResources {
userNotes: UserNotes;
footer: false | string = DEFAULT_FOOTER;
subreddit: Subreddit
client: Snoowrap
client: ExtendedSnoowrap
cache: Cache
cacheType: string
cacheSettingsHash?: string;
pruneInterval?: any;
historicalSaveInterval?: any;
prefix?: string
actionedEventsMax: number;

stats: { cache: ResourceStats };
stats: {
cache: ResourceStats
historical: SubredditHistoricalStats
};

constructor(name: string, options: SubredditResourceOptions) {
const {
@@ -127,7 +147,11 @@
}

this.stats = {
cache: cacheStats()
cache: cacheStats(),
historical: {
allTime: createHistoricalDefaults(),
lastReload: createHistoricalDefaults()
}
};

const cacheUseCB = (miss: boolean) => {
@@ -151,10 +175,97 @@
}
}

async initHistoricalStats() {
const at = await this.cache.wrap(`${this.name}-historical-allTime`, () => createHistoricalDefaults(), {ttl: 0}) as object;
const rehydratedAt: any = {};
for(const [k, v] of Object.entries(at)) {
if(Array.isArray(v)) {
rehydratedAt[k] = new Map(v);
} else {
rehydratedAt[k] = v;
}
}
this.stats.historical.allTime = rehydratedAt as HistoricalStats;

// const lr = await this.cache.wrap(`${this.name}-historical-lastReload`, () => createHistoricalDefaults(), {ttl: 0}) as object;
// const rehydratedLr: any = {};
// for(const [k, v] of Object.entries(lr)) {
// if(Array.isArray(v)) {
// rehydratedLr[k] = new Map(v);
// } else {
// rehydratedLr[k] = v;
// }
// }
// this.stats.historical.lastReload = rehydratedLr;
}

updateHistoricalStats(data: HistoricalStatUpdateData) {
for(const [k, v] of Object.entries(data)) {
if(this.stats.historical.lastReload[k] !== undefined) {
if(typeof v === 'number') {
this.stats.historical.lastReload[k] += v;
} else if(this.stats.historical.lastReload[k] instanceof Map) {
const keys = Array.isArray(v) ? v : [v];
for(const key of keys) {
this.stats.historical.lastReload[k].set(key, (this.stats.historical.lastReload[k].get(key) || 0) + 1);
}
}
}
if(this.stats.historical.allTime[k] !== undefined) {
if(typeof v === 'number') {
this.stats.historical.allTime[k] += v;
} else if(this.stats.historical.allTime[k] instanceof Map) {
const keys = Array.isArray(v) ? v : [v];
for(const key of keys) {
this.stats.historical.allTime[k].set(key, (this.stats.historical.allTime[k].get(key) || 0) + 1);
}
}
}
}
}

getHistoricalDisplayStats(): SubredditHistoricalStatsDisplay {
return {
allTime: createHistoricalStatsDisplay(this.stats.historical.allTime),
lastReload: createHistoricalStatsDisplay(this.stats.historical.lastReload)
}
}

async saveHistoricalStats() {
const atSerializable: any = {};
for(const [k, v] of Object.entries(this.stats.historical.allTime)) {
if(v instanceof Map) {
atSerializable[k] = Array.from(v.entries());
} else {
atSerializable[k] = v;
}
}
await this.cache.set(`${this.name}-historical-allTime`, atSerializable, {ttl: 0});

// const lrSerializable: any = {};
// for(const [k, v] of Object.entries(this.stats.historical.lastReload)) {
// if(v instanceof Map) {
// lrSerializable[k] = Array.from(v.entries());
// } else {
// lrSerializable[k] = v;
// }
// }
// await this.cache.set(`${this.name}-historical-lastReload`, lrSerializable, {ttl: 0});
}

setHistoricalSaveInterval() {
this.historicalSaveInterval = setInterval((function(self) {
return async () => {
await self.saveHistoricalStats();
}
})(this),10000);
}

async getCacheKeyCount() {
if (this.cache.store.keys !== undefined) {
if(this.cacheType === 'redis') {
return (await this.cache.store.keys(`${this.prefix}*`)).length;
const keys = await this.cache.store.keys(`${this.prefix}*`);
return keys.length;
}
return (await this.cache.store.keys()).length;
}
@@ -276,19 +387,20 @@
async getSubreddit(item: Submission | Comment) {
try {
let hash = '';
const subName = getActivitySubredditName(item);
if (this.subredditTTL !== false) {
hash = `sub-${getActivitySubredditName(item)}`;
hash = `sub-${subName}`;
await this.stats.cache.subreddit.identifierRequestCount.set(hash, (await this.stats.cache.subreddit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.subreddit.requestTimestamps.push(Date.now());
this.stats.cache.subreddit.requests++;
const cachedSubreddit = await this.cache.get(hash);
if (cachedSubreddit !== undefined && cachedSubreddit !== null) {
this.logger.debug(`Cache Hit: Subreddit ${item.subreddit.display_name}`);
this.logger.debug(`Cache Hit: Subreddit ${subName}`);
// @ts-ignore
return cachedSubreddit as Subreddit;
}
// @ts-ignore
const subreddit = await this.client.getSubreddit(getActivitySubredditName(item)).fetch() as Subreddit;
const subreddit = await this.client.getSubreddit(subName).fetch() as Subreddit;
this.stats.cache.subreddit.miss++;
// @ts-ignore
await this.cache.set(hash, subreddit, {ttl: this.subredditTTL});
@@ -296,7 +408,7 @@
return subreddit as Subreddit;
} else {
// @ts-ignore
let subreddit = await this.client.getSubreddit(getActivitySubredditName(item));
let subreddit = await this.client.getSubreddit(subName);

return subreddit as Subreddit;
}
@@ -306,6 +418,21 @@
}
}

async hasSubreddit(name: string) {
if (this.subredditTTL !== false) {
const hash = `sub-${name}`;
this.stats.cache.subreddit.requests++
this.stats.cache.subreddit.requestTimestamps.push(Date.now());
await this.stats.cache.subreddit.identifierRequestCount.set(hash, (await this.stats.cache.subreddit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
const val = await this.cache.get(hash);
if(val === undefined || val === null) {
this.stats.cache.subreddit.miss++;
}
return val !== undefined && val !== null;
}
return false;
}

async getAuthorActivities(user: RedditUser, options: AuthorTypedActivitiesOptions): Promise<Array<Submission | Comment>> {
const userName = getActivityAuthorName(user);
if (this.authorTTL !== false) {
@@ -427,6 +554,67 @@
return wikiContent;
}

async cacheSubreddits(subs: (Subreddit | string)[]) {
const allSubs = subs.map(x => typeof x !== 'string' ? x.display_name : x);
const subNames = [...new Set(allSubs)];
const uncachedSubs = [];

for(const s of subNames) {
if(!(await this.hasSubreddit(s))) {
uncachedSubs.push(s);
}
}
if(uncachedSubs.length > 0) {
// cache all uncached subs batchly-like
const subResults = await this.client.getManySubreddits(uncachedSubs);
for(const s of subResults) {
// @ts-ignore
await this.cache.set(`sub-${s.display_name}`, s, {ttl: this.subredditTTL});
}
}
}

async batchTestSubredditCriteria(items: (Comment | Submission)[], states: (SubredditState | StrongSubredditState)[]): Promise<(Comment | Submission)[]> {
let passedItems: (Comment | Submission)[] = [];
let unpassedItems: (Comment | Submission)[] = [];

const {nameOnly = [], full = []} = states.reduce((acc: {nameOnly: (SubredditState | StrongSubredditState)[], full: (SubredditState | StrongSubredditState)[]}, curr) => {
if(subredditStateIsNameOnly(curr)) {
return {...acc, nameOnly: acc.nameOnly.concat(curr)};
}
return {...acc, full: acc.full.concat(curr)};
}, {nameOnly: [], full: []});

if(nameOnly.length === 0) {
unpassedItems = items;
} else {
for(const item of items) {
const subName = getActivitySubredditName(item);
for(const state of nameOnly) {
if(await this.isSubreddit({display_name: subName} as Subreddit, state, this.logger)) {
passedItems.push(item);
break;
}
}
unpassedItems.push(item);
}
}

if(unpassedItems.length > 0 && full.length > 0) {
await this.cacheSubreddits(unpassedItems.map(x => x.subreddit));
for(const item of unpassedItems) {
for(const state of full) {
if(await this.isSubreddit(await this.getSubreddit(item), state, this.logger)) {
passedItems.push(item);
break;
}
}
}
}

return passedItems;
}

async testSubredditCriteria(item: (Comment | Submission), state: SubredditState | StrongSubredditState) {
if(Object.keys(state).length === 0) {
return true;
@@ -441,7 +629,8 @@
return await this.isSubreddit({display_name: subName} as Subreddit, state, this.logger);
}

if (this.filterCriteriaTTL !== false) {
// see comments on shouldCacheSubredditStateCriteriaResult() for why this is needed
if (this.filterCriteriaTTL !== false && shouldCacheSubredditStateCriteriaResult(state)) {
try {
const hash = `subredditCrit-${getActivitySubredditName(item)}-${objectHash.sha1(state)}`;
await this.stats.cache.subredditCrit.identifierRequestCount.set(hash, (await this.stats.cache.subredditCrit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
@@ -497,6 +686,10 @@
}

async testItemCriteria(i: (Comment | Submission), activityStates: TypedActivityStates) {
// return early if nothing is being checked for so we don't store an empty cache result for this (duh)
if(activityStates.length === 0) {
return true;
}
if (this.filterCriteriaTTL !== false) {
let item = i;
let states = activityStates;
@@ -555,9 +748,20 @@
return false;
}
break;
case 'over18':
case 'over_18':
// handling an edge case where user may have confused Comment/Submission state "over_18" with SubredditState "over18"

// @ts-ignore
if (crit[k] !== subreddit.over18) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${subreddit.over18}`)
return false
}
break;
default:
// @ts-ignore
if (crit[k] !== undefined) {
if (subreddit[k] !== undefined) {
// @ts-ignore
if (crit[k] !== subreddit[k]) {
// @ts-ignore
@@ -645,6 +849,13 @@
return false
}
break;
case 'age':
const ageTest = compareDurationValue(parseDurationComparison(crit[k] as string), dayjs.unix(await item.created));
if (!ageTest) {
log.debug(`Failed: Activity did not pass age test "${crit[k] as string}"`);
return false;
}
break;
case 'title':
if((item instanceof Comment)) {
log.warn('`title` is not allowed in `itemIs` criteria when the main Activity is a Comment');
@@ -693,6 +904,8 @@
const userName = getActivityAuthorName(item.author);
const hash = `commentUserResult-${userName}-${item.link_id}-${objectHash.sha1(checkConfig)}`;
this.stats.cache.commentCheck.requests++;
this.stats.cache.commentCheck.requestTimestamps.push(Date.now());
await this.stats.cache.commentCheck.identifierRequestCount.set(hash, (await this.stats.cache.commentCheck.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
let result = await this.cache.get(hash) as UserResultCache | undefined | null;
if(result === null) {
result = undefined;
@@ -723,6 +936,33 @@
const footerRawContent = await this.getContent(footer, item.subreddit);
return he.decode(Mustache.render(footerRawContent, {subName, permaLink, modmailLink, botLink: BOT_LINK}));
}

async getImageHash(img: ImageData): Promise<string|undefined> {
const hash = `imgHash-${img.baseUrl}`;
const result = await this.cache.get(hash) as string | undefined | null;
this.stats.cache.imageHash.requests++
this.stats.cache.imageHash.requestTimestamps.push(Date.now());
await this.stats.cache.imageHash.identifierRequestCount.set(hash, (await this.stats.cache.imageHash.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
if(result !== undefined && result !== null) {
return result;
}
this.stats.cache.commentCheck.miss++;
return undefined;
// const hash = await this.cache.wrap(img.baseUrl, async () => await img.hash(true), { ttl }) as string;
// if(img.hashResult === undefined) {
// img.hashResult = hash;
// }
// return hash;
}

async setImageHash(img: ImageData, hash: string, ttl: number): Promise<void> {
await this.cache.set(`imgHash-${img.baseUrl}`, hash, {ttl});
// const hash = await this.cache.wrap(img.baseUrl, async () => await img.hash(true), { ttl }) as string;
// if(img.hashResult === undefined) {
// img.hashResult = hash;
// }
// return hash;
}
}

export class BotResourcesManager {
@@ -791,7 +1031,7 @@
return undefined;
}

set(subName: string, initOptions: SubredditResourceConfig): SubredditResources {
async set(subName: string, initOptions: SubredditResourceConfig): Promise<SubredditResources> {
let hash = 'default';
const { caching, ...init } = initOptions;

@@ -841,6 +1081,8 @@
res.cache.reset();
}
resource = new SubredditResources(subName, opts);
await resource.initHistoricalStats();
resource.setHistoricalSaveInterval();
this.resources.set(subName, resource);
} else {
// just set non-cache related settings
@@ -851,6 +1093,7 @@
// reset cache stats when configuration is reloaded
resource.stats.cache = cacheStats();
}
resource.stats.historical.lastReload = createHistoricalDefaults();

return resource;
}

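The `saveHistoricalStats`/`initHistoricalStats` pair above works around the fact that `Map` values are not serializable by most cache stores: Maps are flattened to entry arrays on save and rehydrated with the `Map` constructor on load. A minimal sketch of that round-trip:

```typescript
// Maps are stored as entry arrays (JSON-safe) and rebuilt with the Map constructor.
const counts = new Map<string, number>([['spam', 3], ['selfpromo', 1]]);
const serialized = Array.from(counts.entries()); // [['spam', 3], ['selfpromo', 1]]
const rehydrated = new Map(serialized);          // Map(2) { 'spam' => 3, 'selfpromo' => 1 }
console.log(rehydrated.get('spam'));             // 3
```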
@@ -1,4 +1,6 @@
import Snoowrap from "snoowrap";
import Snoowrap, {Listing} from "snoowrap";
import {Subreddit} from "snoowrap/dist/objects";
import {parseSubredditName} from "../util";

// const proxyFactory = (endpoint: string) => {
// return class ProxiedSnoowrap extends Snoowrap {
@@ -12,7 +14,28 @@
// }
// }

export class RequestTrackingSnoowrap extends Snoowrap {
export class ExtendedSnoowrap extends Snoowrap {
/**
* https://www.reddit.com/r/redditdev/comments/jfltfx/comment/g9le48w/?utm_source=reddit&utm_medium=web2x&context=3
* */
async getManySubreddits(subs: (Subreddit | string)[]): Promise<Listing<Subreddit>> {
// parse all names
const names = subs.map(x => {
if(typeof x !== 'string') {
return x.display_name;
}
try {
return parseSubredditName(x);
} catch (err) {
return x;
}
});

return await this.oauthRequest({uri: '/api/info', method: 'get', qs: { sr_name: names.join(',')}}) as Listing<Subreddit>;
}
}

export class RequestTrackingSnoowrap extends ExtendedSnoowrap {
requestCount: number = 0;

oauthRequest(...args: any) {

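A hypothetical usage of the batching helper above; the constructor credentials are placeholders and the subreddit names illustrative. The point is that one `/api/info?sr_name=a,b,c` request replaces N individual subreddit fetches:

```typescript
import {ExtendedSnoowrap} from "./Utils/SnoowrapClients";

const client = new ExtendedSnoowrap({
    userAgent: 'my-bot/0.1',   // placeholder credentials
    clientId: 'xxx',
    clientSecret: 'xxx',
    refreshToken: 'xxx',
});

// one round trip for all three subreddits instead of three
const subs = await client.getManySubreddits(['pics', 'r/videos', 'AskReddit']);
for (const sub of subs) {
    console.log(`${sub.display_name} => over18: ${sub.over18}`);
}
```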
@@ -14,8 +14,8 @@ import {
|
||||
} from "../Common/interfaces";
|
||||
import {
|
||||
compareDurationValue,
|
||||
comparisonTextOp,
|
||||
isActivityWindowCriteria,
|
||||
comparisonTextOp, getActivityAuthorName,
|
||||
isActivityWindowCriteria, isStatusError,
|
||||
normalizeName,
|
||||
parseDuration,
|
||||
parseDurationComparison,
|
||||
@@ -125,17 +125,25 @@ export async function getAuthorActivities(user: RedditUser, options: AuthorTyped
|
||||
|
||||
let items: Array<Submission | Comment> = [];
|
||||
//let count = 1;
|
||||
let listing;
|
||||
switch (options.type) {
|
||||
case 'comment':
|
||||
listing = await user.getComments({limit: chunkSize});
|
||||
break;
|
||||
case 'submission':
|
||||
listing = await user.getSubmissions({limit: chunkSize});
|
||||
break;
|
||||
default:
|
||||
listing = await user.getOverview({limit: chunkSize});
|
||||
break;
|
||||
let listing = [];
|
||||
try {
|
||||
switch (options.type) {
|
||||
case 'comment':
|
||||
listing = await user.getComments({limit: chunkSize});
|
||||
break;
|
||||
case 'submission':
|
||||
listing = await user.getSubmissions({limit: chunkSize});
|
||||
break;
|
||||
default:
|
||||
listing = await user.getOverview({limit: chunkSize});
|
||||
break;
|
||||
}
|
||||
} catch (err) {
|
||||
if(isStatusError(err) && err.statusCode === 404) {
|
||||
throw new SimpleError('Reddit returned a 404 for user history. Likely this user is shadowbanned.');
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
let hitEnd = false;
|
||||
let offset = chunkSize;
|
||||
@@ -317,179 +325,211 @@ export const renderContent = async (template: string, data: (Submission | Commen
|
||||
}
|
||||
|
||||
export const testAuthorCriteria = async (item: (Comment | Submission), authorOpts: AuthorCriteria, include = true, userNotes: UserNotes) => {
|
||||
// @ts-ignore
|
||||
const author: RedditUser = await item.author;
|
||||
for (const k of Object.keys(authorOpts)) {
|
||||
// @ts-ignore
|
||||
if (authorOpts[k] !== undefined) {
|
||||
switch (k) {
|
||||
case 'name':
|
||||
const authPass = () => {
|
||||
// @ts-ignore
|
||||
for (const n of authorOpts[k]) {
|
||||
if (n.toLowerCase() === author.name.toLowerCase()) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
const authResult = authPass();
|
||||
if ((include && !authResult) || (!include && authResult)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'flairCssClass':
|
||||
const css = await item.author_flair_css_class;
|
||||
const cssPass = () => {
|
||||
// @ts-ignore
|
||||
for (const c of authorOpts[k]) {
|
||||
if (c === css) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
const cssResult = cssPass();
|
||||
if ((include && !cssResult) || (!include && cssResult)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'flairText':
|
||||
const text = await item.author_flair_text;
|
||||
const textPass = () => {
|
||||
// @ts-ignore
|
||||
for (const c of authorOpts[k]) {
|
||||
if (c === text) {
|
||||
return
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
const textResult = textPass();
|
||||
if ((include && !textResult) || (!include && textResult)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'isMod':
|
||||
const mods: RedditUser[] = await item.subreddit.getModerators();
|
||||
const isModerator = mods.some(x => x.name === item.author.name);
|
||||
const modMatch = authorOpts.isMod === isModerator;
|
||||
if ((include && !modMatch) || (!include && modMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'age':
|
||||
const ageTest = compareDurationValue(parseDurationComparison(await authorOpts.age as string), dayjs.unix(await item.author.created));
|
||||
if ((include && !ageTest) || (!include && ageTest)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'linkKarma':
|
||||
const lkCompare = parseGenericValueOrPercentComparison(await authorOpts.linkKarma as string);
|
||||
let lkMatch;
|
||||
if (lkCompare.isPercent) {
|
||||
// @ts-ignore
|
||||
const tk = author.total_karma as number;
|
||||
lkMatch = comparisonTextOp(author.link_karma / tk, lkCompare.operator, lkCompare.value / 100);
|
||||
} else {
|
||||
lkMatch = comparisonTextOp(author.link_karma, lkCompare.operator, lkCompare.value);
|
||||
}
|
||||
if ((include && !lkMatch) || (!include && lkMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'commentKarma':
|
||||
const ckCompare = parseGenericValueOrPercentComparison(await authorOpts.commentKarma as string);
|
||||
let ckMatch;
|
||||
if (ckCompare.isPercent) {
|
||||
// @ts-ignore
|
||||
const ck = author.total_karma as number;
|
||||
ckMatch = comparisonTextOp(author.comment_karma / ck, ckCompare.operator, ckCompare.value / 100);
|
||||
} else {
|
||||
ckMatch = comparisonTextOp(author.comment_karma, ckCompare.operator, ckCompare.value);
|
||||
}
|
||||
if ((include && !ckMatch) || (!include && ckMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'totalKarma':
|
||||
const tkCompare = parseGenericValueComparison(await authorOpts.totalKarma as string);
|
||||
if (tkCompare.isPercent) {
|
||||
throw new SimpleError(`'totalKarma' value on AuthorCriteria cannot be a percentage`);
|
||||
}
|
||||
// @ts-ignore
|
||||
const totalKarma = author.total_karma as number;
|
||||
const tkMatch = comparisonTextOp(totalKarma, tkCompare.operator, tkCompare.value);
|
||||
if ((include && !tkMatch) || (!include && tkMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'verified':
|
||||
const vMatch = await author.has_verified_mail === authorOpts.verified as boolean;
|
||||
if ((include && !vMatch) || (!include && vMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'userNotes':
|
||||
const notes = await userNotes.getUserNotes(item.author);
|
||||
const notePass = () => {
|
||||
for (const noteCriteria of authorOpts[k] as UserNoteCriteria[]) {
|
||||
const {count = '>= 1', search = 'current', type} = noteCriteria;
|
||||
const {
|
||||
value,
|
||||
operator,
|
||||
isPercent,
|
||||
extra = ''
|
||||
} = parseGenericValueOrPercentComparison(count);
|
||||
const order = extra.includes('asc') ? 'ascending' : 'descending';
|
||||
switch (search) {
|
||||
case 'current':
|
||||
if (notes.length > 0 && notes[notes.length - 1].noteType === type) {
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
case 'consecutive':
|
||||
let orderedNotes = notes;
|
||||
if (order === 'descending') {
|
||||
orderedNotes = [...notes];
|
||||
orderedNotes.reverse();
|
||||
}
|
||||
let currCount = 0;
|
||||
for (const note of orderedNotes) {
|
||||
if (note.noteType === type) {
|
||||
currCount++;
|
||||
} else {
|
||||
currCount = 0;
|
||||
}
|
||||
if (isPercent) {
|
||||
throw new SimpleError(`When comparing UserNotes with 'consecutive' search 'count' cannot be a percentage. Given: ${count}`);
|
||||
}
|
||||
if (comparisonTextOp(currCount, operator, value)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
break;
|
||||
case 'total':
|
||||
if (isPercent) {
|
||||
if (comparisonTextOp(notes.filter(x => x.noteType === type).length / notes.length, operator, value / 100)) {
|
||||
return true;
|
||||
}
|
||||
} else if (comparisonTextOp(notes.filter(x => x.noteType === type).length, operator, value)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
const noteResult = notePass();
|
||||
if ((include && !noteResult) || (!include && noteResult)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
const {shadowBanned, ...rest} = authorOpts;
|
||||
|
||||
if(shadowBanned !== undefined) {
|
||||
try {
|
||||
// @ts-ignore
|
||||
await item.author.fetch();
|
||||
// user is not shadowbanned
|
||||
// if criteria specifies they SHOULD be shadowbanned then return false now
|
||||
if(shadowBanned) {
|
||||
return false;
|
||||
}
|
||||
} catch (err) {
|
||||
if(isStatusError(err) && err.statusCode === 404) {
|
||||
// user is shadowbanned
|
||||
// if criteria specifies they should not be shadowbanned then return false now
|
||||
if(!shadowBanned) {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
|
||||
try {
|
||||
const authorName = getActivityAuthorName(item.author);
|
||||
|
||||
for (const k of Object.keys(rest)) {
|
||||
// @ts-ignore
|
||||
if (authorOpts[k] !== undefined) {
|
||||
switch (k) {
|
||||
case 'name':
|
||||
const authPass = () => {
|
||||
// @ts-ignore
|
||||
for (const n of authorOpts[k]) {
|
||||
if (n.toLowerCase() === authorName.toLowerCase()) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
const authResult = authPass();
|
||||
if ((include && !authResult) || (!include && authResult)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'flairCssClass':
|
||||
const css = await item.author_flair_css_class;
|
||||
const cssPass = () => {
|
||||
// @ts-ignore
|
||||
for (const c of authorOpts[k]) {
|
||||
if (c === css) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
const cssResult = cssPass();
|
||||
if ((include && !cssResult) || (!include && cssResult)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'flairText':
|
||||
const text = await item.author_flair_text;
|
||||
const textPass = () => {
|
||||
// @ts-ignore
|
||||
for (const c of authorOpts[k]) {
|
||||
if (c === text) {
|
||||
return
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
const textResult = textPass();
|
||||
if ((include && !textResult) || (!include && textResult)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'isMod':
|
||||
const mods: RedditUser[] = await item.subreddit.getModerators();
|
||||
const isModerator = mods.some(x => x.name === authorName);
|
||||
const modMatch = authorOpts.isMod === isModerator;
|
||||
if ((include && !modMatch) || (!include && modMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'age':
|
||||
const ageTest = compareDurationValue(parseDurationComparison(await authorOpts.age as string), dayjs.unix(await item.author.created));
|
||||
if ((include && !ageTest) || (!include && ageTest)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'linkKarma':
|
||||
const lkCompare = parseGenericValueOrPercentComparison(await authorOpts.linkKarma as string);
|
||||
let lkMatch;
|
||||
if (lkCompare.isPercent) {
|
||||
// @ts-ignore
|
||||
const tk = await item.author.total_karma as number;
|
||||
lkMatch = comparisonTextOp(item.author.link_karma / tk, lkCompare.operator, lkCompare.value / 100);
|
||||
} else {
|
||||
lkMatch = comparisonTextOp(item.author.link_karma, lkCompare.operator, lkCompare.value);
|
||||
}
|
||||
if ((include && !lkMatch) || (!include && lkMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'commentKarma':
|
||||
const ckCompare = parseGenericValueOrPercentComparison(await authorOpts.commentKarma as string);
|
||||
let ckMatch;
|
||||
if (ckCompare.isPercent) {
|
||||
// @ts-ignore
|
||||
const ck = await item.author.total_karma as number;
|
||||
ckMatch = comparisonTextOp(item.author.comment_karma / ck, ckCompare.operator, ckCompare.value / 100);
|
||||
} else {
|
||||
ckMatch = comparisonTextOp(item.author.comment_karma, ckCompare.operator, ckCompare.value);
|
||||
}
|
||||
if ((include && !ckMatch) || (!include && ckMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'totalKarma':
|
||||
const tkCompare = parseGenericValueComparison(await authorOpts.totalKarma as string);
|
||||
if (tkCompare.isPercent) {
|
||||
throw new SimpleError(`'totalKarma' value on AuthorCriteria cannot be a percentage`);
|
||||
}
|
||||
// @ts-ignore
|
||||
const totalKarma = await item.author.total_karma as number;
|
||||
const tkMatch = comparisonTextOp(totalKarma, tkCompare.operator, tkCompare.value);
|
||||
if ((include && !tkMatch) || (!include && tkMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'verified':
|
||||
const vMatch = await item.author.has_verified_mail === authorOpts.verified as boolean;
|
||||
if ((include && !vMatch) || (!include && vMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
                    case 'userNotes':
                        const notes = await userNotes.getUserNotes(item.author);
                        const notePass = () => {
                            for (const noteCriteria of authorOpts[k] as UserNoteCriteria[]) {
                                const {count = '>= 1', search = 'current', type} = noteCriteria;
                                const {
                                    value,
                                    operator,
                                    isPercent,
                                    extra = ''
                                } = parseGenericValueOrPercentComparison(count);
                                const order = extra.includes('asc') ? 'ascending' : 'descending';
                                switch (search) {
                                    case 'current':
                                        if (notes.length > 0 && notes[notes.length - 1].noteType === type) {
                                            return true;
                                        }
                                        break;
                                    case 'consecutive':
                                        let orderedNotes = notes;
                                        if (order === 'descending') {
                                            orderedNotes = [...notes];
                                            orderedNotes.reverse();
                                        }
                                        let currCount = 0;
                                        for (const note of orderedNotes) {
                                            if (note.noteType === type) {
                                                currCount++;
                                            } else {
                                                currCount = 0;
                                            }
                                            if (isPercent) {
                                                throw new SimpleError(`When comparing UserNotes with 'consecutive' search 'count' cannot be a percentage. Given: ${count}`);
                                            }
                                            if (comparisonTextOp(currCount, operator, value)) {
                                                return true;
                                            }
                                        }
                                        break;
                                    case 'total':
                                        if (isPercent) {
                                            if (comparisonTextOp(notes.filter(x => x.noteType === type).length / notes.length, operator, value / 100)) {
                                                return true;
                                            }
                                        } else if (comparisonTextOp(notes.filter(x => x.noteType === type).length, operator, value)) {
                                            return true;
                                        }
                                }
                            }
                            return false;
                        };
                        const noteResult = notePass();
                        if ((include && !noteResult) || (!include && noteResult)) {
                            return false;
                        }
                        break;
                }
            }
        }
        return true;
    } catch (err) {
        if (isStatusError(err) && err.statusCode === 404) {
            throw new SimpleError('Reddit returned a 404 while trying to retrieve User profile. It is likely this user is shadowbanned.');
        } else {
            throw err;
        }
    }
}

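Every criteria branch above funnels through the same include/exclude gate: on an include list a failed test rejects the author, while on an exclude list a passed test rejects. A minimal sketch of that gate as a standalone helper (the helper name is hypothetical, not part of the source):

// returns true when this criteria result should reject the author
// `include` mirrors the flag used above: true = include list, false = exclude list
const criteriaFails = (include: boolean, testResult: boolean): boolean => {
    return (include && !testResult) || (!include && testResult);
};

// criteriaFails(true, false) === true  -> include list, test failed, reject
// criteriaFails(false, true) === true  -> exclude list, test passed, reject
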
export interface ItemContent {

@@ -1,13 +1,10 @@
import {BotStats, BotStatusResponse, SubredditDataResponse} from "./interfaces";
import {ManagerStats, RunningState} from "../../Subreddit/Manager";
import {Invokee, RunState} from "../../Common/interfaces";
import {cacheStats} from "../../util";
import {RunningState} from "../../Subreddit/Manager";
import {Invokee, ManagerStats, RunState} from "../../Common/interfaces";
import {cacheStats, createHistoricalStatsDisplay} from "../../util";
import {createHistoricalDefaults, historicalDefaults} from "../../Common/defaults";

const managerStats: ManagerStats = {
    actionsRun: 0,
    actionsRunSinceStart: 0,
    actionsRunSinceStartTotal: 0,
    actionsRunTotal: 0,
    cache: {
        currentKeyCount: 0,
        isShared: false,
@@ -18,22 +15,12 @@ const managerStats: ManagerStats = {
        totalRequests: 0,
        types: cacheStats()
    },
    checksRunSinceStartTotal: 0,
    checksRunTotal: 0,
    checksTriggered: 0,
    checksTriggeredSinceStart: 0,
    checksTriggeredSinceStartTotal: 0,
    checksTriggeredTotal: 0,
    historical: {
        lastReload: createHistoricalStatsDisplay(createHistoricalDefaults()),
        allTime: createHistoricalStatsDisplay(createHistoricalDefaults()),
    },
    eventsAvg: 0,
    eventsCheckedSinceStartTotal: 0,
    eventsCheckedTotal: 0,
    rulesAvg: 0,
    rulesCachedSinceStartTotal: 0,
    rulesCachedTotal: 0,
    rulesRunSinceStartTotal: 0,
    rulesRunTotal: 0,
    rulesTriggeredSinceStartTotal: 0,
    rulesTriggeredTotal: 0,
};
const botStats: BotStats = {
    apiAvg: '-',

@@ -1,4 +1,5 @@
import {ManagerStats, RunningState} from "../../Subreddit/Manager";
import {RunningState} from "../../Subreddit/Manager";
import {ManagerStats} from "../../Common/interfaces";

export interface BotStats {
    startedAtHuman: string,

@@ -160,13 +160,19 @@ const status = () => {
        submissions: acc.checks.submissions + curr.checks.submissions,
        comments: acc.checks.comments + curr.checks.comments,
    },
    eventsCheckedTotal: acc.eventsCheckedTotal + curr.stats.eventsCheckedTotal,
    checksRunTotal: acc.checksRunTotal + curr.stats.checksRunTotal,
    checksTriggeredTotal: acc.checksTriggeredTotal + curr.stats.checksTriggeredTotal,
    rulesRunTotal: acc.rulesRunTotal + curr.stats.rulesRunTotal,
    rulesCachedTotal: acc.rulesCachedTotal + curr.stats.rulesCachedTotal,
    rulesTriggeredTotal: acc.rulesTriggeredTotal + curr.stats.rulesTriggeredTotal,
    actionsRunTotal: acc.actionsRunTotal + curr.stats.actionsRunTotal,
    historical: {
        allTime: {
            eventsCheckedTotal: acc.historical.allTime.eventsCheckedTotal + curr.stats.historical.allTime.eventsCheckedTotal,
            eventsActionedTotal: acc.historical.allTime.eventsActionedTotal + curr.stats.historical.allTime.eventsActionedTotal,
            checksRunTotal: acc.historical.allTime.checksRunTotal + curr.stats.historical.allTime.checksRunTotal,
            checksFromCacheTotal: acc.historical.allTime.checksFromCacheTotal + curr.stats.historical.allTime.checksFromCacheTotal,
            checksTriggeredTotal: acc.historical.allTime.checksTriggeredTotal + curr.stats.historical.allTime.checksTriggeredTotal,
            rulesRunTotal: acc.historical.allTime.rulesRunTotal + curr.stats.historical.allTime.rulesRunTotal,
            rulesCachedTotal: acc.historical.allTime.rulesCachedTotal + curr.stats.historical.allTime.rulesCachedTotal,
            rulesTriggeredTotal: acc.historical.allTime.rulesTriggeredTotal + curr.stats.historical.allTime.rulesTriggeredTotal,
            actionsRunTotal: acc.historical.allTime.actionsRunTotal + curr.stats.historical.allTime.actionsRunTotal,
        }
    },
    maxWorkers: acc.maxWorkers + curr.maxWorkers,
    subMaxWorkers: acc.subMaxWorkers + curr.subMaxWorkers,
    globalMaxWorkers: acc.globalMaxWorkers + curr.globalMaxWorkers,
@@ -178,13 +184,19 @@ const status = () => {
        submissions: 0,
        comments: 0,
    },
    eventsCheckedTotal: 0,
    checksRunTotal: 0,
    checksTriggeredTotal: 0,
    rulesRunTotal: 0,
    rulesCachedTotal: 0,
    rulesTriggeredTotal: 0,
    actionsRunTotal: 0,
    historical: {
        allTime: {
            eventsCheckedTotal: 0,
            eventsActionedTotal: 0,
            checksRunTotal: 0,
            checksFromCacheTotal: 0,
            checksTriggeredTotal: 0,
            rulesRunTotal: 0,
            rulesCachedTotal: 0,
            rulesTriggeredTotal: 0,
            actionsRunTotal: 0,
        }
    },
    maxWorkers: 0,
    subMaxWorkers: 0,
    globalMaxWorkers: 0,

@@ -333,64 +333,63 @@
<% } %>
<% if (data.name !== 'All') { %>
<div data-subreddit="<%= data.name %>"
class="stats botStats reloadStats">
class="stats botStats reloadStats mb-2">
<label>Events</label>
<span>
<%= data.stats.eventsCheckedSinceStartTotal === undefined ? '-' : data.stats.eventsCheckedSinceStartTotal %>
<%= data.stats.historical.lastReload.eventsCheckedTotal === undefined ? '-' : data.stats.historical.lastReload.eventsCheckedTotal %>
</span>
<label>Checks</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.checksTriggeredSinceStartTotal %></span> Triggered / <span><%= data.stats.checksRunSinceStartTotal %></span> Run
<span><%= data.stats.historical.lastReload.checksTriggeredTotal %></span> Triggered / <span><%= data.stats.historical.lastReload.checksRunTotal %></span> Run / <span><%= data.stats.historical.lastReload.checksFromCacheTotal %></span> Cached
</span>
<% if (data.name !== 'All') { %>
<a target="_blank" href="/events?instance=<%= instanceId %>&bot=<%= bot.system.name %>&subreddit=<%= data.name %>" class="underline" style="text-decoration-style: dotted"><%= data.stats.checksTriggeredSinceStartTotal %> T</a>
<% } else { %>
<%= data.stats.checksTriggeredSinceStartTotal %> T
<% } %>/ <span><%= data.stats.checksRunSinceStartTotal %></span> R
<span class="cursor-help underline" style="text-decoration-style: dotted"><%= data.stats.historical.lastReload.checksTriggeredTotal %> Triggered</span>
</span>

<label>Rules</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.rulesTriggeredSinceStartTotal %></span> Triggered / <span><%= data.stats.rulesCachedSinceStartTotal %></span> Cached / <span><%= data.stats.rulesRunSinceStartTotal %></span> Run
<span><%= data.stats.historical.lastReload.rulesTriggeredTotal %></span> Triggered / <span><%= data.stats.historical.lastReload.rulesCachedTotal %></span> Cached / <span><%= data.stats.historical.lastReload.rulesRunTotal %></span> Run
</span>
<span class="cursor-help">
<span><%= data.stats.rulesTriggeredSinceStartTotal %></span> T / <span><%= data.stats.rulesCachedSinceStartTotal %></span> C / <span><%= data.stats.rulesRunSinceStartTotal %></span> R</span>
<span class="cursor-help cursor-help underline" style="text-decoration-style: dotted">
<span><%= data.stats.historical.lastReload.rulesTriggeredTotal %></span> Triggered</span>
</span>

<label>Actions</label>
<span class="cursor-help"><%= data.stats.actionsRunSinceStartTotal === undefined ? '-' : data.stats.actionsRunSinceStartTotal %></span>
<span><%= data.stats.historical.lastReload.actionsRunTotal === undefined ? '0' : data.stats.historical.lastReload.actionsRunTotal %> Run</span>
</div>
<% } %>
<div data-subreddit="<%= data.name %>" class="stats botStats allStats">
<div data-subreddit="<%= data.name %>" class="stats botStats allStats mb-2">
<label>Events</label>
<span>
<%= data.stats.eventsCheckedTotal %>
<%= data.stats.historical.allTime.eventsCheckedTotal %>
</span>
<label>Checks</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.checksTriggeredTotal %></span> Triggered / <span><%= data.stats.checksRunTotal %></span> Run
<span><%= data.stats.historical.allTime.checksTriggeredTotal %></span> Triggered / <span><%= data.stats.historical.allTime.checksRunTotal %></span> Run / <span><%= data.stats.historical.allTime.checksFromCacheTotal %></span> Cached
</span>
<span><%= data.stats.checksTriggeredTotal %> T / <span><%= data.stats.checksRunTotal %></span> R</span>
<span class="cursor-help underline" style="text-decoration-style: dotted"><%= data.stats.historical.allTime.checksTriggeredTotal %> Triggered</span>
</span>

<label>Rules</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.rulesTriggeredTotal %></span> Triggered / <span><%= data.stats.rulesCachedTotal %></span> Cached / <span><%= data.stats.rulesRunTotal %></span> Run
<span><%= data.stats.historical.allTime.rulesTriggeredTotal %></span> Triggered / <span><%= data.stats.historical.allTime.rulesCachedTotal %></span> Cached / <span><%= data.stats.historical.allTime.rulesRunTotal %></span> Run
</span>
<span class="cursor-help"><span><%= data.stats.rulesTriggeredTotal %></span> T / <span><%= data.stats.rulesCachedTotal %></span> C / <span><%= data.stats.rulesRunTotal %></span> R</span>
<span class="cursor-help underline" style="text-decoration-style: dotted"><span><%= data.stats.historical.allTime.rulesTriggeredTotal %></span> Triggered</span>
</span>

<label>Actions</label>
<% if (data.name !== 'All') { %>
<a target="_blank" href="/events?instance=<%= instanceId %>&bot=<%= bot.system.name %>&subreddit=<%= data.name %>" class="underline" style="text-decoration-style: dotted"><%= data.stats.actionsRunTotal %> Run</a>
<% } else { %>
<a target="_blank" href="/events?instance=<%= instanceId %>&bot=<%= bot.system.name %>" class="underline" style="text-decoration-style: dotted"><%= data.stats.actionsRunTotal %> Run</a>
<% } %>
<span>
<span><%= data.stats.historical.allTime.actionsRunTotal === undefined ? '0' : data.stats.historical.allTime.actionsRunTotal %> Run</span>
</span>
</div>
<% if (data.name !== 'All') { %>
<a target="_blank" href="/events?instance=<%= instanceId %>&bot=<%= bot.system.name %>&subreddit=<%= data.name %>" style="text-decoration-style: dotted">Actioned Events</a>
<% } else { %>
<a target="_blank" href="/events?instance=<%= instanceId %>&bot=<%= bot.system.name %>">Actioned Events</a>
<% } %>
</div>
<div>
<div class="text-left pb-2">
@@ -495,6 +494,16 @@
<span>
</span>

<label class="newRow">Subreddit Criteria</label>
<span class="newRow"><%= data.stats.cache.types.subredditCrit.requests %> | <%= data.stats.cache.types.subredditCrit.miss %> (<%= data.stats.cache.types.subredditCrit.missPercent %>) miss</span>
<span class="newRow">
- <span class="font-mono">SubredditState</span> results
</span>
<label>Avgs</label>
<span>Hits/Key <%= data.stats.cache.types.subredditCrit.identifierAverageHit %> | Hit Interval <%= data.stats.cache.types.subredditCrit.averageTimeBetweenHits %>s</span>
<span>
</span>

<label class="newRow">Comment Check</label>
<span class="newRow"><%= data.stats.cache.types.commentCheck.requests %> | <%= data.stats.cache.types.commentCheck.miss %> (<%= data.stats.cache.types.commentCheck.missPercent %>) miss</span>
<span class="newRow">
@@ -521,6 +530,14 @@
<span>
</span>

<label class="newRow">Subreddits</label>
<span class="newRow"><%= data.stats.cache.types.subreddit.requests %> | <%= data.stats.cache.types.subreddit.miss %> (<%= data.stats.cache.types.subreddit.missPercent %>) miss</span>
<span class="newRow"></span>
<label>Avgs</label>
<span>Hits/Key <%= data.stats.cache.types.subreddit.identifierAverageHit %> | Hit Interval <%= data.stats.cache.types.subreddit.averageTimeBetweenHits %>s</span>
<span>
</span>

<label class="newRow">Content</label>
<span class="newRow"><%= data.stats.cache.types.content.requests %> | <%= data.stats.cache.types.content.miss %> (<%= data.stats.cache.types.content.missPercent %>) miss</span>
<span class="newRow">

285
src/util.ts
@@ -9,12 +9,30 @@ import {InvalidOptionArgumentError} from "commander";
import Submission from "snoowrap/dist/objects/Submission";
import {Comment} from "snoowrap";
import {inflateSync, deflateSync} from "zlib";
import pixelmatch from 'pixelmatch';
import os from 'os';
import {
    ActivityWindowCriteria, CacheOptions, CacheProvider,
    ActivityWindowCriteria,
    CacheOptions,
    CacheProvider,
    DurationComparison,
    GenericComparison, LogInfo, NamedGroup,
    PollingOptionsStrong, RedditEntity, RedditEntityType, RegExResult, ResourceStats,
    StringOperator, StrongSubredditState, SubredditState
    GenericComparison,
    HistoricalStats,
    HistoricalStatsDisplay, ImageComparisonResult,
    //ImageData,
    ImageDetection,
    //ImageDownloadOptions,
    LogInfo,
    NamedGroup,
    PollingOptionsStrong,
    RedditEntity,
    RedditEntityType,
    RegExResult,
    ResourceStats,
    StatusCodeError,
    StringOperator,
    StrongSubredditState,
    SubredditState
} from "./Common/interfaces";
import JSON5 from "json5";
import yaml, {JSON_SCHEMA} from "js-yaml";
@@ -30,6 +48,21 @@ import {create as createMemoryStore} from './Utils/memoryStore';
import {MESSAGE} from "triple-beam";
import {RedditUser} from "snoowrap/dist/objects";
import reRegExp from '@stdlib/regexp-regexp';
import fetch, {Response} from "node-fetch";
import { URL } from "url";
import ImageData from "./Common/ImageData";
import {Sharp, SharpOptions} from "sharp";
// @ts-ignore
import {blockhashData, hammingDistance} from 'blockhash';
//import {ResembleSingleCallbackComparisonResult} from "resemblejs";

// want to guess how many concurrent image comparisons we should be doing
// assuming, conservatively and based on real-world results, that comparing 30 images takes about ~30MB memory...
// and we really want to use less than a fourth of available ram (should be low-footprint!)...
// and base-line operation of RCB is usually around 40MB (liberal)
const availMemory = (os.freemem() / (1024 * 1024)) / 4 + 40;
export const imageCompareMaxConcurrencyGuess = Math.min(3, Math.max(Math.floor(availMemory/30), 1));

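To make the sizing concrete: on a host with 2048 MB free, availMemory = 2048 / 4 + 40 = 552 and floor(552 / 30) = 18, so the guess is capped at min(3, 18) = 3 concurrent comparisons; on a cramped host with 120 MB free, availMemory = 120 / 4 + 40 = 70 and the guess drops to min(3, max(floor(70 / 30), 1)) = 2. The same math with free memory as an explicit parameter (a hypothetical helper, for illustration only):

// freeMb: free memory in MB, i.e. os.freemem() / (1024 * 1024)
const guessImageConcurrency = (freeMb: number): number => {
    const avail = freeMb / 4 + 40;
    return Math.min(3, Math.max(Math.floor(avail / 30), 1));
};
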
const ReReg = reRegExp();

@@ -91,6 +124,7 @@ export const defaultFormat = (defaultLabel = 'App') => printf(({
    leaf,
    itemId,
    timestamp,
    durationMs,
    // @ts-ignore
    [SPLAT]: splatObj,
    stack,
@@ -120,7 +154,7 @@ export const defaultFormat = (defaultLabel = 'App') => printf(({
    }
    const labelContent = `${nodes.map((x: string) => `[${x}]`).join(' ')}`;

    return `${timestamp} ${level.padEnd(7)}: ${instance !== undefined ? `|${instance}| ` : ''}${bot !== undefined ? `~${bot}~ ` : ''}${subreddit !== undefined ? `{${subreddit}} ` : ''}${labelContent} ${msg}${stringifyValue !== '' ? ` ${stringifyValue}` : ''}${stackMsg}`;
    return `${timestamp} ${level.padEnd(7)}: ${instance !== undefined ? `|${instance}| ` : ''}${bot !== undefined ? `~${bot}~ ` : ''}${subreddit !== undefined ? `{${subreddit}} ` : ''}${labelContent} ${msg}${durationMs !== undefined ? ` Elapsed: ${durationMs}ms (${formatNumber(durationMs/1000)}s) ` : ''}${stringifyValue !== '' ? ` ${stringifyValue}` : ''}${stackMsg}`;
});

@@ -841,8 +875,8 @@ export const boolToString = (val: boolean): string => {
    return val ? 'Yes' : 'No';
}

export const isRedditMedia = (act: Submission): boolean => {
    return act.is_reddit_media_domain || act.is_video || ['v.redd.it','i.redd.it'].includes(act.domain);
export const isRedditMedia = (act: Comment | Submission): boolean => {
    return asSubmission(act) && (act.is_reddit_media_domain || act.is_video || ['v.redd.it','i.redd.it'].includes(act.domain));
}

export const isExternalUrlSubmission = (act: Comment | Submission): boolean => {
@@ -902,17 +936,25 @@ export interface StrongSubredditStateOptions {

export const toStrongSubredditState = (s: SubredditState, opts?: StrongSubredditStateOptions): StrongSubredditState => {
    const {defaultFlags, generateDescription = false} = opts || {};
    const {name: nameVal, stateDescription} = s;
    const {name: nameValRaw, stateDescription} = s;

    let nameReg: RegExp | undefined;
    if (nameVal !== undefined) {
        if (!(nameVal instanceof RegExp)) {
    if (nameValRaw !== undefined) {
        if (!(nameValRaw instanceof RegExp)) {
            let nameVal = nameValRaw.trim();
            nameReg = parseStringToRegex(nameVal, defaultFlags);
            if (nameReg === undefined) {
                nameReg = new RegExp(parseSubredditName(nameVal), defaultFlags);
                try {
                    const parsedVal = parseSubredditName(nameVal);
                    nameVal = parsedVal;
                } catch (err) {
                    // oh well
                    const f = 1;
                }
                nameReg = parseStringToRegex(`/^${nameVal}$/`, defaultFlags);
            }
        } else {
            nameReg = nameVal;
            nameReg = nameValRaw;
        }
    }
    const strongState = {
@@ -1009,7 +1051,8 @@ export const cacheStats = (): ResourceStats => {
        submission: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
        comment: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
        subreddit: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
        commentCheck: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0}
        commentCheck: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
        imageHash: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0}
    };
}

@@ -1075,6 +1118,10 @@ export const isScopeError = (err: any): boolean => {
    return false;
}

export const isStatusError = (err: any): err is StatusCodeError => {
    return typeof err === 'object' && err.name === 'StatusCodeError' && err.response !== undefined;
}

/**
 * Cached activities lose type information when deserialized so need to check properties as well to see if the object is the shape of a Submission
 * */
@@ -1154,3 +1201,215 @@ export const parseRuleResultsToMarkdownSummary = (ruleResults: RuleResult[]): st
    });
    return results.join('\r\n');
}

export const isValidImageURL = (str: string): boolean => {
    return !!str.match(/\w+\.(jpg|jpeg|gif|png|tiff|bmp|webp)$/gi);
}

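A quick sanity check of the extension match (the URLs are illustrative). Note the $ anchor: a URL with a trailing query string (...abc123.jpg?width=200) will not match.

// isValidImageURL('https://i.example.com/abc123.jpg');   // true - path ends in a recognized image extension
// isValidImageURL('https://example.com/gallery/abc123'); // false - no image extension to match
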
let resembleCIFunc: Function;
type SharpCreate = (input?:
    | Buffer
    | Uint8Array
    | Uint8ClampedArray
    | Int8Array
    | Uint16Array
    | Int16Array
    | Uint32Array
    | Int32Array
    | Float32Array
    | Float64Array
    | string, options?: SharpOptions) => Sharp;
let sharpImg: SharpCreate;

const getCIFunc = async () => {
    if (resembleCIFunc === undefined) {
        // @ts-ignore
        const resembleModule = await import('resemblejs/compareImages');
        if (resembleModule === undefined) {
            throw new Error('Could not import resemblejs');
        }
        resembleCIFunc = resembleModule.default;
    }
    return resembleCIFunc;
}

export const getSharpAsync = async (): Promise<SharpCreate> => {
    if (sharpImg === undefined) {
        const sharpModule = await import('sharp');
        if (sharpModule === undefined) {
            throw new Error('Could not import sharp');
        }
        // @ts-ignore
        sharpImg = sharpModule.default;
    }
    return sharpImg;
}

export const compareImages = async (data1: ImageData, data2: ImageData, threshold: number, variantDimensionDiff = 0): Promise<[ImageComparisonResult, boolean, string[]]> => {
    let results: ImageComparisonResult | undefined;
    const errors: string[] = [];

    results = await pixelImageCompare(data1, data2);

    // may decide to bring resemble back at some point in the future if pixelmatch has issues
    // but for now...
    // sharp is a *much* more useful utility and i'd rather have it as a dependency than node-canvas
    // it's much faster, uses less memory, and its libraries more likely to already be available on a host
    // -- with it i can control how images are normalized for dimensions which is basically what resemble was doing anyway (using canvas)

    // try {
    //     results = await pixelImageCompare(data1, data2);
    // } catch (err) {
    //     if(!(err instanceof SimpleError)) {
    //         errors.push(err.message);
    //     }
    //     // swallow this and continue with resemble
    // }
    // if (results === undefined) {
    //     results = await resembleImageCompare(data1, data2, threshold, variantDimensionDiff);
    // }

    return [results, results.misMatchPercentage < threshold, errors];
}

export const pixelImageCompare = async (data1: ImageData, data2: ImageData): Promise<ImageComparisonResult> => {

    let pixelDiff: number | undefined = undefined;

    let sharpFunc: SharpCreate;

    try {
        sharpFunc = await getSharpAsync();
    } catch (err) {
        err.message = `Unable to do image comparison due to an issue importing the comparison library. It is likely sharp is not installed (see ContextMod docs). Error Message: ${err.message}`;
        throw err;
    }

    const [refImg, compareImg, width, height] = await data1.normalizeImagesForComparison('pixel', data2);
    const time = Date.now();
    // ensureAlpha() is imperative here because pixelmatch expects an alpha layer
    pixelDiff = pixelmatch(await refImg.ensureAlpha().raw().toBuffer(), await compareImg.ensureAlpha().raw().toBuffer(), null, width, height);
    return {
        isSameDimensions: true,
        dimensionDifference: {
            height: 0,
            width: 0,
        },
        misMatchPercentage: pixelDiff / (width * height),
        analysisTime: Date.now() - time,
    }
}

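A hedged usage sketch of the pair above. How ImageData instances are built is out of scope here (see Common/ImageData); the 0.10 threshold is illustrative and is on the same scale as the misMatchPercentage computed above, i.e. mismatched pixels over total pixels:

// const imgA: ImageData = ...; // constructed elsewhere, e.g. from a downloaded submission image
// const imgB: ImageData = ...;
// const [result, isSimilar, errors] = await compareImages(imgA, imgB, 0.10);
// result.misMatchPercentage -> fraction of differing pixels (0 = identical)
// isSimilar -> true when misMatchPercentage < threshold
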
// see comments in compareImages
//

// export const resembleImageCompare = async (data1: ImageData, data2: ImageData, threshold?: number, variantDimensionDiff = 0): Promise<ImageComparisonResult> => {
//     let ci: Function;
//
//     try {
//         ci = await getCIFunc();
//     } catch (err) {
//         err.message = `Unable to do image comparison due to an issue importing the comparison library. It is likely 'node-canvas' is not installed (see ContextMod docs). Error Message: ${err.message}`;
//         throw err;
//     }
//
//     let results: ImageComparisonResult | undefined = undefined;
//     // @ts-ignore
//     let resResult: ResembleSingleCallbackComparisonResult = undefined;
//
//     //const [minWidth, minHeight] = getMinimumDimensions(data1, data2);
//     const compareOptions = {
//         // "ignore": [
//         //     'colors' // ~100% than nothing because resemble computes brightness information from rgb for each pixel
//         // ],
//         // boundingBox is ~30% slower than no restrictions
//         // because resemble has to check that each pixel is within the box
//
//         // output: {
//         //     // compare at most 800x800 section to increase performance
//         //     // -- potentially allow this to be user-configurable in the future if not sufficient for dup detection
//         //     boundingBox: {
//         //         left: 0,
//         //         top: 0,
//         //         right: Math.min(minWidth, 800),
//         //         bottom: Math.min(minHeight, 800)
//         //     },
//         // },
//         returnEarlyThreshold: threshold !== undefined ? Math.min(threshold + 5, 100) : undefined,
//     };
//
//     if(data1.preferredResolution !== undefined) {
//         const [prefWidth, prefHeight] = data1.preferredResolution;
//         const prefImgData = data2.getSimilarResolutionVariant(prefWidth, prefHeight, variantDimensionDiff);
//         if(prefImgData !== undefined) {
//             let refThumbnail;
//             try {
//                 refThumbnail = data1.getSimilarResolutionVariant(prefWidth, prefHeight) as ImageData;
//                 resResult = await ci(await (await refThumbnail.sharp()).clone().resize(400, null, {fit: 'outside'}).jpeg().toBuffer()
//                     , await (await prefImgData.sharp()).clone().resize(400, null, {fit: 'outside'}).jpeg().toBuffer()
//                     , compareOptions) as ResembleSingleCallbackComparisonResult;
//             } catch(err) {
//                 throw err;
//             }
//         }
//     }
//     if(resResult === undefined) {
//         resResult = await ci(await (await data1.sharp()).clone().resize(400, null, {fit: 'outside'}).jpeg().toBuffer(),
//             await (await data2.sharp()).clone().resize(400, null, {fit: 'outside'}).jpeg().toBuffer(), compareOptions) as ResembleSingleCallbackComparisonResult;
//     }
//
//
//     return {
//         isSameDimensions: resResult.isSameDimensions,
//         dimensionDifference: resResult.dimensionDifference,
//         // @ts-ignore
//         misMatchPercentage: resResult.rawMisMatchPercentage,
//         analysisTime: resResult.analysisTime
//     };
// }

|
||||
const display: any = {};
|
||||
for(const [k, v] of Object.entries(data)) {
|
||||
if(v instanceof Map) {
|
||||
display[k] = v;
|
||||
display[`${k}Total`] = Array.from(v.values()).reduce((acc, curr) => acc + curr, 0);
|
||||
} else {
|
||||
display[k] = v;
|
||||
}
|
||||
}
|
||||
|
||||
return display as HistoricalStatsDisplay;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if the state criteria being checked are
|
||||
* 1 ) expensive to compute or
|
||||
* 2 ) require additional api requests
|
||||
*
|
||||
* If neither then do not cache results as the number of unique keys (sub-state) increases AT LEAST linearly taking up space (especially in memory cache)
|
||||
* when they are probably not necessary to begin with
|
||||
* */
|
||||
export const shouldCacheSubredditStateCriteriaResult = (state: SubredditState | StrongSubredditState): boolean => {
|
||||
// currently there are no scenarios where we need to cache results
|
||||
// since only things computed from state are comparisons for properties already cached on subreddit object
|
||||
// and regexes for name which aren't that costly
|
||||
// -- so just return false
|
||||
return false;
|
||||
}
|
||||
|
||||
export const subredditStateIsNameOnly = (state: SubredditState | StrongSubredditState): boolean => {
|
||||
const critCount = Object.entries(state).filter(([key, val]) => {
|
||||
return val !== undefined && !['name','stateDescription'].includes(key);
|
||||
}).length;
|
||||
return critCount === 0;
|
||||
}
|
||||
|
||||
export const absPercentDifference = (num1: number, num2: number) => {
|
||||
return Math.abs((num1 - num2) / num1) * 100;
|
||||
}
|
||||
|
||||
export const bitsToHexLength = (bits: number): number => {
|
||||
return Math.pow(bits, 2) / 4;
|
||||
}
|
||||
|
||||
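Why bits squared over four: a blockhash fingerprint of size bits (see the blockhash import above) is a bits x bits grid of binary values, and each hexadecimal character encodes 4 bits, so bitsToHexLength(16) = 256 / 4 = 64 hex characters.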