mirror of
https://github.com/FoxxMD/context-mod.git
synced 2026-01-14 07:57:57 -05:00
Compare commits
154 Commits
First and last commits listed: `2a282a0d6f`, `561a007850` (author and date columns were empty).
@@ -1,8 +1,8 @@
|
||||
node_modules
|
||||
Dockerfile
|
||||
.dockerignore
|
||||
.gitignore
|
||||
.git
|
||||
src/logs
|
||||
/docs
|
||||
.github
|
||||
/docs/
|
||||
/node_modules/
|
||||
|
||||
1 .gitignore (vendored)
@@ -381,4 +381,5 @@ dist
|
||||
.pnp.*
|
||||
|
||||
**/src/**/*.js
|
||||
!src/Web/assets/public/yaml/*
|
||||
**/src/**/*.map
|
||||
|
||||
16 Dockerfile
@@ -1,13 +1,17 @@
|
||||
FROM node:16-alpine3.12
|
||||
FROM node:16-alpine3.14 as base
|
||||
|
||||
ENV TZ=Etc/GMT
|
||||
|
||||
RUN apk update
|
||||
# vips required to run sharp library for image comparison
|
||||
RUN echo "http://dl-4.alpinelinux.org/alpine/v3.14/community" >> /etc/apk/repositories \
|
||||
&& apk --no-cache add vips
|
||||
|
||||
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
|
||||
|
||||
WORKDIR /usr/app
|
||||
|
||||
FROM base as build
|
||||
|
||||
COPY package*.json ./
|
||||
COPY tsconfig.json .
|
||||
|
||||
@@ -15,7 +19,13 @@ RUN npm install
|
||||
|
||||
ADD . /usr/app
|
||||
|
||||
RUN npm run build
|
||||
RUN npm run build && rm -rf node_modules
|
||||
|
||||
FROM base as app
|
||||
|
||||
COPY --from=build /usr/app /usr/app
|
||||
|
||||
RUN npm install --production
|
||||
|
||||
ENV NPM_CONFIG_LOGLEVEL debug
|
||||
|
||||
|
||||
@@ -26,6 +26,8 @@ Some feature highlights:
|
||||
* Author criteria (name, css flair/text, age, karma, moderator status, and [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes))
|
||||
* Activity state (removed, locked, distinguished, etc.)
|
||||
* Rules and Actions support named references (write once, reference anywhere)
|
||||
* [**Image Comparisons**](/docs/imageComparison.md) via fingerprinting and/or pixel differences
|
||||
* [**Repost detection**](/docs/examples/repost) with support for external services (youtube, etc...)
|
||||
* Global/subreddit-level **API caching**
|
||||
* Support for [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) as criteria or Actions (writing notes)
|
||||
* Docker container support
|
||||
@@ -124,7 +126,7 @@ Moderator view/invite and authorization:
|
||||
|
||||
A built-in editor using [monaco-editor](https://microsoft.github.io/monaco-editor/) makes editing configurations easy:
|
||||
|
||||
* Automatic JSON syntax validation and formatting
|
||||
* Automatic JSON or YAML syntax validation and formatting
|
||||
* Automatic Schema (subreddit or operator) validation
|
||||
* All properties are annotated via hover popups
|
||||
* Unauthenticated view via `yourdomain.com/config`
|
||||
|
||||
14 app.json
@@ -17,12 +17,22 @@
|
||||
"REFRESH_TOKEN": {
|
||||
"description": "Refresh token retrieved from authenticating an account with your Reddit Application",
|
||||
"value": "",
|
||||
"required": true
|
||||
"required": false
|
||||
},
|
||||
"ACCESS_TOKEN": {
|
||||
"description": "Access token retrieved from authenticating an account with your Reddit Application",
|
||||
"value": "",
|
||||
"required": true
|
||||
"required": false
|
||||
},
|
||||
"REDIRECT_URI": {
|
||||
"description": "Redirect URI you specified when creating your Reddit Application. Required if you want to use the web interface. In the provided example replace 'your-heroku-app-name' with the name of your HEROKU app.",
|
||||
"value": "https://your-heroku-6app-name.herokuapp.com/callback",
|
||||
"required": false
|
||||
},
|
||||
"OPERATOR": {
|
||||
"description": "Your reddit username WITHOUT any prefixes EXAMPLE /u/FoxxMD => FoxxMD. Specified user will be recognized as an admin.",
|
||||
"value": "",
|
||||
"required": false
|
||||
},
|
||||
"WIKI_CONFIG": {
|
||||
"description": "Relative url to contextbot wiki page EX https://reddit.com/r/subreddit/wiki/<path>",
|
||||
|
||||
67 cliff.toml (new file)
@@ -0,0 +1,67 @@
|
||||
# configuration file for git-cliff (0.1.0)
|
||||
|
||||
[changelog]
|
||||
# changelog header
|
||||
header = """
|
||||
# Changelog
|
||||
All notable changes to this project will be documented in this file.\n
|
||||
"""
|
||||
# template for the changelog body
|
||||
# https://tera.netlify.app/docs/#introduction
|
||||
body = """
|
||||
{% if version %}\
|
||||
## [{{ version | replace(from="v", to="") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
|
||||
{% else %}\
|
||||
## [unreleased]
|
||||
{% endif %}\
|
||||
{% for group, commits in commits | group_by(attribute="group") %}
|
||||
### {{ group | upper_first }}
|
||||
{% for commit in commits
|
||||
| filter(attribute="scope")
|
||||
| sort(attribute="scope") %}
|
||||
- *({{commit.scope}})* {{ commit.message | upper_first }}
|
||||
{%- if commit.breaking %}
|
||||
{% raw %} {% endraw %}- **BREAKING**: {{commit.breaking_description}}
|
||||
{%- endif -%}
|
||||
{%- endfor -%}
|
||||
{%- for commit in commits %}
|
||||
{%- if commit.scope -%}
|
||||
{% else -%}
|
||||
- *(No Category)* {{ commit.message | upper_first }}
|
||||
{% if commit.breaking -%}
|
||||
{% raw %} {% endraw %}- **BREAKING**: {{commit.breaking_description}}
|
||||
{% endif -%}
|
||||
{% endif -%}
|
||||
{% endfor -%}
|
||||
{% endfor %}
|
||||
"""
|
||||
# remove the leading and trailing whitespaces from the template
|
||||
trim = true
|
||||
# changelog footer
|
||||
footer = """
|
||||
<!-- generated by git-cliff -->
|
||||
"""
|
||||
|
||||
[git]
|
||||
# allow only conventional commits
|
||||
# https://www.conventionalcommits.org
|
||||
conventional_commits = true
|
||||
# regex for parsing and grouping commits
|
||||
commit_parsers = [
|
||||
{ message = "^feat", group = "Features"},
|
||||
{ message = "^fix", group = "Bug Fixes"},
|
||||
{ message = "^doc", group = "Documentation"},
|
||||
{ message = "^perf", group = "Performance"},
|
||||
{ message = "^refactor", group = "Refactor"},
|
||||
{ message = "^style", group = "Styling"},
|
||||
{ message = "^test", group = "Testing"},
|
||||
{ message = "^chore\\(release\\): prepare for", skip = true},
|
||||
{ message = "^chore", group = "Miscellaneous Tasks"},
|
||||
{ body = ".*security", group = "Security"},
|
||||
]
|
||||
# filter out the commits that are not matched by commit parsers
|
||||
filter_commits = false
|
||||
# glob pattern for matching git tags
|
||||
tag_pattern = "[0-9]*"
|
||||
# regex for skipping tags
|
||||
skip_tags = "v0.1.0-beta.1"
|
||||
@@ -18,6 +18,7 @@
|
||||
* [Activities `window`](#activities-window)
|
||||
* [Comparisons](#thresholds-and-comparisons)
|
||||
* [Activity Templating](/docs/actionTemplating.md)
|
||||
* [Image Comparisons](#image-comparisons)
|
||||
* [Best Practices](#best-practices)
|
||||
* [Named Rules](#named-rules)
|
||||
* [Rule Order](#rule-order)
|
||||
@@ -100,7 +101,8 @@ Find detailed descriptions of all the Rules, with examples, below:
|
||||
* [Repeat Activity](/docs/examples/repeatActivity)
|
||||
* [History](/docs/examples/history)
|
||||
* [Author](/docs/examples/author)
|
||||
* Regex
|
||||
* [Regex](/docs/examples/regex)
|
||||
* [Repost](/docs/examples/repost)
|
||||
|
||||
### Rule Set
|
||||
|
||||
@@ -268,6 +270,12 @@ The duration value compares a time range from **now** to `duration value` time i
|
||||
|
||||
Refer to [duration values in activity window documentation](/docs/activitiesWindow.md#duration-values) as well as the individual rule/criteria schema to see what this duration is comparing against.
|
||||
|
||||
### Image Comparisons
|
||||
|
||||
ContextMod implements two methods for comparing **image content**: perceptual hashing and pixel-to-pixel comparison. These comparisons can be used to filter activities in some rules.
|
||||
|
||||
See [image comparison documentation](/docs/imageComparison.md) for a full reference.
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Named Rules
|
||||
|
||||
@@ -16,6 +16,8 @@ This directory contains example of valid, ready-to-go configurations for Context
|
||||
* [Repeat Activity](/docs/examples/repeatActivity)
|
||||
* [History](/docs/examples/history)
|
||||
* [Author](/docs/examples/author)
|
||||
* [Regex](/docs/examples/regex)
|
||||
* [Repost](/docs/examples/repost)
|
||||
* [Toolbox User Notes](/docs/examples/userNotes)
|
||||
* [Advanced Concepts](/docs/examples/advancedConcepts)
|
||||
* [Rule Sets](/docs/examples/advancedConcepts/ruleSets.json5)
|
||||
|
||||
22 docs/examples/regex/README.md (new file)
@@ -0,0 +1,22 @@
|
||||
The **Regex** rule matches on text content from a comment or submission in the same way automod uses regex. The rule, however, provides additional functionality that automod does not:
|
||||
|
||||
* Can set the **number** of matches that trigger the rule (`matchThreshold`)
|
||||
|
||||
The rule can also be used in conjunction with a [`window`](https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md) to match against activities from the history of the Author of the Activity being checked (including the Activity being checked):
|
||||
|
||||
* Can set the **number of Activities** that meet the `matchThreshold` to trigger the rule (`activityMatchThreshold`)
|
||||
* Can set the **number of total matches** across all Activities to trigger the rule (`totalMatchThreshold`)
|
||||
* Can set the **type of Activities** to check (`lookAt`)
|
||||
* When an Activity is a Submission, can **specify which parts of the Submission to match against** IE title, body, and/or url (`testOn`)
|
||||
|
||||
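A minimal sketch combining several of these options (the values are illustrative, not recommendations) -- trigger when at least 3 activities from the Author's last 10 comments each contain 2 or more discord links:

```json5
// goes inside
// "rules": []
{
  "name": "discordLinkSpam",
  "kind": "regex",
  "criteria": [
    {
      "regex": "/discord\\.gg\\/[\\w\\d]+/i",
      // at least 2 matches within a single activity
      "matchThreshold": ">= 2",
      // at least 3 activities from the window must meet matchThreshold
      "activityMatchThreshold": ">= 3",
      // only consider comments from the Author's history
      "lookAt": "comments",
      // learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
      "window": 10,
    },
  ]
}
```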
### Examples
|
||||
|
||||
* [Trigger if regex matches against the current activity](/docs/examples/regex/matchAnyCurrentActivity.json5)
|
||||
* [Trigger if regex matches 5 times against the current activity](/docs/examples/regex/matchThresholdCurrentActivity.json5)
|
||||
* [Trigger if regex matches against any part of a Submission](/docs/examples/regex/matchSubmissionParts.json5)
|
||||
* [Trigger if regex matches any of Author's last 10 activities](/docs/examples/regex/matchHistoryActivity.json5)
|
||||
* [Trigger if regex matches at least 3 of Author's last 10 activities](/docs/examples/regex/matchActivityThresholdHistory.json5)
|
||||
* [Trigger if there are 5 regex matches in the Author's last 10 activities](/docs/examples/regex/matchTotalHistoryActivity.json5)
|
||||
* [Trigger if there are 5 regex matches in the Author's last 10 comments](/docs/examples/regex/matchSubsetHistoryActivity.json5)
|
||||
* [Remove comments that are spamming discord links](/docs/examples/regex/removeDiscordSpam.json5)
|
||||
* Differs from just using automod because this config can allow one-off/organic links from users who DO NOT spam discord links but will still remove the comment if the user is spamming them
|
||||
20 docs/examples/regex/matchActivityThresholdHistory.json5 (new file)
@@ -0,0 +1,20 @@
|
||||
// goes inside
|
||||
// "rules": []
|
||||
{
|
||||
"name": "swear",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
// triggers if more than 3 activities in the last 10 match the regex
|
||||
{
|
||||
"regex": "/fuck|shit|damn/",
|
||||
// this differs from "totalMatchThreshold"
|
||||
//
|
||||
// activityMatchThreshold => # of activities from window must match regex
|
||||
// totalMatchThreshold => # of matches across all activities from window must match regex
|
||||
"activityMatchThreshold": "> 3",
|
||||
// if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
|
||||
// learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
|
||||
"window": 10,
|
||||
},
|
||||
]
|
||||
}
|
||||
14 docs/examples/regex/matchAnyCurrentActivity.json5 (new file)
@@ -0,0 +1,14 @@
|
||||
// goes inside
|
||||
// "rules": []
|
||||
{
|
||||
"name": "swear",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
// triggers if current activity has more than 0 matches
|
||||
{
|
||||
"regex": "/fuck|shit|damn/",
|
||||
// if "matchThreshold" is not specified it defaults to this -- default behavior is to trigger if there are any matches
|
||||
// "matchThreshold": "> 0"
|
||||
},
|
||||
]
|
||||
}
|
||||
15 docs/examples/regex/matchHistoryActivity.json5 (new file)
@@ -0,0 +1,15 @@
|
||||
// goes inside
|
||||
// "rules": []
|
||||
{
|
||||
"name": "swear",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
// triggers if any activity in the last 10 (including current activity) match the regex
|
||||
{
|
||||
"regex": "/fuck|shit|damn/",
|
||||
// if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
|
||||
// learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
|
||||
"window": 10,
|
||||
},
|
||||
]
|
||||
}
|
||||
19 docs/examples/regex/matchSubmissionParts.json5 (new file)
@@ -0,0 +1,19 @@
|
||||
// goes inside
|
||||
// "rules": []
|
||||
{
|
||||
"name": "swear",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
{
|
||||
// triggers if the current activity has more than 0 matches
|
||||
// if the activity is a submission then matches against title, body, and url
|
||||
// if "testOn" is not provided then `title, body` are the defaults
|
||||
"regex": "/fuck|shit|damn/",
|
||||
"testOn": [
|
||||
"title",
|
||||
"body",
|
||||
"url"
|
||||
]
|
||||
},
|
||||
]
|
||||
}
|
||||
23 docs/examples/regex/matchSubsetHistoryActivity.json5 (new file)
@@ -0,0 +1,23 @@
|
||||
// goes inside
|
||||
// "rules": []
|
||||
{
|
||||
"name": "swear",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
// triggers if there are more than 5 regex matches in the last 10 activities (comments only)
|
||||
{
|
||||
"regex": "/fuck|shit|damn/",
|
||||
// this differs from "activityMatchThreshold"
|
||||
//
|
||||
// activityMatchThreshold => # of activities from window must match regex
|
||||
// totalMatchThreshold => # of matches across all activities from window must match regex
|
||||
"totalMatchThreshold": "> 5",
|
||||
// if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
|
||||
// learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
|
||||
"window": 10,
|
||||
// determines which activities from window to consider
|
||||
//defaults to "all" (submissions and comments)
|
||||
"lookAt": "comments",
|
||||
},
|
||||
]
|
||||
}
|
||||
13 docs/examples/regex/matchThresholdCurrentActivity.json5 (new file)
@@ -0,0 +1,13 @@
|
||||
// goes inside
|
||||
// "rules": []
|
||||
{
|
||||
"name": "swear",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
{
|
||||
"regex": "/fuck|shit|damn/",
|
||||
// triggers if current activity has greater than 5 matches
|
||||
"matchThreshold": "> 5"
|
||||
},
|
||||
]
|
||||
}
|
||||
21 docs/examples/regex/matchTotalHistoryActivity.json5 (new file)
@@ -0,0 +1,21 @@
|
||||
// goes inside
|
||||
// "rules": []
|
||||
{
|
||||
"name": "swear",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
// triggers if there are more than 5 regex matches in the last 10 activities (comments or submission)
|
||||
{
|
||||
// triggers if there are more than 5 *total matches* across the last 10 activities
|
||||
"regex": "/fuck|shit|damn/",
|
||||
// this differs from "activityMatchThreshold"
|
||||
//
|
||||
// activityMatchThreshold => # of activities from window must match regex
|
||||
// totalMatchThreshold => # of matches across all activities from window must match regex
|
||||
"totalMatchThreshold": "> 5",
|
||||
// if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
|
||||
// learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
|
||||
"window": 10,
|
||||
},
|
||||
]
|
||||
}
|
||||
73 docs/examples/regex/removeDiscordSpam.json5 (new file)
@@ -0,0 +1,73 @@
|
||||
{
|
||||
"checks": [
|
||||
{
|
||||
"name": "remove discord spam",
|
||||
"notifyOnTrigger": true,
|
||||
"description": "remove comments from users who are spamming discord links",
|
||||
"kind": "comment",
|
||||
"authorIs": {
|
||||
"exclude": [
|
||||
{
|
||||
"isMod": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"itemIs": [
|
||||
{
|
||||
"removed": false,
|
||||
"approved": false,
|
||||
}
|
||||
],
|
||||
"condition": "OR",
|
||||
"rules": [
|
||||
{
|
||||
// set to false if you want to allow comments with a discord link ONLY IF
|
||||
// the author doesn't have a history of spamming discord links
|
||||
// -- basically allows one-off/organic discord links
|
||||
"enable": true,
|
||||
"name": "linkOnlySpam",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
{
|
||||
"name": "only link",
|
||||
"regex": "/^.*(discord\\.gg\\/[\\w\\d]+)$/i",
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"condition": "AND",
|
||||
"rules": [
|
||||
{
|
||||
"name": "linkAnywhereSpam",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
{
|
||||
"name": "contains link anywhere",
|
||||
"regex": "/^.*(discord\\.gg\\/[\\w\\d]+).*$/i",
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "linkAnywhereHistoricalSpam",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
{
|
||||
"name": "contains links anywhere historically",
|
||||
"regex": "/^.*(discord\\.gg\\/[\\w\\d]+).*$/i",
|
||||
"totalMatchThreshold": ">= 3",
|
||||
"lookAt": "comments",
|
||||
"window": 10
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"kind": "remove"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
703 docs/examples/repost/README.md (new file)
@@ -0,0 +1,703 @@
|
||||
The **Repost** rule is used to find reposts for both **Submissions** and **Comments**, depending on what type of **Check** it is used on.
|
||||
|
||||
Note: This rule is for searching **all of Reddit** for reposts, as opposed to just the Author of the Activity being checked. If you only want to check for reposts by the Author of the Activity being checked you should use the [Repeat Activity](/docs/examples/repeatActivity) rule.
|
||||
|
||||
# TLDR
|
||||
|
||||
Out of the box, CM generates a repost rule with sensible default behavior without any configuration. You do not need to configure any of the below options (facets, modifiers, criteria) yourself in order to have a working repost rule. Default behavior is as follows...
|
||||
|
||||
* When looking for Submission reposts CM will find any Submissions with
|
||||
* a very similar title
|
||||
* or independent of title...
|
||||
* any crossposts/duplicates
|
||||
* any submissions with the exact URL
|
||||
* When looking for Comment reposts CM will do the above AND THEN
|
||||
* compare the top 50 most-upvoted comments from the top 10 most-upvoted Submissions against the comment being checked
|
||||
* compare any items found from external source (Youtube comments, etc...) against the comment being checked
|
||||
|
||||
# Configuration
|
||||
|
||||
## Search Facets
|
||||
|
||||
ContextMod has several ways to search for reposts -- all of which look at different elements of a Submission in order to find repost candidates. You can define any/all of the **Search Facets** you want to use to search Reddit in the `searchOn` property of the Repost Rule's configuration.
|
||||
|
||||
### Usage
|
||||
|
||||
Facets are specified in the `searchOn` array property within the rule's configuration.
|
||||
|
||||
**String**
|
||||
|
||||
Specify one or more types of facets as a string to use their default configurations
|
||||
|
||||
<details>
|
||||
|
||||
```json5
|
||||
{
|
||||
"kind": "repost",
|
||||
"criteria": [
|
||||
{
|
||||
// ...
|
||||
"searchOn": ["title", "url", "crossposts"],
|
||||
// ....
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
**Object**
|
||||
|
||||
**string** and object configurations can be mixed
|
||||
|
||||
<details>
|
||||
|
||||
```json5
|
||||
{
|
||||
"kind": "repost",
|
||||
"criteria": [
|
||||
{
|
||||
// ...
|
||||
"searchOn": [
|
||||
"title",
|
||||
{
|
||||
"kind": "url",
|
||||
// could also specify multiple types to use the same config for all
|
||||
//"kind": ["url", "duplicates"]
|
||||
"matchScore": 90,
|
||||
//...
|
||||
},
|
||||
"external"
|
||||
],
|
||||
// ....
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Facet Types
|
||||
|
||||
* **title** -- search reddit for Submissions with a similar title
|
||||
* **url** -- search reddit for Submissions with the same URL
|
||||
* **duplicates** -- get all Submissions **reddit has identified** as duplicates that are **NOT** crossposts
|
||||
* these are found under *View discussions in other communities* (new reddit) or *other discussions* (old reddit) on the Submission
|
||||
* **crossposts** -- get all Submissions where the current Submission is the source of an **official** crosspost
|
||||
* this differs from duplicates in that crossposts use reddit's built-in crosspost functionality, respect subreddit crosspost rules, and link back to the original Submission
|
||||
* **external** -- get items from the Submission's link source that may be reposted (currently implemented for **Comment Checks** only)
|
||||
* When the Submission link is for...
|
||||
* **Youtube** -- get top comments on video by replies/like count
|
||||
* **NOTE:** An **API Key** for the [Youtube Data API](https://developers.google.com/youtube/v3) must be provided for this facet to work. This can be provided by the operator alongside [bot credentials](/docs/operatorConfiguration.md) or in the top-level `credentials` property for a [subreddit configuration.](https://json-schema.app/view/%23?url=https%3A%2F%2Fraw.githubusercontent.com%2FFoxxMD%2Freddit-context-bot%2Fmaster%2Fsrc%2FSchema%2FApp.json)
|
||||
|
||||
### Facet Modifiers
|
||||
|
||||
For all **Facets**, except for **external**, there are options that can be configured to determine if a found Submission is a "valid" repost IE filtering. These options can be configured **per facet**.
|
||||
|
||||
* **matchScore** -- The percentage, as a whole number, of a repost title that must match the title being checked in order to consider both a match
|
||||
* **minWordCount** -- The minimum number of words a title must have
|
||||
* **caseSensitive** -- If the match comparison should be case-sensitive (defaults to `false`)
|
||||
|
||||
Additionally, the current Activity's title and/or each repost's title can be transformed before matching:
|
||||
|
||||
* **transformations** -- An array of SearchAndReplace objects used to transform the repost's title
|
||||
* **transformationsActivity** -- An array of SearchAndReplace objects used to transform the current Activity's title
|
||||
|
||||
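As an example, a `title` facet could be tightened with these modifiers (a sketch -- the numbers are illustrative, not recommendations):

```json5
{
  "kind": "repost",
  "criteria": [
    {
      "searchOn": [
        {
          "kind": "title",
          // candidate titles must be at least 90% similar to the checked title
          "matchScore": 90,
          // ignore candidates whose titles have fewer than 3 words
          "minWordCount": 3,
          // compare titles case-sensitively
          "caseSensitive": true
        }
      ]
    }
  ]
}
```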
#### Modifier Defaults
|
||||
|
||||
To make facets easier to use without configuration, sensible defaults are applied to each when no other configuration is defined...
|
||||
|
||||
* **title**
|
||||
* `matchScore: 85` -- The candidate repost's title must be at least 85% similar to the current Activity's title
|
||||
* `minWordCount: 2` -- The candidate repost's title must have at least 2 words
|
||||
|
||||
For `url`,`duplicates`, and `crossposts` the only default is `matchScore: 0` because the assumption is you want to treat any actual dups/x-posts or exact URLs as reposts, regardless of their title.
|
||||
|
||||
## Additional Criteria Properties
|
||||
|
||||
A **criteria** object may also specify some additional tests to run against the reposts found from searching.
|
||||
|
||||
### For Submissions and Comments
|
||||
|
||||
#### Occurrences
|
||||
|
||||
Define a set of criteria to test against the **number of reposts**, **time reposts were created**, or both.
|
||||
|
||||
##### Count
|
||||
|
||||
<details>
|
||||
|
||||
```json5
|
||||
{
|
||||
"kind": "repost",
|
||||
"criteria": [
|
||||
{
|
||||
// ...
|
||||
"searchOn": ["title", "url", "crossposts"],
|
||||
"occurrences": {
|
||||
"criteria": [
|
||||
{
|
||||
// passes if BOTH tests are true
|
||||
"count": {
|
||||
"condition": "AND", // default is AND
|
||||
"test": [
|
||||
"> 3", // TRUE if there are GREATER THAN 3 reposts found
|
||||
"<= 5" // TRUE if there are LESS THAN OR EQUAL TO 5 reposts found
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
##### Time
|
||||
|
||||
Define a test or array of tests to run against **when reposts were created**
|
||||
|
||||
<details>
|
||||
|
||||
```json5
|
||||
{
|
||||
"kind": "repost",
|
||||
"criteria": [
|
||||
{
|
||||
// ...
|
||||
"searchOn": [
|
||||
"title",
|
||||
"url",
|
||||
"crossposts"
|
||||
],
|
||||
"occurrences": {
|
||||
"criteria": [
|
||||
{
|
||||
time: {
|
||||
// how to test array of comparisons. AND => all must pass, OR => any must pass
|
||||
"condition": "AND",
|
||||
"test": [
|
||||
{
|
||||
// which of the found reposts to test the time comparison on
|
||||
//
|
||||
// "all" => ALL reposts must pass time comparison
|
||||
// "any" => ANY repost must pass time comparison
|
||||
// "newest" => The newest (closest in time to now) repost must pass time comparison
|
||||
// "oldest" => The oldest (furthest in time from now) repost must pass time comparison
|
||||
//
|
||||
"testOn": "all",
|
||||
// Tested items must be OLDER THAN 3 months
|
||||
"condition": "> 3 months"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
### For Comments
|
||||
|
||||
When the rule is run in a **Comment Check** you may specify text comparisons (like those found in Search Facets) to run on the contents of the repost comments *against* the contents of the comment being checked.
|
||||
|
||||
* **matchScore** -- The percentage, as a whole number, of a repost comment that must match the comment being checked in order to consider both a match (defaults to 85% IE `85`)
|
||||
* **minWordCount** -- The minimum number of words a comment must have
|
||||
* **caseSensitive** -- If the match comparison should be case-sensitive (defaults to `false`)
|
||||
|
||||
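A sketch of how these properties sit directly on the criteria object, alongside `searchOn` (values are illustrative):

```json5
{
  "kind": "repost",
  "criteria": [
    {
      "searchOn": ["url", "external"],
      // repost comments must be at least 90% similar to the comment being checked
      "matchScore": 90,
      // ignore repost comments with fewer than 4 words
      "minWordCount": 4
    }
  ]
}
```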
# Examples
|
||||
|
||||
Examples of a *full* CM configuration, including the Repost Rule, in various scenarios. In each scenario the parts of the configuration that affect the rule are indicated.
|
||||
|
||||
## Submissions
|
||||
|
||||
These examples apply when the Repost Rule is run in a **Submission Check** IE the activity being checked is a Submission.
|
||||
|
||||
### Default Behavior (No configuration)
|
||||
|
||||
This is the same behavior described in the [TLDR](#TLDR) section above -- find any submissions with:
|
||||
|
||||
* a very similar title (85% or more the same)
|
||||
* or ignoring title...
|
||||
* any crossposts/duplicates
|
||||
* any submissions with the exact URL
|
||||
|
||||
<details>
|
||||
|
||||
```json5
|
||||
{
|
||||
"polling": [
|
||||
"unmoderated"
|
||||
],
|
||||
"checks": [
|
||||
{
|
||||
"name": "subRepost",
|
||||
"description": "Check if submission has been reposted",
|
||||
// kind specifies this check is for SUBMISSIONS
|
||||
"kind": "submission",
|
||||
"condition": "AND",
|
||||
"rules": [
|
||||
// repost rule configuration is below
|
||||
//
|
||||
{
|
||||
"kind": "repost"
|
||||
},
|
||||
//
|
||||
// repost rule configuration is above
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"kind": "report",
|
||||
"content": "This submission was reposted"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Search by Title Only
|
||||
|
||||
Find any submissions with:
|
||||
|
||||
* a very similar title (85% or more the same)
|
||||
|
||||
<details>
|
||||
|
||||
```json5
|
||||
{
|
||||
"polling": [
|
||||
"unmoderated"
|
||||
],
|
||||
"checks": [
|
||||
{
|
||||
"name": "subRepost",
|
||||
"description": "Check if submission has been reposted",
|
||||
// kind specifies this check is for SUBMISSIONS
|
||||
"kind": "submission",
|
||||
"condition": "AND",
|
||||
"rules": [
|
||||
// repost rule configuration is below
|
||||
//
|
||||
{
|
||||
"kind": "repost",
|
||||
"criteria": [
|
||||
{
|
||||
// specify only title to search on
|
||||
"searchOn": [
|
||||
"title" // uses default configuration since only string is specified
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
//
|
||||
// repost rule configuration is above
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"kind": "report",
|
||||
"content": "This submission was reposted"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Search by Title only and specify similarity percentage
|
||||
|
||||
* a very similar title (95% or more the same)
|
||||
|
||||
<details>
|
||||
|
||||
```json5
|
||||
{
|
||||
"polling": [
|
||||
"unmoderated"
|
||||
],
|
||||
"checks": [
|
||||
{
|
||||
"name": "subRepost",
|
||||
"description": "Check if submission has been reposted",
|
||||
// kind specifies this check is for SUBMISSIONS
|
||||
"kind": "submission",
|
||||
"condition": "AND",
|
||||
"rules": [
|
||||
// repost rule configuration is below
|
||||
//
|
||||
{
|
||||
"kind": "repost",
|
||||
"criteria": [
|
||||
{
|
||||
// specify only title to search on
|
||||
"searchOn": [
|
||||
{
|
||||
"kind": "title",
|
||||
// titles must be 95% or more similar
|
||||
"matchScore": "95"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
//
|
||||
// repost rule configuration is above
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"kind": "report",
|
||||
"content": "This submission was reposted"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Search by Title, specify similarity percentage, AND any duplicates
|
||||
|
||||
<details>
|
||||
|
||||
```json5
|
||||
{
|
||||
"polling": [
|
||||
"unmoderated"
|
||||
],
|
||||
"checks": [
|
||||
{
|
||||
"name": "subRepost",
|
||||
"description": "Check if submission has been reposted",
|
||||
// kind specifies this check is for SUBMISSIONS
|
||||
"kind": "submission",
|
||||
"condition": "AND",
|
||||
"rules": [
|
||||
// repost rule configuration is below
|
||||
//
|
||||
{
|
||||
"kind": "repost",
|
||||
"criteria": [
|
||||
{
|
||||
"searchOn": [
|
||||
// look for duplicates (NON crossposts) using default configuration
|
||||
"duplicates",
|
||||
// search by title
|
||||
{
|
||||
"kind": "title",
|
||||
// titles must be 95% or more similar
|
||||
"matchScore": "95"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
//
|
||||
// repost rule configuration is above
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"kind": "report",
|
||||
"content": "This submission was reposted"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Approve Submission if not reposted in the last month, by title
|
||||
|
||||
<details>
|
||||
|
||||
```json5
|
||||
{
|
||||
"polling": [
|
||||
"unmoderated"
|
||||
],
|
||||
"checks": [
|
||||
{
|
||||
"name": "subRepost",
|
||||
"description": "Check there are no reposts with same title in the last month",
|
||||
// kind specifies this check is for SUBMISSIONS
|
||||
"kind": "submission",
|
||||
"condition": "AND",
|
||||
"rules": [
|
||||
// repost rule configuration is below
|
||||
//
|
||||
{
|
||||
"kind": "repost",
|
||||
"criteria": [
|
||||
{
|
||||
"searchOn": [
|
||||
"title"
|
||||
],
|
||||
"occurrences": {
|
||||
// if EITHER criteria is TRUE then it "passes"
|
||||
"condition": "OR",
|
||||
"criteria": [
|
||||
// first criteria:
|
||||
// TRUE if there are LESS THAN 1 reposts (no reposts found)
|
||||
{
|
||||
"count": {
|
||||
"test": ["< 1"]
|
||||
}
|
||||
},
|
||||
// second criteria:
|
||||
// TRUE if the newest repost is older than one month
|
||||
{
|
||||
"time": {
|
||||
"test": [
|
||||
{
|
||||
"testOn": "newest",
|
||||
"condition": "> 1 month"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
}
|
||||
]
|
||||
},
|
||||
//
|
||||
// repost rule configuration is above
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
// approve this post since we know it is not a repost of anything within the last month
|
||||
"kind": "approve",
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
## Comments
|
||||
|
||||
### Default Behavior (No configuration)
|
||||
|
||||
This is the same behavior described in the [TLDR](#TLDR) section above -- find any submissions with:
|
||||
|
||||
* a very similar title (85% or more the same)
|
||||
* or ignoring title...
|
||||
* any crossposts/duplicates
|
||||
* any submissions with the exact URL
|
||||
* If the comment being checked is on a Submission linking to Youtube then get the top 50 comments on the youtube video as well...
|
||||
|
||||
AND THEN
|
||||
|
||||
* sort submissions by votes
|
||||
* take top 20 (upvoted) comments from top 10 (upvoted) submissions
|
||||
* sort comments by votes, take top 50 + top 50 external items
|
||||
|
||||
FINALLY
|
||||
|
||||
* filter all gathered comments by default `matchScore: 85` to find very similar matches
|
||||
* rule is triggered if any are found
|
||||
|
||||
<details>
|
||||
|
||||
```json5
|
||||
{
|
||||
"polling": [
|
||||
"newComm"
|
||||
],
|
||||
"checks": [
|
||||
{
|
||||
"name": "commRepost",
|
||||
"description": "Check if comment has been reposted",
|
||||
// kind specifies this check is for COMMENTS
|
||||
"kind": "common",
|
||||
"condition": "AND",
|
||||
"rules": [
|
||||
// repost rule configuration is below
|
||||
//
|
||||
{
|
||||
"kind": "repost"
|
||||
},
|
||||
//
|
||||
// repost rule configuration is above
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"kind": "report",
|
||||
"content": "This comment was reposted"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Search by external (youtube) comments only
|
||||
|
||||
<details>
|
||||
|
||||
```json5
|
||||
{
|
||||
"polling": [
|
||||
"newComm"
|
||||
],
|
||||
"checks": [
|
||||
{
|
||||
"name": "commRepost",
|
||||
"description": "Check if comment has been reposted from youtube",
|
||||
// kind specifies this check is for COMMENTS
|
||||
"kind": "comment",
|
||||
"condition": "AND",
|
||||
"rules": [
|
||||
// repost rule configuration is below
|
||||
//
|
||||
{
|
||||
"kind": "repost",
|
||||
"criteria": [
|
||||
{
|
||||
// specify only external (youtube) to search on
|
||||
"searchOn": [
|
||||
"external"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
//
|
||||
// repost rule configuration is above
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"kind": "report",
|
||||
"content": "This comment was reposted from youtube"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Search by external (youtube) comments only, with higher comment match percentage
|
||||
|
||||
<details>
|
||||
|
||||
```json5
|
||||
{
|
||||
"polling": [
|
||||
"newComm"
|
||||
],
|
||||
"checks": [
|
||||
{
|
||||
"name": "commRepost",
|
||||
"description": "Check if comment has been reposted from youtube",
|
||||
// kind specifies this check is for COMMENTS
|
||||
"kind": "comment",
|
||||
"condition": "AND",
|
||||
"rules": [
|
||||
// repost rule configuration is below
|
||||
//
|
||||
{
|
||||
"kind": "repost",
|
||||
"criteria": [
|
||||
{
|
||||
// specify only external (youtube) to search on
|
||||
"searchOn": [
|
||||
"external"
|
||||
],
|
||||
"matchScore": 95 // matchScore for comments is on criteria instead of searchOn config...
|
||||
},
|
||||
]
|
||||
},
|
||||
//
|
||||
// repost rule configuration is above
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"kind": "report",
|
||||
"content": "This comment was reposted from youtube"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Search by external (youtube) comments and submission URL, with higher comment match percentage
|
||||
|
||||
<details>
|
||||
|
||||
```json5
|
||||
{
|
||||
"polling": [
|
||||
"newComm"
|
||||
],
|
||||
"checks": [
|
||||
{
|
||||
"name": "commRepost",
|
||||
"description": "Check if comment has been reposted",
|
||||
// kind specifies this check is for COMMENTS
|
||||
"kind": "comment",
|
||||
"condition": "AND",
|
||||
"rules": [
|
||||
// repost rule configuration is below
|
||||
//
|
||||
{
|
||||
"kind": "repost",
|
||||
"criteria": [
|
||||
{
|
||||
// specify only external (youtube) to search on
|
||||
"searchOn": [
|
||||
"external",
|
||||
// can specify any/all submission search facets to acquire comments from
|
||||
"url"
|
||||
],
|
||||
"matchScore": 95 // matchScore for comments is on criteria instead of searchOn config...
|
||||
},
|
||||
]
|
||||
},
|
||||
//
|
||||
// repost rule configuration is above
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"kind": "report",
|
||||
"content": "This comment was reposted from youtube or from submission with the same URL"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
</details>
|
||||
@@ -39,3 +39,26 @@ then remove the submission
|
||||
### [Remove comment if the user has posted the same comment 4 or more times in a row](/docs/examples/subredditReady/commentSpam.json5)
|
||||
|
||||
If the user made the same comment (with some fuzzy matching) 4 or more times in a row in the past (50 activities or 6 months) then remove the comment.
|
||||
|
||||
### [Remove comment if it is discord invite link spam](/docs/examples/subredditReady/discordSpam.json5)
|
||||
|
||||
This rule goes a step further than automod can by being more discretionary about how it handles this type of spam.
|
||||
|
||||
* Remove the comment and **ban a user** if:
|
||||
* Comment being checked contains **only** a discord link (no other text) AND
|
||||
* Discord links appear **anywhere** in three or more of the last 10 comments the Author has made
|
||||
|
||||
otherwise...
|
||||
|
||||
* Remove the comment if:
|
||||
* Comment being checked contains **only** a discord link (no other text) OR
|
||||
* Comment contains a discord link **anywhere** AND
|
||||
* Discord links appear **anywhere** in three or more of the last 10 comments the Author has made
|
||||
|
||||
Using these checks ContextMod can more easily distinguish between these use cases for a user commenting with a discord link:
|
||||
|
||||
* actual spammers who only spam a discord link
|
||||
* users who may comment with a link but have context for it either in the current comment or in their history
|
||||
* users who may comment with a link but it's a one-off event (no other links historically)
|
||||
|
||||
Additionally, you could modify either or both of these checks to not remove one-off discord link comments but still remove the comment if the user has a historical trend of spamming links.
|
||||
|
||||
75 docs/examples/subredditReady/discordSpam.json5 (new file)
@@ -0,0 +1,75 @@
|
||||
{
|
||||
"polling": ["newComm"],
|
||||
"checks": [
|
||||
{
|
||||
"name": "ban discord only spammer",
|
||||
"description": "ban a user who spams only a discord link many times historically",
|
||||
"kind": "comment",
|
||||
"condition": "AND",
|
||||
"rules": [
|
||||
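// named rule references -- the full rule definitions (same names) are in the "remove discord spam" check below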
"linkOnlySpam",
|
||||
"linkAnywhereHistoricalSpam",
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"kind": "remove"
|
||||
},
|
||||
{
|
||||
"kind": "ban",
|
||||
"content": "spamming discord links"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "remove discord spam",
|
||||
"description": "remove comments from users who only link to discord or mention discord link many times historically",
|
||||
"kind": "comment",
|
||||
"condition": "OR",
|
||||
"rules": [
|
||||
{
|
||||
"name": "linkOnlySpam",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
{
|
||||
"name": "only link",
|
||||
"regex": "/^.*(discord\\.gg\\/[\\w\\d]+)$/i",
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"condition": "AND",
|
||||
"rules": [
|
||||
{
|
||||
"name": "linkAnywhereSpam",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
{
|
||||
"name": "contains link anywhere",
|
||||
"regex": "/^.*(discord\\.gg\\/[\\w\\d]+).*$/i",
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "linkAnywhereHistoricalSpam",
|
||||
"kind": "regex",
|
||||
"criteria": [
|
||||
{
|
||||
"name": "contains links anywhere historically",
|
||||
"regex": "/^.*(discord\\.gg\\/[\\w\\d]+).*$/i",
|
||||
"totalMatchThreshold": ">= 3",
|
||||
"lookAt": "comments",
|
||||
"window": 10
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"kind": "remove"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -50,6 +50,18 @@ tsc -p .
|
||||
### [Heroku Quick Deploy](https://heroku.com/about)
|
||||
[](https://dashboard.heroku.com/new?template=https://github.com/FoxxMD/context-mod)
|
||||
|
||||
This template provides a **web** and **worker** dyno for heroku.
|
||||
|
||||
* **Web** -- Will run the bot **and** the web interface for ContextMod.
|
||||
* **Worker** -- Will run **just** the bot.
|
||||
|
||||
Be aware that Heroku's [free dyno plan](https://devcenter.heroku.com/articles/free-dyno-hours#dyno-sleeping) enacts some limits:
|
||||
|
||||
* A **Web** dyno will go to sleep (pause) after 30 minutes without web activity -- so your bot will ALSO go to sleep at this time
|
||||
* The **Worker** dyno **will not** go to sleep but you will NOT be able to access the web interface. You can, however, still see how CM is running by reading the logs for the dyno.
|
||||
|
||||
If you want to use a free dyno it is recommended you perform first-time setup (bot authentication and configuration, testing, etc...) with the **Web** dyno, then SWITCH to a **Worker** dyno so it can run 24/7.
|
||||
|
||||
# Bot Authentication
|
||||
|
||||
Next you need to create a bot and authenticate it with Reddit. Follow the [bot authentication guide](/docs/botAuthentication.md) to complete this step.
|
||||
|
||||
201 docs/imageComparison.md (new file)
@@ -0,0 +1,201 @@
|
||||
# Overview
|
||||
|
||||
ContextMod supports comparing image content, for the purpose of detecting duplicates, with two different but complementary systems. Image comparison behavior is available for the following rules:
|
||||
|
||||
* [Recent Activity](/docs/examples/recentActivity)
|
||||
* Repeat Activity (In-progress)
|
||||
|
||||
To enable comparisons reference the example below (at the top-level of your rule) and configure as needed:
|
||||
|
||||
```json5
|
||||
{
|
||||
"name": "ruleWithImageDetection",
|
||||
"kind": "recentActivity",
|
||||
// Add block below...
|
||||
//
|
||||
"imageDetection": {
|
||||
// enables image comparison
|
||||
"enable": true,
|
||||
// The difference, in percentage, between the reference submission and the submissions being checked
|
||||
// must be less than this number to consider the images "the same"
|
||||
"threshold": 5,
|
||||
// optional
|
||||
// set the behavior for determining if image comparison should occur on a URL:
|
||||
//
|
||||
// "extension" => try image detection if URL ends in a known image extension (jpeg, gif, png, bmp, etc.)
|
||||
// "unknown" => try image detection if URL ends in known image extension OR there is no extension OR the extension is unknown (not video, html, doc, etc...)
|
||||
// "all" => ALWAYS try image detection, regardless of URL extension
|
||||
//
|
||||
// if fetchBehavior is not defined then "extension" is the default
|
||||
"fetchBehavior": "extension",
|
||||
},
|
||||
//
|
||||
// And above ^^^
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
**Perceptual Hashing** (`hash`) and **Pixel Comparisons** (`pixel`) may be used at the same time. Refer to the documentation below to see how they interact.
|
||||
|
||||
**Note:** Regardless of `fetchBehavior`, if the response from the URL does not indicate it is an image then image detection will not occur. IE Response `Content-Type` must contain `image`
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Both image comparison systems require [Sharp](https://sharp.pixelplumbing.com/) as a dependency. Most modern operating systems running Node.js >= 12.13.0 do not require installing additional dependencies in order to use Sharp.
|
||||
|
||||
If you are using the docker image for ContextMod (`foxxmd/context-mod`) Sharp is built-in.
|
||||
|
||||
If you are installing ContextMod using npm then **Sharp should be installed automatically as an optional dependency.**
|
||||
|
||||
**If you do not want to install it automatically** install ContextMod with the following command:
|
||||
|
||||
```
|
||||
npm install --no-optional
|
||||
```
|
||||
|
||||
If you are using ContextMod as part of a larger project you may want to require Sharp in your own package:
|
||||
|
||||
```
|
||||
npm install sharp@0.29.1 --save
|
||||
```
|
||||
|
||||
# Comparison Systems
|
||||
|
||||
## Perceptual Hashing
|
||||
|
||||
[Perceptual Hashing](https://en.wikipedia.org/wiki/Perceptual_hashing) creates a text fingerprint of an image by:
|
||||
|
||||
* Dividing up the image into a grid
|
||||
* Using an algorithm to derive a value from the pixels in each grid
|
||||
* Adding up all the values to create a unique string (the "fingerprint")
|
||||
|
||||
An example of how a perceptual hash can work [can be found here.](https://www.hackerfactor.com/blog/?/archives/432-Looks-Like-It.html)
|
||||
|
||||
ContextMod uses [blockhash-js](https://github.com/commonsmachinery/blockhash-js) which is a javascript implementation of the algorithm described in the paper [Block Mean Value Based Image Perceptual Hashing by Bian Yang, Fan Gu and Xiamu Niu.](https://ieeexplore.ieee.org/document/4041692)
|
||||
|
||||
|
||||
**Advantages**
|
||||
|
||||
* Low memory requirements and not CPU intensive
|
||||
* Does not require any image transformations
|
||||
* Hash results can be stored to make future comparisons even faster and skip downloading images (cached by url)
|
||||
* Resolution-independent
|
||||
|
||||
**Disadvantages**
|
||||
|
||||
* Hash is weak when image differences are based only on color
|
||||
* Hash is weak when image contains lots of text
|
||||
* Higher accuracy requires larger calculation (more bits required)
|
||||
|
||||
**When should I use it?**
|
||||
|
||||
* General duplicate detection
|
||||
* Comparing many images
|
||||
* Comparing the same images often
|
||||
|
||||
### How To Use
|
||||
|
||||
If `imageDetection.enable` is `true` then hashing is enabled by default and no further configuration is required.
|
||||
|
||||
To further configure hashing refer to this code block:
|
||||
|
||||
```json5
|
||||
{
|
||||
"name": "ruleWithImageDetectionAndConfiguredHashing",
|
||||
"kind": "recentActivity",
|
||||
"imageDetection": {
|
||||
"enable": true,
|
||||
// Add block below...
|
||||
//
|
||||
"hash": {
|
||||
// enable or disable hash comparisons (enabled by default)
|
||||
"enable": true,
|
||||
// determines accuracy of hash and granularity of hash comparison (comparison to other hashes)
|
||||
// the higher the bits the more accurate the comparison
|
||||
//
|
||||
// NOTE: Hashes of different sizes (bits) cannot be compared. If you are caching hashes make sure all rules where results may be shared use the same bit count to ensure hashes can be compared. Otherwise hashes will be recomputed.
|
||||
"bits": 32, // default is 32 if not defined
|
||||
//
|
||||
// number of seconds to cache an image hash
|
||||
"ttl": 60, // default is 60 if not defined
|
||||
//
|
||||
// "High Confidence" Threshold
|
||||
// If the difference in comparison is equal to or less than this number the images are considered the same and pixel comparison WILL NOT occur
|
||||
//
|
||||
// Defaults to the parent-level `threshold` value if not present
|
||||
//
|
||||
// Use null if you want pixel comparison to ALWAYS occur (softThreshold must be present)
|
||||
"hardThreshold": 5,
|
||||
//
|
||||
// "Low Confidence" Threshold -- only used if `pixel` is enabled
|
||||
// If the difference in comparison is:
|
||||
//
|
||||
// 1) equal to or less than this value and
|
||||
// 2) the value is greater than `hardThreshold`
|
||||
//
|
||||
// the images will be compared using the `pixel` method
|
||||
"softThreshold": 0,
|
||||
},
|
||||
//
|
||||
// And above ^^^
|
||||
//"pixel": {...}
|
||||
},
|
||||
//...
|
||||
```
|
||||
|
||||
## Pixel Comparison
|
||||
|
||||
This approach is as straightforward as it sounds. Both images are compared, pixel by pixel, to determine the difference between the two. ContextMod uses [pixelmatch](https://github.com/mapbox/pixelmatch) to do the comparison.
|
||||
|
||||
**Advantages**
|
||||
|
||||
* Extremely accurate, high-confidence on difference percentage
|
||||
* Strong when comparing text-based images or color-only differences
|
||||
|
||||
**Disadvantages**
|
||||
|
||||
* High memory requirements (10-30MB per comparison) and CPU intensive
|
||||
* Weak against similar images with different aspect ratios
|
||||
* Requires image transformations (resize, crop) before comparison
|
||||
* Can only store image-to-image results (no single image fingerprints)
|
||||
|
||||
**When should I use it?**
|
||||
|
||||
* Require very high accuracy in comparison results
|
||||
* Comparing mostly text-based images or subtle color/detail differences
|
||||
* As a secondary, high-confidence confirmation of comparison result after hashing
|
||||
|
||||
### How To Use
|
||||
|
||||
By default pixel comparisons **are not enabled.** They must be explicitly enabled in configuration.
|
||||
|
||||
Pixel comparisons will be performed in either of these scenarios:
|
||||
|
||||
* pixel is enabled, hashing is enabled and `hash.softThreshold` is defined
|
||||
* When a comparison occurs that is less different than `softThreshold` but more different than `hardThreshold` (or `"hardThreshold": null`), then pixel comparison will occur as a high-confidence check
|
||||
* Example
|
||||
* hash comparison => 7% difference
|
||||
* `"softThreshold": 10`
|
||||
* `"hardThreshold": 4`
|
||||
* `hash.enable` is `false` and `pixel.enable` is true
|
||||
* hashing is skipped entirely and only pixel comparisons are performed
|
||||
|
||||
To configure pixel comparisons refer to this code block:
|
||||
|
||||
```json5
|
||||
{
|
||||
"name": "ruleWithImageDetectionAndPixelEnabled",
|
||||
"kind": "recentActivity",
|
||||
"imageDetection": {
|
||||
//"hash": {...}
|
||||
"pixel": {
|
||||
// enable or disable pixel comparisons (disabled by default)
|
||||
"enable": true,
|
||||
// if the comparison difference percentage is equal to or less than this value the images are considered the same
|
||||
//
|
||||
// if not defined the value from imageDetection.threshold will be used
|
||||
"threshold": 5
|
||||
}
|
||||
},
|
||||
//...
|
||||
```
|
||||
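Putting the two systems together, a configuration matching the two-stage behavior described above might look like this (a sketch; the threshold values are illustrative):

```json5
{
  "name": "ruleWithTwoStageImageDetection",
  "kind": "recentActivity",
  "imageDetection": {
    "enable": true,
    "hash": {
      "enable": true,
      // differences of 4% or less are accepted on the hash alone
      "hardThreshold": 4,
      // differences between 4% and 10% trigger a pixel comparison as a second, high-confidence check
      "softThreshold": 10
    },
    "pixel": {
      "enable": true,
      // the pixel comparison must find a difference of 5% or less to confirm the match
      "threshold": 5
    }
  },
  //...
}
```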
29 heroku.Dockerfile (new file)
@@ -0,0 +1,29 @@
|
||||
FROM node:16-alpine3.14
|
||||
|
||||
ENV TZ=Etc/GMT
|
||||
|
||||
# vips required to run sharp library for image comparison
|
||||
RUN echo "http://dl-4.alpinelinux.org/alpine/v3.14/community" >> /etc/apk/repositories \
|
||||
&& apk --update add vips
|
||||
|
||||
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
|
||||
|
||||
WORKDIR /usr/app
|
||||
|
||||
COPY package*.json ./
|
||||
COPY tsconfig.json .
|
||||
|
||||
RUN npm install
|
||||
|
||||
ADD . /usr/app
|
||||
|
||||
RUN npm run build
|
||||
|
||||
ENV NPM_CONFIG_LOGLEVEL debug
|
||||
|
||||
ARG log_dir=/home/node/logs
|
||||
RUN mkdir -p $log_dir
|
||||
VOLUME $log_dir
|
||||
ENV LOG_DIR=$log_dir
|
||||
|
||||
CMD [ "node", "src/index.js", "run", "all", "--port $PORT"]
|
||||
@@ -1,3 +1,4 @@
|
||||
build:
|
||||
docker:
|
||||
worker: Dockerfile
|
||||
web: heroku.Dockerfile
|
||||
worker: heroku.Dockerfile
|
||||
|
||||
4449 package-lock.json (generated): file diff suppressed because it is too large
19 package.json
@@ -7,7 +7,6 @@
    "test": "echo \"Error: no tests installed\" && exit 1",
    "build": "tsc",
    "start": "node src/index.js run",
    "guard": "ts-auto-guard src/JsonConfig.ts",
    "schema": "npm run -s schema-app & npm run -s schema-ruleset & npm run -s schema-rule & npm run -s schema-action & npm run -s schema-config",
    "schema-app": "typescript-json-schema tsconfig.json JSONConfig --out src/Schema/App.json --required --tsNodeRegister --refs",
    "schema-ruleset": "typescript-json-schema tsconfig.json RuleSetJson --out src/Schema/RuleSet.json --required --tsNodeRegister --refs",
@@ -26,7 +25,10 @@
  "license": "ISC",
  "dependencies": {
    "@awaitjs/express": "^0.8.0",
    "@googleapis/youtube": "^2.0.0",
    "@stdlib/regexp-regexp": "^0.0.6",
    "ajv": "^7.2.4",
    "ansi-regex": ">=5.0.1",
    "async": "^3.2.0",
    "autolinker": "^3.14.3",
    "body-parser": "^1.19.0",
@@ -49,30 +51,34 @@
    "got": "^11.8.2",
    "he": "^1.2.0",
    "http-proxy": "^1.18.1",
    "image-size": "^1.0.0",
    "js-yaml": "^4.1.0",
    "json5": "^2.2.0",
    "jsonwebtoken": "^8.5.1",
    "leven": "^3.1.0",
    "lodash": "^4.17.21",
    "lru-cache": "^6.0.0",
    "monaco-editor": "^0.27.0",
    "mustache": "^4.2.0",
    "node-fetch": "^2.6.1",
    "normalize-url": "^6.1.0",
    "object-hash": "^2.2.0",
    "p-event": "^4.2.0",
    "p-map": "^4.0.0",
    "passport": "^0.4.1",
    "passport-custom": "^1.1.1",
    "passport-jwt": "^4.0.0",
    "pixelmatch": "^5.2.1",
    "pretty-print-json": "^1.0.3",
    "safe-stable-stringify": "^1.1.1",
    "snoostorm": "^1.5.2",
    "snoowrap": "^1.23.0",
    "socket.io": "^4.1.3",
    "string-similarity": "^4.0.4",
    "tcp-port-used": "^1.0.2",
    "triple-beam": "^1.3.0",
    "typescript": "^4.3.4",
    "webhook-discord": "^3.7.7",
    "winston": "FoxxMD/winston#fbab8de969ecee578981c77846156c7f43b5f01e",
    "winston": "github:FoxxMD/winston#fbab8de969ecee578981c77846156c7f43b5f01e",
    "winston-daily-rotate-file": "^4.5.5",
    "winston-duplex": "^0.1.1",
    "winston-transport": "^4.4.0",
@@ -100,10 +106,15 @@
    "@types/object-hash": "^2.1.0",
    "@types/passport": "^1.0.7",
    "@types/passport-jwt": "^3.0.6",
    "@types/pixelmatch": "^5.2.4",
    "@types/sharp": "^0.29.2",
    "@types/string-similarity": "^4.0.0",
    "@types/tcp-port-used": "^1.0.0",
    "@types/triple-beam": "^1.3.2",
    "ts-auto-guard": "*",
    "ts-json-schema-generator": "^0.93.0",
    "typescript-json-schema": "^0.50.1"
  },
  "optionalDependencies": {
    "sharp": "^0.29.1"
  }
}
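The newly added `pixelmatch` dependency and the optional `sharp` dependency are what power the pixel comparison feature (the heroku.Dockerfile above installs `vips` for the same reason). Below is a rough sketch of how the two libraries can be combined to produce a difference percentage. It is illustrative only, not ContextMod's actual implementation; the normalized dimensions and the `threshold` value are arbitrary:

```typescript
import sharp from 'sharp';
import pixelmatch from 'pixelmatch';

// Returns the percentage of pixels that differ between two images.
// Both images are normalized to the same dimensions and raw RGBA data first.
const pixelDifference = async (imgA: Buffer, imgB: Buffer, width = 400, height = 400): Promise<number> => {
    const toRaw = (img: Buffer) => sharp(img)
        .resize(width, height, {fit: 'fill'})
        .ensureAlpha()
        .raw()
        .toBuffer();

    const [rawA, rawB] = await Promise.all([toRaw(imgA), toRaw(imgB)]);
    const mismatched = pixelmatch(rawA, rawB, null, width, height, {threshold: 0.1});
    return (mismatched / (width * height)) * 100;
};
```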
@@ -2,23 +2,33 @@ import {ActionJson, ActionConfig} from "./index";
import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../Rule";
import {ActionProcessResult} from "../Common/interfaces";

export class ApproveAction extends Action {
    getKind() {
        return 'Approve';
    }

    async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
    async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
        const dryRun = runtimeDryrun || this.dryRun;
        //snoowrap typing issue, thinks comments can't be locked
        // @ts-ignore
        if (item.approved) {
            this.logger.warn('Item is already approved');
            return {
                dryRun,
                success: false,
                result: 'Item is already approved'
            }
        }
        if (!dryRun) {
            // @ts-ignore
            await item.approve();
        }
        return {
            dryRun,
            success: true,
        }
    }
}
@@ -3,7 +3,7 @@ import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../Rule";
import {renderContent} from "../Utils/SnoowrapUtils";
import {Footer} from "../Common/interfaces";
import {ActionProcessResult, Footer} from "../Common/interfaces";

export class BanAction extends Action {

@@ -33,7 +33,7 @@ export class BanAction extends Action {
        return 'Ban';
    }

    async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
    async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
        const dryRun = runtimeDryrun || this.dryRun;
        const content = this.message === undefined ? undefined : await this.resources.getContent(this.message, item.subreddit);
        const renderedBody = content === undefined ? undefined : await renderContent(content, item, ruleResults, this.resources.userNotes);
@@ -58,6 +58,11 @@ export class BanAction extends Action {
                duration: this.duration
            });
        }
        return {
            dryRun,
            success: true,
            result: `Banned ${item.author.name} ${durText}${this.reason !== undefined ? ` (${this.reason})` : ''}`
        };
    }
}
@@ -2,8 +2,9 @@ import Action, {ActionJson, ActionOptions} from "./index";
import {Comment} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {renderContent} from "../Utils/SnoowrapUtils";
import {Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
import {ActionProcessResult, Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
import {RuleResult} from "../Rule";
import {truncateStringToLength} from "../util";

export class CommentAction extends Action {
    content: string;
@@ -32,7 +33,7 @@ export class CommentAction extends Action {
        return 'Comment';
    }

    async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
    async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
        const dryRun = runtimeDryrun || this.dryRun;
        const content = await this.resources.getContent(this.content, item.subreddit);
        const body = await renderContent(content, item, ruleResults, this.resources.userNotes);
@@ -44,7 +45,11 @@ export class CommentAction extends Action {

        if(item.archived) {
            this.logger.warn('Cannot comment because Item is archived');
            return;
            return {
                dryRun,
                success: false,
                result: 'Cannot comment because Item is archived'
            };
        }
        let reply: Comment;
        if(!dryRun) {
@@ -62,6 +67,19 @@ export class CommentAction extends Action {
            // @ts-ignore
            await reply.distinguish({sticky: this.sticky});
        }
        let modifiers = [];
        if(this.distinguish) {
            modifiers.push('Distinguished');
        }
        if(this.sticky) {
            modifiers.push('Stickied');
        }
        const modifierStr = modifiers.length === 0 ? '' : `[${modifiers.join(' | ')}]`;
        return {
            dryRun,
            success: true,
            result: `${modifierStr}${this.lock ? ' - Locked Author\'s Activity - ' : ''}${truncateStringToLength(100)(body)}`
        };
    }
}
@@ -2,24 +2,34 @@ import {ActionJson, ActionConfig} from "./index";
import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../Rule";
import {ActionProcessResult} from "../Common/interfaces";

export class LockAction extends Action {
    getKind() {
        return 'Lock';
    }

    async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
    async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
        const dryRun = runtimeDryrun || this.dryRun;
        //snoowrap typing issue, thinks comments can't be locked
        // @ts-ignore
        if (item.locked) {
            this.logger.warn('Item is already locked');
            return {
                dryRun,
                success: false,
                result: 'Item is already locked'
            };
        }
        if (!dryRun) {
            //snoowrap typing issue, thinks comments can't be locked
            // @ts-ignore
            await item.lock();
        }
        return {
            dryRun,
            success: true
        }
    }
}
@@ -2,9 +2,17 @@ import Action, {ActionJson, ActionOptions} from "./index";
|
||||
import {Comment, ComposeMessageParams} from "snoowrap";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import {renderContent} from "../Utils/SnoowrapUtils";
|
||||
import {Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
|
||||
import {ActionProcessResult, Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {boolToString} from "../util";
|
||||
import {
|
||||
asSubmission,
|
||||
boolToString,
|
||||
isSubmission,
|
||||
parseRedditEntity,
|
||||
REDDIT_ENTITY_REGEX_URL,
|
||||
truncateStringToLength
|
||||
} from "../util";
|
||||
import SimpleError from "../Utils/SimpleError";
|
||||
|
||||
export class MessageAction extends Action {
|
||||
content: string;
|
||||
@@ -14,6 +22,7 @@ export class MessageAction extends Action {
|
||||
footer?: false | string;
|
||||
|
||||
title?: string;
|
||||
to?: string;
|
||||
asSubreddit: boolean;
|
||||
|
||||
constructor(options: MessageActionOptions) {
|
||||
@@ -23,7 +32,9 @@ export class MessageAction extends Action {
|
||||
asSubreddit,
|
||||
title,
|
||||
footer,
|
||||
to,
|
||||
} = options;
|
||||
this.to = to;
|
||||
this.footer = footer;
|
||||
this.content = content;
|
||||
this.asSubreddit = asSubreddit;
|
||||
@@ -34,7 +45,7 @@ export class MessageAction extends Action {
|
||||
return 'Message';
|
||||
}
|
||||
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
|
||||
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
const content = await this.resources.getContent(this.content);
|
||||
const body = await renderContent(content, item, ruleResults, this.resources.userNotes);
|
||||
@@ -42,19 +53,38 @@ export class MessageAction extends Action {
|
||||
const footer = await this.resources.generateFooter(item, this.footer);
|
||||
|
||||
const renderedContent = `${body}${footer}`;
|
||||
// @ts-ignore
|
||||
const author = await item.author.fetch() as RedditUser;
|
||||
|
||||
let recipient = item.author.name;
|
||||
if(this.to !== undefined) {
|
||||
// parse to value
|
||||
try {
|
||||
const entityData = parseRedditEntity(this.to, 'user');
|
||||
if(entityData.type === 'user') {
|
||||
recipient = entityData.name;
|
||||
} else {
|
||||
recipient = `/r/${entityData.name}`;
|
||||
}
|
||||
} catch (err: any) {
|
||||
this.logger.error(`'to' field for message was not in a valid format. See ${REDDIT_ENTITY_REGEX_URL} for valid examples`);
|
||||
this.logger.error(err);
|
||||
err.logged = true;
|
||||
throw err;
|
||||
}
|
||||
if(recipient.includes('/r/') && this.asSubreddit) {
|
||||
throw new SimpleError(`Cannot send a message as a subreddit to another subreddit. Requested recipient: ${recipient}`);
|
||||
}
|
||||
}
|
||||
|
||||
const msgOpts: ComposeMessageParams = {
|
||||
to: author,
|
||||
to: recipient,
|
||||
text: renderedContent,
|
||||
// @ts-ignore
|
||||
fromSubreddit: this.asSubreddit ? await item.subreddit.fetch() : undefined,
|
||||
subject: this.title || `Concerning your ${item instanceof Submission ? 'Submission' : 'Comment'}`,
|
||||
subject: this.title || `Concerning your ${isSubmission(item) ? 'Submission' : 'Comment'}`,
|
||||
};
|
||||
|
||||
const msgPreview = `\r\n
|
||||
TO: ${author.name}\r\n
|
||||
TO: ${recipient}\r\n
|
||||
Subject: ${msgOpts.subject}\r\n
|
||||
Sent As Modmail: ${boolToString(this.asSubreddit)}\r\n\r\n
|
||||
${renderedContent}`;
|
||||
@@ -64,6 +94,11 @@ export class MessageAction extends Action {
|
||||
if (!dryRun) {
|
||||
await this.client.composeMessage(msgOpts);
|
||||
}
|
||||
return {
|
||||
dryRun,
|
||||
success: true,
|
||||
result: truncateStringToLength(200)(msgPreview)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -73,6 +108,24 @@ export interface MessageActionConfig extends RequiredRichContent, Footer {
|
||||
* */
|
||||
asSubreddit: boolean
|
||||
|
||||
/**
|
||||
* Entity to send message to.
|
||||
*
|
||||
* If not present Message be will sent to the Author of the Activity being checked.
|
||||
*
|
||||
* Valid formats:
|
||||
*
|
||||
* * `aUserName` -- send to /u/aUserName
|
||||
* * `u/aUserName` -- send to /u/aUserName
|
||||
* * `r/aSubreddit` -- sent to modmail of /r/aSubreddit
|
||||
*
|
||||
* **Note:** Reddit does not support sending a message AS a subreddit TO another subreddit
|
||||
*
|
||||
* @pattern ^\s*(\/[ru]\/|[ru]\/)*(\w+)*\s*$
|
||||
* @examples ["aUserName","u/aUserName","r/aSubreddit"]
|
||||
* */
|
||||
to?: string
|
||||
|
||||
/**
|
||||
* The title of the message
|
||||
*
|
||||
|
||||
@@ -3,24 +3,33 @@ import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../Rule";
import {activityIsRemoved} from "../Utils/SnoowrapUtils";
import {ActionProcessResult} from "../Common/interfaces";

export class RemoveAction extends Action {
    getKind() {
        return 'Remove';
    }

    async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
    async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
        const dryRun = runtimeDryrun || this.dryRun;
        // issue with snoowrap typings, doesn't think prop exists on Submission
        // @ts-ignore
        if (activityIsRemoved(item)) {
            this.logger.warn('Item is already removed');
            return;
            return {
                dryRun,
                success: false,
                result: 'Item is already removed',
            }
        }
        if (!dryRun) {
            // @ts-ignore
            await item.remove();
        }

        return {
            dryRun,
            success: true,
        }
    }
}
@@ -4,7 +4,7 @@ import Snoowrap, {Comment, Submission} from "snoowrap";
import {truncateStringToLength} from "../util";
import {renderContent} from "../Utils/SnoowrapUtils";
import {RuleResult} from "../Rule";
import {RichContent} from "../Common/interfaces";
import {ActionProcessResult, RichContent} from "../Common/interfaces";

// https://www.reddit.com/dev/api/oauth#POST_api_report
// denotes 100 characters maximum
@@ -23,7 +23,7 @@ export class ReportAction extends Action {
        return 'Report';
    }

    async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
    async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
        const dryRun = runtimeDryrun || this.dryRun;
        const content = await this.resources.getContent(this.content, item.subreddit);
        const renderedContent = await renderContent(content, item, ruleResults, this.resources.userNotes);
@@ -33,6 +33,12 @@ export class ReportAction extends Action {
            // @ts-ignore
            await item.report({reason: truncatedContent});
        }

        return {
            dryRun,
            success: true,
            result: truncatedContent
        };
    }
}
@@ -2,6 +2,7 @@ import {SubmissionActionConfig} from "./index";
import Action, {ActionJson, ActionOptions} from "../index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../../Rule";
import {ActionProcessResult} from "../../Common/interfaces";

export class FlairAction extends Action {
    text: string;
@@ -20,7 +21,17 @@ export class FlairAction extends Action {
        return 'Flair';
    }

    async process(item: Comment | Submission, ruleResults: RuleResult[]): Promise<void> {
    async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
        const dryRun = runtimeDryrun || this.dryRun;
        let flairParts = [];
        if(this.text !== '') {
            flairParts.push(`Text: ${this.text}`);
        }
        if(this.css !== '') {
            flairParts.push(`CSS: ${this.css}`);
        }
        const flairSummary = flairParts.length === 0 ? 'No flair (unflaired)' : flairParts.join(' | ');
        this.logger.verbose(flairSummary);
        if (item instanceof Submission) {
            if(!this.dryRun) {
                // @ts-ignore
@@ -28,6 +39,16 @@ export class FlairAction extends Action {
            }
        } else {
            this.logger.warn('Cannot flair Comment');
            return {
                dryRun,
                success: false,
                result: 'Cannot flair Comment',
            }
        }
        return {
            dryRun,
            success: true,
            result: flairSummary
        }
    }
}
@@ -5,6 +5,7 @@ import {renderContent} from "../Utils/SnoowrapUtils";
import {RuleResult} from "../Rule";
import {UserNote, UserNoteJson} from "../Subreddit/UserNotes";
import Submission from "snoowrap/dist/objects/Submission";
import {ActionProcessResult} from "../Common/interfaces";


export class UserNoteAction extends Action {
@@ -24,7 +25,7 @@ export class UserNoteAction extends Action {
        return 'User Note';
    }

    async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
    async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
        const dryRun = runtimeDryrun || this.dryRun;
        const content = await this.resources.getContent(this.content, item.subreddit);
        const renderedContent = await renderContent(content, item, ruleResults, this.resources.userNotes);
@@ -35,7 +36,11 @@ export class UserNoteAction extends Action {
            const existingNote = notes.find((x) => x.link.includes(item.id));
            if (existingNote) {
                this.logger.info(`Will not add note because one already exists for this Activity (${existingNote.time.local().format()}) and allowDuplicate=false`);
                return;
                return {
                    dryRun,
                    success: false,
                    result: `Will not add note because one already exists for this Activity (${existingNote.time.local().format()}) and allowDuplicate=false`
                };
            }
        }
        if (!dryRun) {
@@ -43,6 +48,11 @@ export class UserNoteAction extends Action {
        } else if (!await this.resources.userNotes.warningExists(this.type)) {
            this.logger.warn(`UserNote type '${this.type}' does not exist. If you meant to use this please add it through Toolbox first.`);
        }
        return {
            success: true,
            dryRun,
            result: `(${this.type}) ${renderedContent}`
        }
    }
}
@@ -2,9 +2,10 @@ import Snoowrap, {Comment, Submission} from "snoowrap";
|
||||
import {Logger} from "winston";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {SubredditResources} from "../Subreddit/SubredditResources";
|
||||
import {ChecksActivityState, TypedActivityStates} from "../Common/interfaces";
|
||||
import {ActionProcessResult, ActionResult, ChecksActivityState, TypedActivityStates} from "../Common/interfaces";
|
||||
import Author, {AuthorOptions} from "../Author/Author";
|
||||
import {mergeArr} from "../util";
|
||||
import LoggedError from "../Utils/LoggedError";
|
||||
|
||||
export abstract class Action {
|
||||
name?: string;
|
||||
@@ -53,47 +54,61 @@ export abstract class Action {
|
||||
return this.name === this.getKind() ? this.getKind() : `${this.getKind()} - ${this.name}`;
|
||||
}
|
||||
|
||||
async handle(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
|
||||
async handle(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionResult> {
|
||||
const dryRun = runtimeDryrun || this.dryRun;
|
||||
let actionRun = false;
|
||||
const itemPass = await this.resources.testItemCriteria(item, this.itemIs);
|
||||
if (!itemPass) {
|
||||
this.logger.verbose(`Activity did not pass 'itemIs' test, Action not run`);
|
||||
return;
|
||||
}
|
||||
const authorRun = async () => {
|
||||
|
||||
let actRes: ActionResult = {
|
||||
kind: this.getKind(),
|
||||
name: this.getActionUniqueName(),
|
||||
run: false,
|
||||
dryRun,
|
||||
success: false,
|
||||
};
|
||||
try {
|
||||
const itemPass = await this.resources.testItemCriteria(item, this.itemIs);
|
||||
if (!itemPass) {
|
||||
this.logger.verbose(`Activity did not pass 'itemIs' test, Action not run`);
|
||||
actRes.runReason = `Activity did not pass 'itemIs' test, Action not run`;
|
||||
return actRes;
|
||||
}
|
||||
if (this.authorIs.include !== undefined && this.authorIs.include.length > 0) {
|
||||
for (const auth of this.authorIs.include) {
|
||||
if (await this.resources.testAuthorCriteria(item, auth)) {
|
||||
await this.process(item, ruleResults, runtimeDryrun);
|
||||
return true;
|
||||
actRes.run = true;
|
||||
const results = await this.process(item, ruleResults, runtimeDryrun);
|
||||
return {...actRes, ...results};
|
||||
}
|
||||
}
|
||||
this.logger.verbose('Inclusive author criteria not matched, Action not run');
|
||||
return false;
|
||||
}
|
||||
if (!actionRun && this.authorIs.exclude !== undefined && this.authorIs.exclude.length > 0) {
|
||||
actRes.runReason = 'Inclusive author criteria not matched';
|
||||
return actRes;
|
||||
} else if (this.authorIs.exclude !== undefined && this.authorIs.exclude.length > 0) {
|
||||
for (const auth of this.authorIs.exclude) {
|
||||
if (await this.resources.testAuthorCriteria(item, auth, false)) {
|
||||
await this.process(item, ruleResults, runtimeDryrun);
|
||||
return true;
|
||||
actRes.run = true;
|
||||
const results = await this.process(item, ruleResults, runtimeDryrun);
|
||||
return {...actRes, ...results};
|
||||
}
|
||||
}
|
||||
this.logger.verbose('Exclusive author criteria not matched, Action not run');
|
||||
return false;
|
||||
actRes.runReason = 'Exclusive author criteria not matched';
|
||||
return actRes;
|
||||
}
|
||||
return null;
|
||||
|
||||
actRes.run = true;
|
||||
const results = await this.process(item, ruleResults, runtimeDryrun);
|
||||
return {...actRes, ...results};
|
||||
} catch (err: any) {
|
||||
if(!(err instanceof LoggedError)) {
|
||||
this.logger.error(`Encountered error while running`, err);
|
||||
}
|
||||
actRes.success = false;
|
||||
actRes.result = err.message;
|
||||
return actRes;
|
||||
}
|
||||
const authorRunResults = await authorRun();
|
||||
if (null === authorRunResults) {
|
||||
await this.process(item, ruleResults, runtimeDryrun);
|
||||
} else if (!authorRunResults) {
|
||||
return;
|
||||
}
|
||||
this.logger.verbose(`${dryRun ? 'DRYRUN - ' : ''}Done`);
|
||||
}
|
||||
|
||||
abstract process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryun?: boolean): Promise<void>;
|
||||
abstract process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryun?: boolean): Promise<ActionProcessResult>;
|
||||
}
|
||||
|
||||
export interface ActionOptions extends ActionConfig {
|
||||
|
||||
12  src/App.ts
@@ -3,8 +3,8 @@ import dayjs, {Dayjs} from "dayjs";
import {getLogger} from "./Utils/loggerFactory";
import {Invokee, OperatorConfig} from "./Common/interfaces";
import Bot from "./Bot";
import {castArray} from "lodash";
import LoggedError from "./Utils/LoggedError";
import {sleep} from "./util";

export class App {

@@ -53,8 +53,11 @@ export class App {
    }

    async onTerminate(reason = 'The application was shutdown') {
        for(const m of this.bots) {
            //await m.notificationManager.handle('runStateChanged', 'Application Shutdown', reason);
        for(const b of this.bots) {
            for(const m of b.subManagers) {
                await m.notificationManager.handle('runStateChanged', 'Application Shutdown', reason);
            }
            //await b.notificationManager.handle('runStateChanged', 'Application Shutdown', reason);
        }
    }

@@ -64,13 +67,14 @@ export class App {
            try {
                await b.testClient();
                await b.buildManagers();
                await sleep(2000);
                b.runManagers(causedBy).catch((err) => {
                    this.logger.error(`Unexpected error occurred while running Bot ${b.botName}. Bot must be re-built to restart`);
                    if (!err.logged || !(err instanceof LoggedError)) {
                        this.logger.error(err);
                    }
                });
            } catch (err) {
            } catch (err: any) {
                if (b.error === undefined) {
                    b.error = err.message;
                }
@@ -1,5 +1,6 @@
import {DurationComparor, UserNoteCriteria} from "../Rule";
import {CompareValue, CompareValueOrPercent} from "../Common/interfaces";
import {UserNoteCriteria} from "../Rule";
import {CompareValue, CompareValueOrPercent, DurationComparor} from "../Common/interfaces";
import {parseStringToRegex} from "../util";

/**
 * If present then these Author criteria are checked before running the rule. If criteria fails then the rule is skipped.
@@ -99,6 +100,24 @@ export interface AuthorCriteria {
     * Does Author's account have a verified email?
     * */
    verified?: boolean

    /**
     * Is the author shadowbanned?
     *
     * This is determined by trying to retrieve the author's profile. If a 404 is returned it is likely they are shadowbanned
     * */
    shadowBanned?: boolean

    /**
     * An (array of) string/regular expression to test contents of an Author's profile description against
     *
     * If no flags are specified then the **insensitive** flag is used by default
     *
     * If using an array then if **any** value in the array passes the description test passes
     *
     * @examples [["/test$/i", "look for this string literal"]]
     * */
    description?: string | string[]
}

export class Author implements AuthorCriteria {
@@ -112,6 +131,8 @@ export class Author implements AuthorCriteria {
    linkKarma?: string;
    totalKarma?: string;
    verified?: boolean;
    shadowBanned?: boolean;
    description?: string[];

    constructor(options: AuthorCriteria) {
        this.name = options.name;
@@ -123,6 +144,8 @@ export class Author implements AuthorCriteria {
        this.commentKarma = options.commentKarma;
        this.linkKarma = options.linkKarma;
        this.totalKarma = options.totalKarma;
        this.shadowBanned = options.shadowBanned;
        this.description = options.description === undefined ? undefined : Array.isArray(options.description) ? options.description : [options.description];
    }
}
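The new `shadowBanned` and `description` criteria slot into author criteria the same way the existing fields do. A hypothetical example of such a criteria object; the import path and values are illustrative only, not taken from the repository:

```typescript
import {AuthorCriteria} from "./Author/Author"; // path relative to src/, shown for illustration

// Only match authors who are not shadowbanned, have no verified email,
// and whose profile description matches one of these patterns
const exampleCriteria: AuthorCriteria = {
    shadowBanned: false,
    verified: false,
    description: ['/discord\\.gg/i', 'check out my profile'],
};
```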
451  src/Bot/index.ts
@@ -1,9 +1,9 @@
|
||||
import Snoowrap, {Subreddit} from "snoowrap";
|
||||
import Snoowrap, {Comment, Submission, Subreddit} from "snoowrap";
|
||||
import {Logger} from "winston";
|
||||
import dayjs, {Dayjs} from "dayjs";
|
||||
import {Duration} from "dayjs/plugin/duration";
|
||||
import EventEmitter from "events";
|
||||
import {BotInstanceConfig, Invokee, PAUSED, RUNNING, SYSTEM} from "../Common/interfaces";
|
||||
import {BotInstanceConfig, Invokee, PAUSED, RUNNING, STOPPED, SYSTEM, USER} from "../Common/interfaces";
|
||||
import {
|
||||
createRetryHandler,
|
||||
formatNumber,
|
||||
@@ -15,16 +15,18 @@ import {
|
||||
snooLogWrapper
|
||||
} from "../util";
|
||||
import {Manager} from "../Subreddit/Manager";
|
||||
import {ProxiedSnoowrap} from "../Utils/SnoowrapClients";
|
||||
import {ExtendedSnoowrap, ProxiedSnoowrap} from "../Utils/SnoowrapClients";
|
||||
import {ModQueueStream, UnmoderatedStream} from "../Subreddit/Streams";
|
||||
import {BotResourcesManager} from "../Subreddit/SubredditResources";
|
||||
import LoggedError from "../Utils/LoggedError";
|
||||
import pEvent from "p-event";
|
||||
import SimpleError from "../Utils/SimpleError";
|
||||
import {isRateLimitError} from "../Utils/Errors";
|
||||
|
||||
|
||||
class Bot {
|
||||
|
||||
client!: Snoowrap;
|
||||
client!: ExtendedSnoowrap;
|
||||
logger!: Logger;
|
||||
wikiLocation: string;
|
||||
dryRun?: true | undefined;
|
||||
@@ -33,12 +35,16 @@ class Bot {
|
||||
excludeSubreddits: string[];
|
||||
subManagers: Manager[] = [];
|
||||
heartbeatInterval: number;
|
||||
nextHeartbeat?: Dayjs;
|
||||
nextHeartbeat: Dayjs = dayjs();
|
||||
heartBeating: boolean = false;
|
||||
|
||||
softLimit: number | string = 250;
|
||||
hardLimit: number | string = 50;
|
||||
nannyMode?: 'soft' | 'hard';
|
||||
nannyRunning: boolean = false;
|
||||
nextNannyCheck: Dayjs = dayjs().add(10, 'second');
|
||||
nannyRetryHandler: Function;
|
||||
managerRetryHandler: Function;
|
||||
nextExpiration: Dayjs = dayjs();
|
||||
botName?: string;
|
||||
botLink?: string;
|
||||
@@ -46,6 +52,9 @@ class Bot {
|
||||
maxWorkers: number;
|
||||
startedAt: Dayjs = dayjs();
|
||||
sharedModqueue: boolean = false;
|
||||
streamListedOnce: string[] = [];
|
||||
|
||||
stagger: number;
|
||||
|
||||
apiSample: number[] = [];
|
||||
apiRollingAvg: number = 0;
|
||||
@@ -77,10 +86,12 @@ class Bot {
|
||||
heartbeatInterval,
|
||||
},
|
||||
credentials: {
|
||||
clientId,
|
||||
clientSecret,
|
||||
refreshToken,
|
||||
accessToken,
|
||||
reddit: {
|
||||
clientId,
|
||||
clientSecret,
|
||||
refreshToken,
|
||||
accessToken,
|
||||
},
|
||||
},
|
||||
snoowrap: {
|
||||
proxy,
|
||||
@@ -88,6 +99,7 @@ class Bot {
|
||||
},
|
||||
polling: {
|
||||
sharedMod,
|
||||
stagger = 2000,
|
||||
},
|
||||
queue: {
|
||||
maxWorkers,
|
||||
@@ -163,15 +175,15 @@ class Bot {
|
||||
}
|
||||
|
||||
try {
|
||||
this.client = proxy === undefined ? new Snoowrap(creds) : new ProxiedSnoowrap({...creds, proxy});
|
||||
this.client = proxy === undefined ? new ExtendedSnoowrap(creds) : new ProxiedSnoowrap({...creds, proxy});
|
||||
this.client.config({
|
||||
warnings: true,
|
||||
maxRetryAttempts: 5,
|
||||
debug,
|
||||
logger: snooLogWrapper(this.logger.child({labels: ['Snoowrap']}, mergeArr)),
|
||||
continueAfterRatelimitError: true,
|
||||
continueAfterRatelimitError: false,
|
||||
});
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
if(this.error === undefined) {
|
||||
this.error = err.message;
|
||||
this.logger.error(err);
|
||||
@@ -179,6 +191,10 @@ class Bot {
|
||||
}
|
||||
|
||||
const retryHandler = createRetryHandler({maxRequestRetry: 8, maxOtherRetry: 1}, this.logger);
|
||||
this.nannyRetryHandler = createRetryHandler({maxRequestRetry: 5, maxOtherRetry: 1}, this.logger);
|
||||
this.managerRetryHandler = createRetryHandler({maxRequestRetry: 5, maxOtherRetry: 10, waitOnRetry: false, clearRetryCountAfter: 2}, this.logger);
|
||||
|
||||
this.stagger = stagger ?? 2000;
|
||||
|
||||
const modStreamErrorListener = (name: string) => async (err: any) => {
|
||||
this.logger.error('Polling error occurred', err);
|
||||
@@ -195,12 +211,32 @@ class Bot {
|
||||
}
|
||||
}
|
||||
|
||||
const defaultUnmoderatedStream = new UnmoderatedStream(this.client, {subreddit: 'mod'});
|
||||
const modStreamListingListener = (name: string) => async (listing: (Comment|Submission)[]) => {
|
||||
// dole out in order they were received
|
||||
if(!this.streamListedOnce.includes(name)) {
|
||||
this.streamListedOnce.push(name);
|
||||
return;
|
||||
}
|
||||
for(const i of listing) {
|
||||
const foundManager = this.subManagers.find(x => x.subreddit.display_name === i.subreddit.display_name && x.modStreamCallbacks.get(name) !== undefined);
|
||||
if(foundManager !== undefined) {
|
||||
foundManager.modStreamCallbacks.get(name)(i);
|
||||
if(stagger !== undefined) {
|
||||
await sleep(stagger);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
const defaultUnmoderatedStream = new UnmoderatedStream(this.client, {subreddit: 'mod', limit: 100, clearProcessed: { size: 100, retain: 100 }});
|
||||
// @ts-ignore
|
||||
defaultUnmoderatedStream.on('error', modStreamErrorListener('unmoderated'));
|
||||
const defaultModqueueStream = new ModQueueStream(this.client, {subreddit: 'mod'});
|
||||
defaultUnmoderatedStream.on('listing', modStreamListingListener('unmoderated'));
|
||||
const defaultModqueueStream = new ModQueueStream(this.client, {subreddit: 'mod', limit: 100, clearProcessed: { size: 100, retain: 100 }});
|
||||
// @ts-ignore
|
||||
defaultModqueueStream.on('error', modStreamErrorListener('modqueue'));
|
||||
defaultModqueueStream.on('listing', modStreamListingListener('modqueue'));
|
||||
this.cacheManager.modStreams.set('unmoderated', defaultUnmoderatedStream);
|
||||
this.cacheManager.modStreams.set('modqueue', defaultModqueueStream);
|
||||
|
||||
@@ -233,19 +269,23 @@ class Bot {
|
||||
}
|
||||
}
|
||||
|
||||
async testClient() {
|
||||
async testClient(initial = true) {
|
||||
try {
|
||||
// @ts-ignore
|
||||
await this.client.getMe();
|
||||
this.logger.info('Test API call successful');
|
||||
} catch (err) {
|
||||
this.logger.error('An error occurred while trying to initialize the Reddit API Client which would prevent the entire application from running.');
|
||||
if(err.name === 'StatusCodeError') {
|
||||
} catch (err: any) {
|
||||
if (initial) {
|
||||
this.logger.error('An error occurred while trying to initialize the Reddit API Client which would prevent the entire application from running.');
|
||||
}
|
||||
if (err.name === 'StatusCodeError') {
|
||||
const authHeader = err.response.headers['www-authenticate'];
|
||||
if (authHeader !== undefined && authHeader.includes('insufficient_scope')) {
|
||||
this.logger.error('Reddit responded with a 403 insufficient_scope. Please ensure you have chosen the correct scopes when authorizing your account.');
|
||||
} else if(err.statusCode === 401) {
|
||||
} else if (err.statusCode === 401) {
|
||||
this.logger.error('It is likely a credential is missing or incorrect. Check clientId, clientSecret, refreshToken, and accessToken');
|
||||
} else if(err.statusCode === 400) {
|
||||
this.logger.error('Credentials may have been invalidated due to prior behavior. The error message may contain more information.');
|
||||
}
|
||||
this.logger.error(`Error Message: ${err.message}`);
|
||||
} else {
|
||||
@@ -272,10 +312,12 @@ class Bot {
|
||||
}
|
||||
this.logger.info(`Bot Name${botNameFromConfig ? ' (from config)' : ''}: ${this.botName}`);
|
||||
|
||||
for (const sub of await this.client.getModeratedSubreddits()) {
|
||||
// TODO don't know a way to check permissions yet
|
||||
availSubs.push(sub);
|
||||
let subListing = await this.client.getModeratedSubreddits({count: 100});
|
||||
while(!subListing.isFinished) {
|
||||
subListing = await subListing.fetchMore({amount: 100});
|
||||
}
|
||||
availSubs = subListing;
|
||||
|
||||
this.logger.info(`u/${user.name} is a moderator of these subreddits: ${availSubs.map(x => x.display_name_prefixed).join(', ')}`);
|
||||
|
||||
let subsToRun: Subreddit[] = [];
|
||||
@@ -298,8 +340,8 @@ class Bot {
|
||||
const normalExcludes = this.excludeSubreddits.map(x => x.toLowerCase());
|
||||
subsToRun = availSubs.filter(x => !normalExcludes.includes(x.display_name.toLowerCase()));
|
||||
} else {
|
||||
this.logger.info('No user-defined subreddit constraints detected, will run on all moderated subreddits');
|
||||
subsToRun = availSubs;
|
||||
this.logger.info(`No user-defined subreddit constraints detected, will run on all moderated subreddits EXCEPT own profile (${this.botAccount})`);
|
||||
subsToRun = availSubs.filter(x => x.display_name_prefixed !== this.botAccount);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -309,20 +351,31 @@ class Bot {
|
||||
const manager = new Manager(sub, this.client, this.logger, this.cacheManager, {dryRun: this.dryRun, sharedModqueue: this.sharedModqueue, wikiLocation: this.wikiLocation, botName: this.botName, maxWorkers: this.maxWorkers});
|
||||
try {
|
||||
await manager.parseConfiguration('system', true, {suppressNotification: true});
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
if (!(err instanceof LoggedError)) {
|
||||
this.logger.error(`Config was not valid:`, {subreddit: sub.display_name_prefixed});
|
||||
this.logger.error(err, {subreddit: sub.display_name_prefixed});
|
||||
}
|
||||
}
|
||||
// all errors from managers will count towards bot-level retry count
|
||||
manager.on('error', async (err) => await this.panicOnRetries(err));
|
||||
subSchedule.push(manager);
|
||||
}
|
||||
this.subManagers = subSchedule;
|
||||
}
|
||||
|
||||
// if the cumulative errors exceeds configured threshold then stop ALL managers as there is most likely something very bad happening
|
||||
async panicOnRetries(err: any) {
|
||||
if(!this.managerRetryHandler(err)) {
|
||||
for(const m of this.subManagers) {
|
||||
await m.stop();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async destroy(causedBy: Invokee) {
|
||||
this.logger.info('Stopping heartbeat and nanny processes, may take up to 5 seconds...');
|
||||
const processWait = Promise.all([pEvent(this.emitter, 'heartbeatStopped'), pEvent(this.emitter, 'nannyStopped')]);
|
||||
const processWait = pEvent(this.emitter, 'healthStopped');
|
||||
this.running = false;
|
||||
await processWait;
|
||||
for (const manager of this.subManagers) {
|
||||
@@ -333,7 +386,7 @@ class Bot {
|
||||
|
||||
async runModStreams(notify = false) {
|
||||
for(const [k,v] of this.cacheManager.modStreams) {
|
||||
if(!v.running && v.listeners('item').length > 0) {
|
||||
if(!v.running && this.subManagers.some(x => x.modStreamCallbacks.get(k) !== undefined)) {
|
||||
v.startInterval();
|
||||
this.logger.info(`Starting default ${k.toUpperCase()} mod stream`);
|
||||
if(notify) {
|
||||
@@ -343,6 +396,7 @@ class Bot {
|
||||
}
|
||||
}
|
||||
}
|
||||
await sleep(2000);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -355,150 +409,239 @@ class Bot {
|
||||
for (const manager of this.subManagers) {
|
||||
if (manager.validConfigLoaded && manager.botState.state !== RUNNING) {
|
||||
await manager.start(causedBy, {reason: 'Caused by application startup'});
|
||||
await sleep(this.stagger);
|
||||
}
|
||||
}
|
||||
|
||||
await this.runModStreams();
|
||||
|
||||
this.running = true;
|
||||
this.runApiNanny();
|
||||
this.nextNannyCheck = dayjs().add(10, 'second');
|
||||
this.nextHeartbeat = dayjs().add(this.heartbeatInterval, 'second');
|
||||
await this.healthLoop();
|
||||
}
|
||||
|
||||
async healthLoop() {
|
||||
while (this.running) {
|
||||
await sleep(5000);
|
||||
if (!this.running) {
|
||||
break;
|
||||
}
|
||||
if (dayjs().isSameOrAfter(this.nextNannyCheck)) {
|
||||
try {
|
||||
await this.runApiNanny();
|
||||
this.nextNannyCheck = dayjs().add(10, 'second');
|
||||
} catch (err: any) {
|
||||
this.logger.info('Delaying next nanny check for 2 minutes due to emitted error');
|
||||
this.nextNannyCheck = dayjs().add(120, 'second');
|
||||
}
|
||||
}
|
||||
if(dayjs().isSameOrAfter(this.nextHeartbeat)) {
|
||||
try {
|
||||
await this.heartbeat();
|
||||
} catch (err: any) {
|
||||
this.logger.error(`Error occurred during heartbeat check: ${err.message}`);
|
||||
}
|
||||
this.nextHeartbeat = dayjs().add(this.heartbeatInterval, 'second');
|
||||
}
|
||||
}
|
||||
this.emitter.emit('healthStopped');
|
||||
}
|
||||
|
||||
async heartbeat() {
|
||||
const heartbeat = `HEARTBEAT -- API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ~${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion === undefined ? 'N/A' : this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`
|
||||
this.logger.info(heartbeat);
|
||||
|
||||
// run sanity check to see if there is a service issue
|
||||
try {
|
||||
await this.testClient(false);
|
||||
} catch (err: any) {
|
||||
throw new SimpleError(`Something isn't right! This could be a Reddit API issue (service is down? buggy??) or an issue with the Bot account. Will not run heartbeat operations and will wait until next heartbeat (${dayjs.duration(this.nextHeartbeat.diff(dayjs())).humanize()}) to try again`);
|
||||
}
|
||||
let startedAny = false;
|
||||
|
||||
for (const s of this.subManagers) {
|
||||
if(s.botState.state === STOPPED && s.botState.causedBy === USER) {
|
||||
this.logger.debug('Skipping config check/restart on heartbeat due to previously being stopped by user', {subreddit: s.displayLabel});
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
// ensure calls to wiki page are also staggered so we aren't hitting api hard when bot has a ton of subreddits to check
|
||||
await sleep(this.stagger);
|
||||
const newConfig = await s.parseConfiguration();
|
||||
const willStart = newConfig || (s.queueState.state !== RUNNING && s.queueState.causedBy === SYSTEM) || (s.eventsState.state !== RUNNING && s.eventsState.causedBy === SYSTEM);
|
||||
if(willStart) {
|
||||
// stagger restart
|
||||
if (startedAny) {
|
||||
await sleep(this.stagger);
|
||||
}
|
||||
startedAny = true;
|
||||
if(newConfig || (s.queueState.state !== RUNNING && s.queueState.causedBy === SYSTEM))
|
||||
{
|
||||
await s.startQueue('system', {reason: newConfig ? 'Config updated on heartbeat triggered reload' : 'Heartbeat detected non-running queue'});
|
||||
}
|
||||
if(newConfig || (s.eventsState.state !== RUNNING && s.eventsState.causedBy === SYSTEM))
|
||||
{
|
||||
await s.startEvents('system', {reason: newConfig ? 'Config updated on heartbeat triggered reload' : 'Heartbeat detected non-running events'});
|
||||
}
|
||||
}
|
||||
if(s.botState.state !== RUNNING && s.eventsState.state === RUNNING && s.queueState.state === RUNNING) {
|
||||
s.botState = {
|
||||
state: RUNNING,
|
||||
causedBy: 'system',
|
||||
}
|
||||
}
|
||||
} catch (err: any) {
|
||||
this.logger.info('Stopping event polling to prevent activity processing queue from backing up. Will be restarted when config update succeeds.')
|
||||
await s.stopEvents('system', {reason: 'Invalid config will cause events to pile up in queue. Will be restarted when config update succeeds (next heartbeat).'});
|
||||
if(!(err instanceof LoggedError)) {
|
||||
this.logger.error(err, {subreddit: s.displayLabel});
|
||||
}
|
||||
if(this.nextHeartbeat !== undefined) {
|
||||
this.logger.info(`Will retry parsing config on next heartbeat (in ${dayjs.duration(this.nextHeartbeat.diff(dayjs())).humanize()})`, {subreddit: s.displayLabel});
|
||||
}
|
||||
}
|
||||
}
|
||||
await this.runModStreams(true);
|
||||
}
|
||||
|
||||
async runApiNanny() {
|
||||
try {
|
||||
mainLoop:
|
||||
while (this.running) {
|
||||
for(let i = 0; i < 2; i++) {
|
||||
await sleep(5000);
|
||||
if (!this.running) {
|
||||
break mainLoop;
|
||||
}
|
||||
}
|
||||
|
||||
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
|
||||
const nowish = dayjs().add(10, 'second');
|
||||
if (nowish.isAfter(this.nextExpiration)) {
|
||||
// it's possible no api calls are being made because of a hard limit
|
||||
// need to make an api call to update this
|
||||
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
|
||||
const nowish = dayjs().add(10, 'second');
|
||||
if (nowish.isAfter(this.nextExpiration)) {
|
||||
// it's possible no api calls are being made because of a hard limit
|
||||
// need to make an api call to update this
|
||||
let shouldRetry = true;
|
||||
while (shouldRetry) {
|
||||
try {
|
||||
// @ts-ignore
|
||||
await this.client.getMe();
|
||||
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
|
||||
}
|
||||
const rollingSample = this.apiSample.slice(0, 7)
|
||||
rollingSample.unshift(this.client.ratelimitRemaining);
|
||||
this.apiSample = rollingSample;
|
||||
const diff = this.apiSample.reduceRight((acc: number[], curr, index) => {
|
||||
if (this.apiSample[index + 1] !== undefined) {
|
||||
const d = Math.abs(curr - this.apiSample[index + 1]);
|
||||
if (d === 0) {
|
||||
return [...acc, 0];
|
||||
}
|
||||
return [...acc, d / 10];
|
||||
shouldRetry = false;
|
||||
} catch (err: any) {
|
||||
if(isRateLimitError(err)) {
|
||||
throw err;
|
||||
}
|
||||
return acc;
|
||||
}, []);
|
||||
this.apiRollingAvg = diff.reduce((acc, curr) => acc + curr, 0) / diff.length; // api requests per second
|
||||
this.depletedInSecs = this.client.ratelimitRemaining / this.apiRollingAvg; // number of seconds until current remaining limit is 0
|
||||
this.apiEstDepletion = dayjs.duration({seconds: this.depletedInSecs});
|
||||
this.logger.debug(`API Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`);
|
||||
|
||||
|
||||
let hardLimitHit = false;
|
||||
if (typeof this.hardLimit === 'string') {
|
||||
const hardDur = parseDuration(this.hardLimit);
|
||||
hardLimitHit = hardDur.asSeconds() > this.apiEstDepletion.asSeconds();
|
||||
} else {
|
||||
hardLimitHit = this.hardLimit > this.client.ratelimitRemaining;
|
||||
}
|
||||
|
||||
if (hardLimitHit) {
|
||||
if (this.nannyMode === 'hard') {
|
||||
continue;
|
||||
shouldRetry = await this.nannyRetryHandler(err);
|
||||
if (!shouldRetry) {
|
||||
throw err;
|
||||
}
|
||||
this.logger.info(`Detected HARD LIMIT of ${this.hardLimit} remaining`, {leaf: 'Api Nanny'});
|
||||
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${this.apiRollingAvg}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
|
||||
this.logger.info(`All subreddit event polling has been paused`, {leaf: 'Api Nanny'});
|
||||
|
||||
for (const m of this.subManagers) {
|
||||
m.pauseEvents('system');
|
||||
m.notificationManager.handle('runStateChanged', 'Hard Limit Triggered', `Hard Limit of ${this.hardLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit event polling has been paused.`, 'system', 'warn');
|
||||
}
|
||||
|
||||
this.nannyMode = 'hard';
|
||||
continue;
|
||||
}
|
||||
|
||||
let softLimitHit = false;
|
||||
if (typeof this.softLimit === 'string') {
|
||||
const softDur = parseDuration(this.softLimit);
|
||||
softLimitHit = softDur.asSeconds() > this.apiEstDepletion.asSeconds();
|
||||
} else {
|
||||
softLimitHit = this.softLimit > this.client.ratelimitRemaining;
|
||||
}
|
||||
|
||||
if (softLimitHit) {
|
||||
if (this.nannyMode === 'soft') {
|
||||
continue;
|
||||
}
|
||||
this.logger.info(`Detected SOFT LIMIT of ${this.softLimit} remaining`, {leaf: 'Api Nanny'});
|
||||
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
|
||||
this.logger.info('Trying to detect heavy usage subreddits...', {leaf: 'Api Nanny'});
|
||||
let threshold = 0.5;
|
||||
let offenders = this.subManagers.filter(x => {
|
||||
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
|
||||
return combinedPerSec > threshold;
|
||||
});
|
||||
if (offenders.length === 0) {
|
||||
threshold = 0.25;
|
||||
// reduce threshold
|
||||
offenders = this.subManagers.filter(x => {
|
||||
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
|
||||
return combinedPerSec > threshold;
|
||||
});
|
||||
}
|
||||
|
||||
if (offenders.length > 0) {
|
||||
this.logger.info(`Slowing subreddits using >- ${threshold}req/s:`, {leaf: 'Api Nanny'});
|
||||
for (const m of offenders) {
|
||||
m.delayBy = 1.5;
|
||||
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
|
||||
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
|
||||
}
|
||||
} else {
|
||||
this.logger.info(`Couldn't detect specific offenders, slowing all...`, {leaf: 'Api Nanny'});
|
||||
for (const m of this.subManagers) {
|
||||
m.delayBy = 1.5;
|
||||
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
|
||||
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
|
||||
}
|
||||
}
|
||||
this.nannyMode = 'soft';
|
||||
continue;
|
||||
}
|
||||
|
||||
if (this.nannyMode !== undefined) {
|
||||
this.logger.info('Turning off due to better conditions...', {leaf: 'Api Nanny'});
|
||||
for (const m of this.subManagers) {
|
||||
if (m.delayBy !== undefined) {
|
||||
m.delayBy = undefined;
|
||||
m.notificationManager.handle('runStateChanged', 'Normal Processing Resumed', 'Slow Mode has been turned off due to better API conditions', 'system');
|
||||
}
|
||||
if (m.queueState.state === PAUSED && m.queueState.causedBy === SYSTEM) {
|
||||
m.startQueue('system', {reason: 'API Nanny has been turned off due to better API conditions'});
|
||||
}
|
||||
if (m.eventsState.state === PAUSED && m.eventsState.causedBy === SYSTEM) {
|
||||
await m.startEvents('system', {reason: 'API Nanny has been turned off due to better API conditions'});
|
||||
}
|
||||
}
|
||||
this.nannyMode = undefined;
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.error('Error occurred during nanny loop', err);
|
||||
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
|
||||
}
|
||||
const rollingSample = this.apiSample.slice(0, 7)
|
||||
rollingSample.unshift(this.client.ratelimitRemaining);
|
||||
this.apiSample = rollingSample;
|
||||
const diff = this.apiSample.reduceRight((acc: number[], curr, index) => {
|
||||
if (this.apiSample[index + 1] !== undefined) {
|
||||
const d = Math.abs(curr - this.apiSample[index + 1]);
|
||||
if (d === 0) {
|
||||
return [...acc, 0];
|
||||
}
|
||||
return [...acc, d / 10];
|
||||
}
|
||||
return acc;
|
||||
}, []);
|
||||
this.apiRollingAvg = diff.reduce((acc, curr) => acc + curr, 0) / diff.length; // api requests per second
|
||||
this.depletedInSecs = this.client.ratelimitRemaining / this.apiRollingAvg; // number of seconds until current remaining limit is 0
|
||||
this.apiEstDepletion = dayjs.duration({seconds: this.depletedInSecs});
|
||||
this.logger.debug(`API Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`);
|
||||
|
||||
|
||||
let hardLimitHit = false;
|
||||
if (typeof this.hardLimit === 'string') {
|
||||
const hardDur = parseDuration(this.hardLimit);
|
||||
hardLimitHit = hardDur.asSeconds() > this.apiEstDepletion.asSeconds();
|
||||
} else {
|
||||
hardLimitHit = this.hardLimit > this.client.ratelimitRemaining;
|
||||
}
|
||||
|
||||
if (hardLimitHit) {
|
||||
if (this.nannyMode === 'hard') {
|
||||
return;
|
||||
}
|
||||
this.logger.info(`Detected HARD LIMIT of ${this.hardLimit} remaining`, {leaf: 'Api Nanny'});
|
||||
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${this.apiRollingAvg}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
|
||||
this.logger.info(`All subreddit event polling has been paused`, {leaf: 'Api Nanny'});
|
||||
|
||||
for (const m of this.subManagers) {
|
||||
m.pauseEvents('system');
|
||||
m.notificationManager.handle('runStateChanged', 'Hard Limit Triggered', `Hard Limit of ${this.hardLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit event polling has been paused.`, 'system', 'warn');
|
||||
}
|
||||
|
||||
this.nannyMode = 'hard';
|
||||
return;
|
||||
}
|
||||
|
||||
let softLimitHit = false;
|
||||
if (typeof this.softLimit === 'string') {
|
||||
const softDur = parseDuration(this.softLimit);
|
||||
softLimitHit = softDur.asSeconds() > this.apiEstDepletion.asSeconds();
|
||||
} else {
|
||||
softLimitHit = this.softLimit > this.client.ratelimitRemaining;
|
||||
}
|
||||
|
||||
if (softLimitHit) {
|
||||
if (this.nannyMode === 'soft') {
|
||||
return;
|
||||
}
|
||||
this.logger.info(`Detected SOFT LIMIT of ${this.softLimit} remaining`, {leaf: 'Api Nanny'});
|
||||
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
|
||||
this.logger.info('Trying to detect heavy usage subreddits...', {leaf: 'Api Nanny'});
|
||||
let threshold = 0.5;
|
||||
let offenders = this.subManagers.filter(x => {
|
||||
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
|
||||
return combinedPerSec > threshold;
|
||||
});
|
||||
if (offenders.length === 0) {
|
||||
threshold = 0.25;
|
||||
// reduce threshold
|
||||
offenders = this.subManagers.filter(x => {
|
||||
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
|
||||
return combinedPerSec > threshold;
|
||||
});
|
||||
}
|
||||
|
||||
if (offenders.length > 0) {
|
||||
this.logger.info(`Slowing subreddits using >- ${threshold}req/s:`, {leaf: 'Api Nanny'});
|
||||
for (const m of offenders) {
|
||||
m.delayBy = 1.5;
|
||||
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
|
||||
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
|
||||
}
|
||||
} else {
|
||||
this.logger.info(`Couldn't detect specific offenders, slowing all...`, {leaf: 'Api Nanny'});
|
||||
for (const m of this.subManagers) {
|
||||
m.delayBy = 1.5;
|
||||
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
|
||||
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
|
||||
}
|
||||
}
|
||||
this.nannyMode = 'soft';
|
||||
return
|
||||
}
|
||||
|
||||
if (this.nannyMode !== undefined) {
|
||||
this.logger.info('Turning off due to better conditions...', {leaf: 'Api Nanny'});
|
||||
for (const m of this.subManagers) {
|
||||
if (m.delayBy !== undefined) {
|
||||
m.delayBy = undefined;
|
||||
m.notificationManager.handle('runStateChanged', 'Normal Processing Resumed', 'Slow Mode has been turned off due to better API conditions', 'system');
|
||||
}
|
||||
if (m.queueState.state === PAUSED && m.queueState.causedBy === SYSTEM) {
|
||||
m.startQueue('system', {reason: 'API Nanny has been turned off due to better API conditions'});
|
||||
}
|
||||
if (m.eventsState.state === PAUSED && m.eventsState.causedBy === SYSTEM) {
|
||||
await m.startEvents('system', {reason: 'API Nanny has been turned off due to better API conditions'});
|
||||
}
|
||||
}
|
||||
this.nannyMode = undefined;
|
||||
}
|
||||
|
||||
} catch (err: any) {
|
||||
this.logger.error(`Error occurred during nanny loop: ${err.message}`);
|
||||
throw err;
|
||||
} finally {
|
||||
this.logger.info('Nanny stopped');
|
||||
this.emitter.emit('nannyStopped');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import {Check, CheckOptions, userResultCacheDefault, UserResultCacheOptions} from "./index";
|
||||
import {CommentState} from "../Common/interfaces";
|
||||
import {CommentState, UserResultCache} from "../Common/interfaces";
|
||||
import {Submission, Comment} from "snoowrap/dist/objects";
|
||||
import {RuleResult} from "../Rule";
|
||||
|
||||
export interface CommentCheckOptions extends CheckOptions {
|
||||
cacheUserResult?: UserResultCacheOptions;
|
||||
@@ -9,20 +10,12 @@ export interface CommentCheckOptions extends CheckOptions {
|
||||
export class CommentCheck extends Check {
|
||||
itemIs: CommentState[];
|
||||
|
||||
cacheUserResult: Required<UserResultCacheOptions>;
|
||||
|
||||
constructor(options: CommentCheckOptions) {
|
||||
super(options);
|
||||
const {
|
||||
itemIs = [],
|
||||
cacheUserResult = {},
|
||||
} = options;
|
||||
|
||||
this.cacheUserResult = {
|
||||
...userResultCacheDefault,
|
||||
...cacheUserResult
|
||||
}
|
||||
|
||||
this.itemIs = itemIs;
|
||||
this.logSummary();
|
||||
}
|
||||
@@ -31,7 +24,7 @@ export class CommentCheck extends Check {
|
||||
super.logSummary('comment');
|
||||
}
|
||||
|
||||
async getCacheResult(item: Submission | Comment): Promise<boolean | undefined> {
|
||||
async getCacheResult(item: Submission | Comment): Promise<UserResultCache | undefined> {
|
||||
if (this.cacheUserResult.enable) {
|
||||
return await this.resources.getCommentCheckCacheResult(item as Comment, {
|
||||
name: this.name,
|
||||
@@ -42,13 +35,22 @@ export class CommentCheck extends Check {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
async setCacheResult(item: Submission | Comment, result: boolean): Promise<void> {
|
||||
async setCacheResult(item: Submission | Comment, result: UserResultCache): Promise<void> {
|
||||
if (this.cacheUserResult.enable) {
|
||||
const {result: outcome, ruleResults} = result;
|
||||
|
||||
const res: UserResultCache = {
|
||||
result: outcome,
|
||||
// don't need to cache rule results if check was not triggered
|
||||
// since we only use rule results for actions
|
||||
ruleResults: outcome ? ruleResults : []
|
||||
};
|
||||
|
||||
await this.resources.setCommentCheckCacheResult(item as Comment, {
|
||||
name: this.name,
|
||||
authorIs: this.authorIs,
|
||||
itemIs: this.itemIs
|
||||
}, result, this.cacheUserResult.ttl)
|
||||
}, res, this.cacheUserResult.ttl)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import {Check, CheckOptions} from "./index";
|
||||
import {SubmissionState} from "../Common/interfaces";
|
||||
import {SubmissionState, UserResultCache} from "../Common/interfaces";
|
||||
import {Submission, Comment} from "snoowrap/dist/objects";
|
||||
import {RuleResult} from "../Rule";
|
||||
|
||||
export class SubmissionCheck extends Check {
|
||||
itemIs: SubmissionState[];
|
||||
@@ -15,11 +16,4 @@ export class SubmissionCheck extends Check {
|
||||
logSummary() {
|
||||
super.logSummary('submission');
|
||||
}
|
||||
|
||||
async getCacheResult(item: Submission | Comment) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
async setCacheResult(item: Submission | Comment, result: boolean) {
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,12 +16,13 @@ import {
|
||||
truncateStringToLength
|
||||
} from "../util";
|
||||
import {
|
||||
ActionResult,
|
||||
ChecksActivityState,
|
||||
CommentState,
|
||||
JoinCondition,
|
||||
JoinOperands,
|
||||
SubmissionState,
|
||||
TypedActivityStates
|
||||
TypedActivityStates, UserResultCache
|
||||
} from "../Common/interfaces";
|
||||
import * as RuleSchema from '../Schema/Rule.json';
|
||||
import * as RuleSetSchema from '../Schema/RuleSet.json';
|
||||
@@ -45,6 +46,7 @@ export abstract class Check implements ICheck {
|
||||
include: AuthorCriteria[],
|
||||
exclude: AuthorCriteria[]
|
||||
};
|
||||
cacheUserResult: Required<UserResultCacheOptions>;
|
||||
dryRun?: boolean;
|
||||
notifyOnTrigger: boolean;
|
||||
resources: SubredditResources;
|
||||
@@ -62,6 +64,7 @@ export abstract class Check implements ICheck {
|
||||
actions = [],
|
||||
notifyOnTrigger = false,
|
||||
subredditName,
|
||||
cacheUserResult = {},
|
||||
itemIs = [],
|
||||
authorIs: {
|
||||
include = [],
|
||||
@@ -88,6 +91,10 @@ export abstract class Check implements ICheck {
|
||||
exclude: exclude.map(x => new Author(x)),
|
||||
include: include.map(x => new Author(x)),
|
||||
}
|
||||
this.cacheUserResult = {
|
||||
...userResultCacheDefault,
|
||||
...cacheUserResult
|
||||
}
|
||||
this.dryRun = dryRun;
|
||||
for (const r of rules) {
|
||||
if (r instanceof Rule || r instanceof RuleSet) {
|
||||
@@ -170,10 +177,14 @@ export abstract class Check implements ICheck {
|
||||
}
|
||||
}
|
||||
|
||||
abstract getCacheResult(item: Submission | Comment) : Promise<boolean | undefined>;
|
||||
abstract setCacheResult(item: Submission | Comment, result: boolean): void;
|
||||
async getCacheResult(item: Submission | Comment) : Promise<UserResultCache | undefined> {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
async runRules(item: Submission | Comment, existingResults: RuleResult[] = []): Promise<[boolean, RuleResult[]]> {
|
||||
async setCacheResult(item: Submission | Comment, result: UserResultCache): Promise<void> {
|
||||
}
|
||||
|
||||
async runRules(item: Submission | Comment, existingResults: RuleResult[] = []): Promise<[boolean, RuleResult[], boolean?]> {
|
||||
try {
|
||||
let allRuleResults: RuleResult[] = [];
|
||||
let allResults: (RuleResult | RuleSetResult)[] = [];
|
||||
@@ -182,7 +193,7 @@ export abstract class Check implements ICheck {
|
||||
const cacheResult = await this.getCacheResult(item);
|
||||
if(cacheResult !== undefined) {
|
||||
this.logger.verbose(`Skipping rules run because result was found in cache, Check Triggered Result: ${cacheResult}`);
|
||||
return [cacheResult, allRuleResults];
|
||||
return [cacheResult.result, cacheResult.ruleResults, true];
|
||||
}
|
||||
|
||||
const itemPass = await this.resources.testItemCriteria(item, this.itemIs);
|
||||
@@ -257,30 +268,34 @@ export abstract class Check implements ICheck {
|
||||
// otherwise AND and did not return already so all passed
|
||||
this.logger.info(`${PASS} => Rules: ${resultsSummary(allResults, this.condition)}`);
|
||||
return [true, allRuleResults];
|
||||
} catch (e) {
|
||||
} catch (e: any) {
|
||||
e.logged = true;
|
||||
this.logger.warn(`Running rules failed due to uncaught exception`, e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
async runActions(item: Submission | Comment, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<Action[]> {
|
||||
async runActions(item: Submission | Comment, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionResult[]> {
|
||||
const dr = runtimeDryrun || this.dryRun;
|
||||
this.logger.debug(`${dr ? 'DRYRUN - ' : ''}Running Actions`);
|
||||
const runActions: Action[] = [];
|
||||
const runActions: ActionResult[] = [];
|
||||
for (const a of this.actions) {
|
||||
if(!a.enabled) {
|
||||
runActions.push({
|
||||
kind: a.getKind(),
|
||||
name: a.getActionUniqueName(),
|
||||
run: false,
|
||||
success: false,
|
||||
runReason: 'Not enabled',
|
||||
dryRun: (a.dryRun || dr) || false,
|
||||
});
|
||||
this.logger.info(`Action ${a.getActionUniqueName()} not run because it is not enabled.`);
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
await a.handle(item, ruleResults, runtimeDryrun);
|
||||
runActions.push(a);
|
||||
} catch (err) {
|
||||
this.logger.error(`Action ${a.getActionUniqueName()} encountered an error while running`, err);
|
||||
}
|
||||
const res = await a.handle(item, ruleResults, runtimeDryrun);
|
||||
runActions.push(res);
|
||||
}
|
||||
this.logger.info(`${dr ? 'DRYRUN - ' : ''}Ran Actions: ${runActions.map(x => x.getActionUniqueName()).join(' | ')}`);
|
||||
this.logger.info(`${dr ? 'DRYRUN - ' : ''}Ran Actions: ${runActions.map(x => x.name).join(' | ')}`);
|
||||
return runActions;
|
||||
}
|
||||
}
|
||||
@@ -337,6 +352,7 @@ export interface CheckOptions extends ICheck {
|
||||
notifyOnTrigger?: boolean
|
||||
resources: SubredditResources
|
||||
client: Snoowrap
|
||||
cacheUserResult?: UserResultCacheOptions;
|
||||
}
|
||||
|
||||
export interface CheckJson extends ICheck {
|
||||
@@ -371,6 +387,8 @@ export interface CheckJson extends ICheck {
|
||||
* @default false
|
||||
* */
|
||||
notifyOnTrigger?: boolean,
|
||||
|
||||
cacheUserResult?: UserResultCacheOptions;
|
||||
}
|
||||
|
||||
export interface SubmissionCheckJson extends CheckJson {
|
||||
@@ -388,6 +406,9 @@ export interface SubmissionCheckJson extends CheckJson {
|
||||
* 3. The rule results are not likely to change while cache is valid
|
||||
* */
|
||||
export interface UserResultCacheOptions {
|
||||
/**
|
||||
* @default false
|
||||
* */
|
||||
enable?: boolean,
|
||||
/**
|
||||
* The amount of time, in seconds, to cache this result
|
||||
@@ -396,17 +417,23 @@ export interface UserResultCacheOptions {
|
||||
* @examples [60]
|
||||
* */
|
||||
ttl?: number,
|
||||
/**
|
||||
* In the event the cache returns a triggered result should the actions for the check also be run?
|
||||
*
|
||||
* @default true
|
||||
* */
|
||||
runActions?: boolean
|
||||
}
|
||||
|
||||
export const userResultCacheDefault: Required<UserResultCacheOptions> = {
|
||||
enable: false,
|
||||
ttl: 60,
|
||||
runActions: true,
|
||||
}
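For reference, the defaults above merge underneath whatever `cacheUserResult` a subreddit config supplies, the same way the Check constructor spreads them. A minimal sketch, illustrative only; the import assumes a sibling module of `Check/index.ts`:

import {userResultCacheDefault, UserResultCacheOptions} from "./index";

// the config author supplies only the options they care about...
const userSupplied: UserResultCacheOptions = {enable: true, ttl: 120};

// ...and anything unspecified falls back to the defaults above
const effective: Required<UserResultCacheOptions> = {
    ...userResultCacheDefault,
    ...userSupplied,
};
// effective => {enable: true, ttl: 120, runActions: true}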
|
||||
|
||||
export interface CommentCheckJson extends CheckJson {
|
||||
kind: 'comment'
|
||||
itemIs?: CommentState[]
|
||||
cacheUserResult?: UserResultCacheOptions
|
||||
}
|
||||
|
||||
export type CheckStructuredJson = SubmissionCheckStructuredJson | CommentCheckStructuredJson;
|
||||
|
||||
248
src/Common/ImageData.ts
Normal file
@@ -0,0 +1,248 @@
|
||||
import fetch from "node-fetch";
|
||||
import {Submission} from "snoowrap/dist/objects";
|
||||
import {URL} from "url";
|
||||
import {absPercentDifference, getSharpAsync, isValidImageURL} from "../util";
|
||||
import sizeOf from "image-size";
|
||||
import SimpleError from "../Utils/SimpleError";
|
||||
import {Sharp} from "sharp";
|
||||
import {blockhash} from "./blockhash/blockhash";
|
||||
|
||||
export interface ImageDataOptions {
|
||||
width?: number,
|
||||
height?: number,
|
||||
url: string,
|
||||
variants?: ImageData[]
|
||||
}
|
||||
|
||||
class ImageData {
|
||||
|
||||
width?: number
|
||||
height?: number
|
||||
url: URL
|
||||
variants: ImageData[] = []
|
||||
preferredResolution?: [number, number]
|
||||
sharpImg!: Sharp
|
||||
hashResult!: string
|
||||
actualResolution?: [number, number]
|
||||
|
||||
constructor(data: ImageDataOptions, aggressive = false) {
|
||||
this.width = data.width;
|
||||
this.height = data.height;
|
||||
this.url = new URL(data.url);
|
||||
if (!aggressive && !isValidImageURL(`${this.url.origin}${this.url.pathname}`)) {
|
||||
throw new Error('URL did not end with a valid image extension');
|
||||
}
|
||||
this.variants = data.variants || [];
|
||||
}
|
||||
|
||||
async data(format = 'raw'): Promise<Buffer> {
|
||||
// @ts-ignore
|
||||
return await (await this.sharp()).clone().toFormat(format).toBuffer();
|
||||
}
|
||||
|
||||
async hash(bits: number, useVariantIfPossible = true): Promise<string> {
|
||||
if(this.hashResult === undefined) {
|
||||
let ref: ImageData | undefined;
|
||||
if(useVariantIfPossible && this.preferredResolution !== undefined) {
|
||||
ref = this.getSimilarResolutionVariant(this.preferredResolution[0], this.preferredResolution[1]);
|
||||
}
|
||||
if(ref === undefined) {
|
||||
ref = this;
|
||||
}
|
||||
this.hashResult = await blockhash((await ref.sharp()).clone(), bits);
|
||||
}
|
||||
return this.hashResult;
|
||||
}
|
||||
|
||||
async sharp(): Promise<Sharp> {
|
||||
if (this.sharpImg === undefined) {
|
||||
try {
|
||||
const response = await fetch(this.url.toString())
|
||||
if (response.ok) {
|
||||
const ct = response.headers.get('Content-Type');
|
||||
if (ct !== null && ct.includes('image')) {
|
||||
const sFunc = await getSharpAsync();
|
||||
// if image is animated then we want to extract the first frame and convert it to a regular image
|
||||
// so we can compare two static images later (also because sharp can't use resize() on animated images)
|
||||
if(['gif','webp'].some(x => ct.includes(x))) {
|
||||
this.sharpImg = await sFunc(await (await sFunc(await response.buffer(), {pages: 1, animated: false})).png().toBuffer());
|
||||
} else {
|
||||
this.sharpImg = await sFunc(await response.buffer());
|
||||
}
|
||||
const meta = await this.sharpImg.metadata();
|
||||
if (this.width === undefined || this.height === undefined) {
|
||||
this.width = meta.width;
|
||||
this.height = meta.height;
|
||||
}
|
||||
this.actualResolution = [meta.width as number, meta.height as number];
|
||||
} else {
|
||||
throw new SimpleError(`Content-Type for fetched URL ${this.url} did not contain "image"`);
|
||||
}
|
||||
} else {
|
||||
throw new SimpleError(`URL response was not OK: (${response.status})${response.statusText}`);
|
||||
}
|
||||
|
||||
|
||||
} catch (err: any) {
|
||||
if(!(err instanceof SimpleError)) {
|
||||
throw new Error(`Error occurred while fetching response from URL: ${err.message}`);
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
return this.sharpImg;
|
||||
}
|
||||
|
||||
get pixels() {
|
||||
if (this.actualResolution !== undefined) {
|
||||
return this.actualResolution[0] * this.actualResolution[1];
|
||||
}
|
||||
if (this.width === undefined || this.height === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
return this.width * this.height;
|
||||
}
|
||||
|
||||
get hasDimensions() {
|
||||
return this.width !== undefined && this.height !== undefined;
|
||||
}
|
||||
|
||||
get baseUrl() {
|
||||
return `${this.url.origin}${this.url.pathname}`;
|
||||
}
|
||||
|
||||
setPreferredResolutionByWidth(prefWidth: number) {
|
||||
let height: number | undefined = undefined,
|
||||
width: number | undefined = undefined;
|
||||
if (this.variants.length === 0) {
|
||||
return;
|
||||
}
|
||||
for (const v of this.variants) {
|
||||
if (v.hasDimensions && (v.width as number) <= prefWidth) {
|
||||
width = v.width as number;
|
||||
height = v.height as number;
|
||||
}
|
||||
}
|
||||
if (width !== undefined) {
|
||||
this.preferredResolution = [width, (height as number)];
|
||||
}
|
||||
}
|
||||
|
||||
getSimilarResolutionVariant(width: number, height: number, allowablePercentDiff = 0): ImageData | undefined {
|
||||
if (this.variants.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
return this.variants.find(x => {
|
||||
return x.hasDimensions && (absPercentDifference(width, x.width as number) <= allowablePercentDiff) && (absPercentDifference(height, x.height as number) <= allowablePercentDiff);
|
||||
});
|
||||
}
|
||||
|
||||
isSameDimensions(otherImage: ImageData) {
|
||||
if (!this.hasDimensions || !otherImage.hasDimensions) {
|
||||
return false;
|
||||
}
|
||||
return this.width === otherImage.width && this.height === otherImage.height;
|
||||
}
|
||||
|
||||
async sameAspectRatio(otherImage: ImageData) {
|
||||
let thisRes = this.actualResolution;
|
||||
let otherRes = otherImage.actualResolution;
|
||||
if(thisRes === undefined) {
|
||||
const tMeta = await (await this.sharp()).metadata();
|
||||
const thisMeta = {width: tMeta.width as number, height: tMeta.height as number };
|
||||
this.actualResolution = [thisMeta.width, thisMeta.height];
|
||||
thisRes = this.actualResolution;
|
||||
}
|
||||
if(otherRes === undefined) {
|
||||
const otherMeta = await (await otherImage.sharp()).metadata();
|
||||
otherRes = [otherMeta.width as number, otherMeta.height as number];
|
||||
}
|
||||
const thisRatio = thisRes[0] / thisRes[1];
|
||||
const otherRatio = otherRes[0] / otherRes[1];
|
||||
|
||||
// a little leeway
|
||||
return Math.abs(thisRatio - otherRatio) < 0.1;
|
||||
}
|
||||
|
||||
static async dimensionsFromMetadata(img: Sharp) {
|
||||
const {width, height, ...rest} = await img.metadata();
|
||||
return {width: width as number, height: height as number};
|
||||
}
|
||||
|
||||
async normalizeImagesForComparison(compareLibrary: ('pixel' | 'resemble'), imgToCompare: ImageData): Promise<[Sharp, Sharp, number, number]> {
|
||||
const sFunc = await getSharpAsync();
|
||||
|
||||
let refImage = this as ImageData;
|
||||
let compareImage = imgToCompare;
|
||||
if (this.preferredResolution !== undefined) {
|
||||
const matchingVariant = compareImage.getSimilarResolutionVariant(this.preferredResolution[0], this.preferredResolution[1]);
|
||||
if (matchingVariant !== undefined) {
|
||||
compareImage = matchingVariant;
|
||||
refImage = this.getSimilarResolutionVariant(this.preferredResolution[0], this.preferredResolution[1]) as ImageData;
|
||||
}
|
||||
}
|
||||
|
||||
let refSharp = (await refImage.sharp()).clone();
|
||||
let refMeta = await ImageData.dimensionsFromMetadata(refSharp);
|
||||
let compareSharp = (await compareImage.sharp()).clone();
|
||||
let compareMeta = await ImageData.dimensionsFromMetadata(compareSharp);
|
||||
|
||||
// if dimensions are not the same we need to crop or resize before the final resize
|
||||
if (refMeta.width !== compareMeta.width || refMeta.height !== compareMeta.height) {
|
||||
const thisRatio = refMeta.width / (refMeta.height);
|
||||
const otherRatio = compareMeta.width / compareMeta.height;
|
||||
|
||||
const sameRatio = Math.abs(thisRatio - otherRatio) < 0.04;
|
||||
if (sameRatio) {
|
||||
// then resize first since it's most likely the same image
|
||||
// can be fairly sure a downscale will get pixels close to the same
|
||||
if (refMeta.width > compareMeta.width) {
|
||||
refSharp = sFunc(await refSharp.resize(compareMeta.width, null, {fit: 'outside'}).toBuffer());
|
||||
} else {
|
||||
compareSharp = sFunc(await compareSharp.resize(refMeta.width, null, {fit: 'outside'}).toBuffer());
|
||||
}
|
||||
refMeta = await ImageData.dimensionsFromMetadata(refSharp);
|
||||
compareMeta = await ImageData.dimensionsFromMetadata(compareSharp);
|
||||
}
|
||||
// find smallest common dimensions
|
||||
const sWidth = refMeta.width <= compareMeta.width ? refMeta.width : compareMeta.width;
|
||||
const sHeight = refMeta.height <= compareMeta.height ? refMeta.height : compareMeta.height;
|
||||
|
||||
// crop if necessary
|
||||
if(sWidth !== refMeta.width || sHeight !== refMeta.height) {
|
||||
refSharp = sFunc(await refSharp.extract({left: 0, top: 0, width: sWidth, height: sHeight}).toBuffer());
|
||||
}
|
||||
if(sWidth !== compareMeta.width || sHeight !== compareMeta.height) {
|
||||
compareSharp = sFunc(await compareSharp.extract({left: 0, top: 0, width: sWidth, height: sHeight}).toBuffer());
|
||||
}
|
||||
}
|
||||
|
||||
// final resize to reduce memory/cpu usage during comparison
|
||||
refSharp = sFunc(await refSharp.resize(400, null, {fit: 'outside'}).toBuffer());
|
||||
compareSharp = sFunc(await compareSharp.resize(400, null, {fit: 'outside'}).toBuffer());
|
||||
|
||||
const {width, height} = await ImageData.dimensionsFromMetadata(refSharp);
|
||||
return [refSharp, compareSharp, width, height];
|
||||
}
|
||||
|
||||
static fromSubmission(sub: Submission, aggressive = false): ImageData {
|
||||
const url = new URL(sub.url);
|
||||
const data: any = {
|
||||
url,
|
||||
};
|
||||
let variants = [];
|
||||
if (sub.preview !== undefined && sub.preview.enabled && sub.preview.images.length > 0) {
|
||||
const firstImg = sub.preview.images[0];
|
||||
const ref = sub.preview.images[0].source;
|
||||
data.width = ref.width;
|
||||
data.height = ref.height;
|
||||
|
||||
variants = firstImg.resolutions.map(x => new ImageData(x));
|
||||
data.variants = variants;
|
||||
}
|
||||
return new ImageData(data, aggressive);
|
||||
}
|
||||
}
|
||||
|
||||
export default ImageData;
|
||||
234
src/Common/blockhash/blockhash.ts
Normal file
@@ -0,0 +1,234 @@
|
||||
// Perceptual image hash calculation tool based on algorithm described in
|
||||
// Block Mean Value Based Image Perceptual Hashing by Bian Yang, Fan Gu and Xiamu Niu
|
||||
//
|
||||
// Copyright 2014 Commons Machinery http://commonsmachinery.se/
|
||||
// Distributed under an MIT license, please see LICENSE in the top dir.
|
||||
|
||||
|
||||
// https://github.com/commonsmachinery/blockhash-js/blob/master/index.js
|
||||
|
||||
import {Sharp} from "sharp";
|
||||
|
||||
interface BlockImageData {
|
||||
data: Buffer,
|
||||
width: number,
|
||||
height: number
|
||||
}
|
||||
|
||||
var one_bits = [0, 1, 1, 2, 1, 2, 2, 3, 1, 2, 2, 3, 2, 3, 3, 4];
|
||||
|
||||
/* Calculate the hamming distance for two hashes in hex format */
|
||||
export const hammingDistance = (hash1: string, hash2: string) => {
|
||||
var d = 0;
|
||||
var i;
|
||||
|
||||
if (hash1.length !== hash2.length) {
|
||||
throw new Error("Can't compare hashes with different length");
|
||||
}
|
||||
|
||||
for (i = 0; i < hash1.length; i++) {
|
||||
var n1 = parseInt(hash1[i], 16);
|
||||
var n2 = parseInt(hash2[i], 16);
|
||||
d += one_bits[n1 ^ n2];
|
||||
}
|
||||
return d;
|
||||
};
|
||||
|
||||
var median = function(data: number[]) {
|
||||
var mdarr = data.slice(0);
|
||||
mdarr.sort(function(a, b) { return a-b; });
|
||||
if (mdarr.length % 2 === 0) {
|
||||
return (mdarr[mdarr.length/2 - 1] + mdarr[mdarr.length/2]) / 2.0;
|
||||
}
|
||||
return mdarr[Math.floor(mdarr.length/2)];
|
||||
};
|
||||
|
||||
var translate_blocks_to_bits = function(blocks: number[], pixels_per_block: number) {
|
||||
var half_block_value = pixels_per_block * 256 * 3 / 2;
|
||||
var bandsize = blocks.length / 4;
|
||||
|
||||
// Compare medians across four horizontal bands
|
||||
for (var i = 0; i < 4; i++) {
|
||||
var m = median(blocks.slice(i * bandsize, (i + 1) * bandsize));
|
||||
for (var j = i * bandsize; j < (i + 1) * bandsize; j++) {
|
||||
var v = blocks[j];
|
||||
|
||||
// Output a 1 if the block is brighter than the median.
|
||||
// With images dominated by black or white, the median may
|
||||
// end up being 0 or the max value, and thus having a lot
|
||||
// of blocks of value equal to the median. To avoid
|
||||
// generating hashes of all zeros or ones, in that case output
|
||||
// 0 if the median is in the lower value space, 1 otherwise
|
||||
blocks[j] = Number(v > m || (Math.abs(v - m) < 1 && m > half_block_value));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
var bits_to_hexhash = function(bitsArray: number[]) {
|
||||
var hex = [];
|
||||
for (var i = 0; i < bitsArray.length; i += 4) {
|
||||
var nibble = bitsArray.slice(i, i + 4);
|
||||
hex.push(parseInt(nibble.join(''), 2).toString(16));
|
||||
}
|
||||
|
||||
return hex.join('');
|
||||
};
|
||||
|
||||
var bmvbhash_even = function(data: BlockImageData, bits: number) {
|
||||
var blocksize_x = Math.floor(data.width / bits);
|
||||
var blocksize_y = Math.floor(data.height / bits);
|
||||
|
||||
var result = [];
|
||||
|
||||
for (var y = 0; y < bits; y++) {
|
||||
for (var x = 0; x < bits; x++) {
|
||||
var total = 0;
|
||||
|
||||
for (var iy = 0; iy < blocksize_y; iy++) {
|
||||
for (var ix = 0; ix < blocksize_x; ix++) {
|
||||
var cx = x * blocksize_x + ix;
|
||||
var cy = y * blocksize_y + iy;
|
||||
var ii = (cy * data.width + cx) * 4;
|
||||
|
||||
var alpha = data.data[ii+3];
|
||||
if (alpha === 0) {
|
||||
total += 765;
|
||||
} else {
|
||||
total += data.data[ii] + data.data[ii+1] + data.data[ii+2];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result.push(total);
|
||||
}
|
||||
}
|
||||
|
||||
translate_blocks_to_bits(result, blocksize_x * blocksize_y);
|
||||
return bits_to_hexhash(result);
|
||||
};
|
||||
|
||||
var bmvbhash = function(data: BlockImageData, bits: number) {
|
||||
var result = [];
|
||||
|
||||
var i, j, x, y;
|
||||
var block_width, block_height;
|
||||
var weight_top, weight_bottom, weight_left, weight_right;
|
||||
var block_top, block_bottom, block_left, block_right;
|
||||
var y_mod, y_frac, y_int;
|
||||
var x_mod, x_frac, x_int;
|
||||
var blocks: number[][] = [];
|
||||
|
||||
var even_x = data.width % bits === 0;
|
||||
var even_y = data.height % bits === 0;
|
||||
|
||||
if (even_x && even_y) {
|
||||
return bmvbhash_even(data, bits);
|
||||
}
|
||||
|
||||
// initialize blocks array with 0s
|
||||
for (i = 0; i < bits; i++) {
|
||||
blocks.push([]);
|
||||
for (j = 0; j < bits; j++) {
|
||||
blocks[i].push(0);
|
||||
}
|
||||
}
|
||||
|
||||
block_width = data.width / bits;
|
||||
block_height = data.height / bits;
|
||||
|
||||
for (y = 0; y < data.height; y++) {
|
||||
if (even_y) {
|
||||
// don't bother dividing y, if the size evenly divides by bits
|
||||
block_top = block_bottom = Math.floor(y / block_height);
|
||||
weight_top = 1;
|
||||
weight_bottom = 0;
|
||||
} else {
|
||||
y_mod = (y + 1) % block_height;
|
||||
y_frac = y_mod - Math.floor(y_mod);
|
||||
y_int = y_mod - y_frac;
|
||||
|
||||
weight_top = (1 - y_frac);
|
||||
weight_bottom = (y_frac);
|
||||
|
||||
// y_int will be 0 on bottom/right borders and on block boundaries
|
||||
if (y_int > 0 || (y + 1) === data.height) {
|
||||
block_top = block_bottom = Math.floor(y / block_height);
|
||||
} else {
|
||||
block_top = Math.floor(y / block_height);
|
||||
block_bottom = Math.ceil(y / block_height);
|
||||
}
|
||||
}
|
||||
|
||||
for (x = 0; x < data.width; x++) {
|
||||
var ii = (y * data.width + x) * 4;
|
||||
|
||||
var avgvalue, alpha = data.data[ii+3];
|
||||
if (alpha === 0) {
|
||||
avgvalue = 765;
|
||||
} else {
|
||||
avgvalue = data.data[ii] + data.data[ii+1] + data.data[ii+2];
|
||||
}
|
||||
|
||||
if (even_x) {
|
||||
block_left = block_right = Math.floor(x / block_width);
|
||||
weight_left = 1;
|
||||
weight_right = 0;
|
||||
} else {
|
||||
x_mod = (x + 1) % block_width;
|
||||
x_frac = x_mod - Math.floor(x_mod);
|
||||
x_int = x_mod - x_frac;
|
||||
|
||||
weight_left = (1 - x_frac);
|
||||
weight_right = x_frac;
|
||||
|
||||
// x_int will be 0 on bottom/right borders and on block boundaries
|
||||
if (x_int > 0 || (x + 1) === data.width) {
|
||||
block_left = block_right = Math.floor(x / block_width);
|
||||
} else {
|
||||
block_left = Math.floor(x / block_width);
|
||||
block_right = Math.ceil(x / block_width);
|
||||
}
|
||||
}
|
||||
|
||||
// add weighted pixel value to relevant blocks
|
||||
blocks[block_top][block_left] += avgvalue * weight_top * weight_left;
|
||||
blocks[block_top][block_right] += avgvalue * weight_top * weight_right;
|
||||
blocks[block_bottom][block_left] += avgvalue * weight_bottom * weight_left;
|
||||
blocks[block_bottom][block_right] += avgvalue * weight_bottom * weight_right;
|
||||
}
|
||||
}
|
||||
|
||||
for (i = 0; i < bits; i++) {
|
||||
for (j = 0; j < bits; j++) {
|
||||
result.push(blocks[i][j]);
|
||||
}
|
||||
}
|
||||
|
||||
translate_blocks_to_bits(result, block_width * block_height);
|
||||
return bits_to_hexhash(result);
|
||||
};
|
||||
|
||||
var blockhashData = function(imgData: BlockImageData, bits: number, method: number) {
|
||||
var hash;
|
||||
|
||||
if (method === 1) {
|
||||
hash = bmvbhash_even(imgData, bits);
|
||||
}
|
||||
else if (method === 2) {
|
||||
hash = bmvbhash(imgData, bits);
|
||||
}
|
||||
else {
|
||||
throw new Error("Bad hashing method");
|
||||
}
|
||||
|
||||
return hash;
|
||||
};
|
||||
|
||||
export const blockhash = async function(src: Sharp, bits: number, method: number = 2): Promise<string> {
|
||||
const {data: buff, info} = await src.ensureAlpha().raw().toBuffer({resolveWithObject: true});
|
||||
return blockhashData({
|
||||
width: info.width,
|
||||
height: info.height,
|
||||
data: buff,
|
||||
}, bits, method);
|
||||
};
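A hedged usage sketch tying the `blockhash` export above to the `ImageData.hash` method from the previous file: hash both images at the same bit count, then convert the hamming distance into a whole-number percent difference that could be weighed against `hash.hardThreshold`. The conversion and import paths below are assumptions for illustration, not ContextMod's actual comparison code.

import ImageData from "../ImageData";
import {hammingDistance} from "./blockhash";

const hashPercentDifference = async (refImg: ImageData, otherImg: ImageData, bits = 32): Promise<number> => {
    // hashes produced with different bit counts cannot be compared, so use the same bits for both
    const refHash = await refImg.hash(bits);
    const otherHash = await otherImg.hash(bits);
    // each hex digit encodes 4 bits, so the maximum possible distance is length * 4
    const distance = hammingDistance(refHash, otherHash);
    return (distance / (refHash.length * 4)) * 100;
};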
|
||||
@@ -1,2 +1,31 @@
|
||||
import {HistoricalStats} from "./interfaces";
|
||||
|
||||
export const cacheOptDefaults = {ttl: 60, max: 500, checkPeriod: 600};
|
||||
export const cacheTTLDefaults = {authorTTL: 60, userNotesTTL: 300, wikiTTL: 300, submissionTTL: 60, commentTTL: 60, filterCriteriaTTL: 60};
|
||||
export const cacheTTLDefaults = {authorTTL: 60, userNotesTTL: 300, wikiTTL: 300, submissionTTL: 60, commentTTL: 60, filterCriteriaTTL: 60, subredditTTL: 600};
|
||||
export const historicalDefaults: HistoricalStats = {
|
||||
eventsCheckedTotal: 0,
|
||||
eventsActionedTotal: 0,
|
||||
checksRun: new Map(),
|
||||
checksFromCache: new Map(),
|
||||
checksTriggered: new Map(),
|
||||
rulesRun: new Map(),
|
||||
//rulesCached: new Map(),
|
||||
rulesCachedTotal: 0,
|
||||
rulesTriggered: new Map(),
|
||||
actionsRun: new Map(),
|
||||
}
|
||||
|
||||
export const createHistoricalDefaults = (): HistoricalStats => {
|
||||
return {
|
||||
eventsCheckedTotal: 0,
|
||||
eventsActionedTotal: 0,
|
||||
checksRun: new Map(),
|
||||
checksFromCache: new Map(),
|
||||
checksTriggered: new Map(),
|
||||
rulesRun: new Map(),
|
||||
//rulesCached: new Map(),
|
||||
rulesCachedTotal: 0,
|
||||
rulesTriggered: new Map(),
|
||||
actionsRun: new Map(),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -3,6 +3,8 @@ import {Cache} from 'cache-manager';
|
||||
import {MESSAGE} from 'triple-beam';
|
||||
import Poll from "snoostorm/out/util/Poll";
|
||||
import Snoowrap from "snoowrap";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {IncomingMessage} from "http";
|
||||
|
||||
/**
|
||||
* An ISO 8601 Duration
|
||||
@@ -222,6 +224,186 @@ export interface ReferenceSubmission {
|
||||
useSubmissionAsReference?: boolean,
|
||||
}
|
||||
|
||||
/**
|
||||
* When comparing submissions, detect if the reference submission is an image and do a pixel comparison against other detected image submissions.
|
||||
*
|
||||
* **Note:** This is an **experimental feature**
|
||||
* */
|
||||
export interface ImageDetection {
|
||||
/**
|
||||
* Is image detection enabled?
|
||||
* */
|
||||
enable?: boolean
|
||||
/**
|
||||
* Determines how and when to check if a URL is an image
|
||||
*
|
||||
* **Note:** After fetching a URL the **Content-Type** is validated to contain `image` before detection occurs
|
||||
*
|
||||
* **When `extension`:** (default)
|
||||
*
|
||||
* * Only URLs that end in known image extensions (.png, .jpg, etc...) are fetched
|
||||
*
|
||||
* **When `unknown`:**
|
||||
*
|
||||
* * URLs that end in known image extensions (.png, .jpg, etc...) are fetched
|
||||
* * URLs with no extension or unknown (IE non-video, non-doc, etc...) are fetched
|
||||
*
|
||||
* **When `all`:**
|
||||
*
|
||||
* * All submissions that have URLs (non-self) will be fetched, regardless of extension
|
||||
* * **Note:** This can be bandwidth/CPU intensive if the history window is large, so use with care
|
||||
*
|
||||
* @default "extension"
|
||||
* */
|
||||
fetchBehavior?: 'extension' | 'unknown' | 'all',
|
||||
/**
|
||||
* The percentage, as a whole number, of difference between two images at which point they will not be considered the same.
|
||||
*
|
||||
* Will be used as `hash.hardThreshold` and `pixel.threshold` if those values are not specified
|
||||
*
|
||||
* Default is `5`
|
||||
*
|
||||
* @default 5
|
||||
* */
|
||||
threshold?: number
|
||||
|
||||
/**
|
||||
* Use perceptual hashing (blockhash-js) to compare images
|
||||
*
|
||||
* Pros:
|
||||
*
|
||||
* * very fast
|
||||
* * low cpu/memory usage
|
||||
* * results can be cached
|
||||
*
|
||||
* Cons:
|
||||
*
|
||||
* * not as accurate as pixel comparison
|
||||
* * weaker for text-heavy images
|
||||
* * mostly color-blind
|
||||
*
|
||||
* Best uses:
|
||||
*
|
||||
* * Detecting (general) duplicate images
|
||||
* * Comparing large number of images
|
||||
* */
|
||||
hash?: {
|
||||
/**
|
||||
* Enabled by default.
|
||||
*
|
||||
* If both `hash` and `pixel` are enabled then `pixel` will be used to verify image comparison when hashes matches
|
||||
*
|
||||
* @default true
|
||||
* */
|
||||
enable?: boolean
|
||||
|
||||
/**
|
||||
* Bit count determines accuracy of hash and granularity of hash comparison (comparison to other hashes)
|
||||
*
|
||||
* Default is `32`
|
||||
*
|
||||
* **NOTE:** Hashes of different sizes (bits) cannot be compared. If you are caching results make sure all rules where results may be shared use the same bit count to ensure hashes can be compared. Otherwise hashes will be recomputed.
|
||||
*
|
||||
* @default 32
|
||||
* */
|
||||
bits?: number
|
||||
|
||||
/**
|
||||
* Number of seconds to cache image hash
|
||||
* */
|
||||
ttl?: number
|
||||
/**
|
||||
* High Confidence Threshold
|
||||
*
|
||||
* If the difference in comparison is equal to or less than this number the images are considered the same and pixel comparison WILL NOT occur
|
||||
*
|
||||
* Defaults to the parent-level `threshold` value if not present
|
||||
*
|
||||
* Use `null` if you want pixel comparison to ALWAYS occur (softThreshold must be present)
|
||||
* */
|
||||
hardThreshold?: number | null
|
||||
/**
|
||||
* Low Confidence Threshold -- only used if `pixel` is enabled
|
||||
*
|
||||
* If the difference in comparison is
|
||||
*
|
||||
* 1) equal to or less than this value and
|
||||
* 2) the value is greater than `hardThreshold`
|
||||
*
|
||||
* the images will be compared using the `pixel` method
|
||||
* */
|
||||
softThreshold?: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Use pixel counting to compare images
|
||||
*
|
||||
* Pros:
|
||||
*
|
||||
* * most accurate
|
||||
* * strong with text or color-only changes
|
||||
*
|
||||
* Cons:
|
||||
*
|
||||
* * much slower than hashing
|
||||
* * memory/cpu intensive
|
||||
*
|
||||
* Best uses:
|
||||
*
|
||||
* Comparing text-only images
|
||||
* Comparisons requiring a high degree of accuracy or where changes are subtle
|
||||
* */
|
||||
pixel?: {
|
||||
/**
|
||||
* Disabled by default.
|
||||
*
|
||||
* @default false
|
||||
* */
|
||||
enable?: boolean
|
||||
/**
|
||||
* The percentage, as a whole number, of pixels that are **different** between the two images at which point the images are not considered the same.
|
||||
* */
|
||||
threshold?: number
|
||||
}
|
||||
}
|
||||
|
||||
export interface StrongImageDetection {
|
||||
enable: boolean,
|
||||
fetchBehavior: 'extension' | 'unknown' | 'all'
|
||||
threshold: number,
|
||||
hash: {
|
||||
enable: boolean
|
||||
bits: number
|
||||
ttl?: number
|
||||
hardThreshold: number | null
|
||||
softThreshold?: number
|
||||
}
|
||||
pixel: {
|
||||
enable: boolean
|
||||
threshold: number
|
||||
}
|
||||
}
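To make the threshold documentation above concrete, the interplay of `hardThreshold`, `softThreshold`, and `pixel` can be sketched as a small decision function. This only illustrates the documented behavior; the names and structure are assumptions, not ContextMod's actual implementation.

const imagesLikelySame = async (
    hashDiffPercent: number,                    // percent difference between the two image hashes
    opts: StrongImageDetection,
    runPixelComparison: () => Promise<number>,  // resolves to the percent of differing pixels
): Promise<boolean> => {
    const {hash: {hardThreshold, softThreshold}, pixel} = opts;
    // high confidence: at or below the hard threshold => treat as the same image, skip pixel comparison
    if (hardThreshold !== null && hashDiffPercent <= hardThreshold) {
        return true;
    }
    // low confidence: within the soft threshold => verify with the slower pixel comparison
    if (pixel.enable && softThreshold !== undefined && hashDiffPercent <= softThreshold) {
        return (await runPixelComparison()) <= pixel.threshold;
    }
    return false;
};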
|
||||
|
||||
// export interface ImageData {
|
||||
// data: Promise<Buffer>,
|
||||
// buf?: Buffer,
|
||||
// width: number,
|
||||
// height: number
|
||||
// pixels?: number
|
||||
// url: string
|
||||
// variants?: ImageData[]
|
||||
// }
|
||||
|
||||
export interface ImageComparisonResult {
|
||||
isSameDimensions: boolean
|
||||
dimensionDifference: {
|
||||
width: number;
|
||||
height: number;
|
||||
};
|
||||
misMatchPercentage: number;
|
||||
analysisTime: number;
|
||||
}
|
||||
|
||||
export interface RichContent {
|
||||
/**
|
||||
* The Content to submit for this Action. Content is interpreted as reddit-flavored Markdown.
|
||||
@@ -304,6 +486,38 @@ export type PollOn = 'unmoderated' | 'modqueue' | 'newSub' | 'newComm';
|
||||
export interface PollingOptionsStrong extends PollingOptions {
|
||||
limit: number,
|
||||
interval: number,
|
||||
clearProcessed: ClearProcessedOptions
|
||||
}
|
||||
|
||||
/**
|
||||
* For very long-running, high-volume subreddits, clearing the list of processed activities helps manage memory bloat
|
||||
*
|
||||
* All of these options have default values based on the limit and/or interval set for polling options on each subreddit stream. They only need to be modified if the defaults are not sufficient.
|
||||
*
|
||||
* If both `after` and `size` are defined, whichever is hit first will trigger the list to clear. `after` will be reset after every clear.
|
||||
* */
|
||||
export interface ClearProcessedOptions {
|
||||
/**
|
||||
* An interval the processed list should be cleared after.
|
||||
*
|
||||
* * EX `9 days`
|
||||
* * EX `3 months`
|
||||
* * EX `5 minutes`
|
||||
* @pattern ^\s*(?<time>\d+)\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\s*$
|
||||
* */
|
||||
after?: string,
|
||||
/**
|
||||
* Number of activities found in processed list after which the list should be cleared.
|
||||
*
|
||||
* Defaults to the `limit` value from `PollingOptions`
|
||||
* */
|
||||
size?: number,
|
||||
/**
|
||||
* The number of activities to retain in processed list after clearing.
|
||||
*
|
||||
* Defaults to `limit` value from `PollingOptions`
|
||||
* */
|
||||
retain?: number,
|
||||
}
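As a rough illustration of the defaults described above (`size` and `retain` falling back to the polling `limit`), resolution could look like the sketch below; the helper name is hypothetical.

const resolveClearProcessed = (opts: ClearProcessedOptions = {}, pollingLimit: number) => {
    const {after, size = pollingLimit, retain = pollingLimit} = opts;
    return {after, size, retain};
};

// resolveClearProcessed({after: '5 minutes'}, 100) => {after: '5 minutes', size: 100, retain: 100}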
|
||||
|
||||
export interface PollingDefaults {
|
||||
@@ -377,53 +591,122 @@ export interface PollingOptions extends PollingDefaults {
|
||||
*
|
||||
* */
|
||||
pollOn: 'unmoderated' | 'modqueue' | 'newSub' | 'newComm'
|
||||
|
||||
clearProcessed?: ClearProcessedOptions
|
||||
}
|
||||
|
||||
export interface TTLConfig {
|
||||
/**
|
||||
* Amount of time, in seconds, author activities (Comments/Submission) should be cached
|
||||
* Amount of time, in seconds, author activity history (Comments/Submission) should be cached
|
||||
*
|
||||
* * If `0` or `true` will cache indefinitely (not recommended)
|
||||
* * If `false` will not cache
|
||||
*
|
||||
* * ENV => `AUTHOR_TTL`
|
||||
* * ARG => `--authorTTL <sec>`
|
||||
* @examples [60]
|
||||
* @default 60
|
||||
* */
|
||||
authorTTL?: number;
|
||||
authorTTL?: number | boolean;
|
||||
/**
|
||||
* Amount of time, in seconds, wiki content pages should be cached
|
||||
*
|
||||
* * If `0` or `true` will cache indefinitely (not recommended)
|
||||
* * If `false` will not cache
|
||||
*
|
||||
* @examples [300]
|
||||
* @default 300
|
||||
* */
|
||||
wikiTTL?: number;
|
||||
|
||||
wikiTTL?: number | boolean;
|
||||
/**
|
||||
* Amount of time, in milliseconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached
|
||||
* Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached
|
||||
*
|
||||
* * If `0` or `true` will cache indefinitely (not recommended)
|
||||
* * If `false` will not cache
|
||||
*
|
||||
* @examples [300]
|
||||
* @default 300
|
||||
* */
|
||||
userNotesTTL?: number;
|
||||
userNotesTTL?: number | boolean;
|
||||
/**
|
||||
* Amount of time, in seconds, a submission should be cached
|
||||
*
|
||||
* * If `0` or `true` will cache indefinitely (not recommended)
|
||||
* * If `false` will not cache
|
||||
*
|
||||
* @examples [60]
|
||||
* @default 60
|
||||
* */
|
||||
submissionTTL?: number;
|
||||
submissionTTL?: number | boolean;
|
||||
/**
|
||||
* Amount of time, in seconds, a comment should be cached
|
||||
*
|
||||
* * If `0` or `true` will cache indefinitely (not recommended)
|
||||
* * If `false` will not cache
|
||||
*
|
||||
* @examples [60]
|
||||
* @default 60
|
||||
* */
|
||||
commentTTL?: number;
|
||||
commentTTL?: number | boolean;
|
||||
/**
|
||||
* Amount of time, in seconds, a subreddit (attributes) should be cached
|
||||
*
|
||||
* * If `0` or `true` will cache indefinitely (not recommended)
|
||||
* * If `false` will not cache
|
||||
*
|
||||
* @examples [600]
|
||||
* @default 600
|
||||
* */
|
||||
subredditTTL?: number | boolean;
|
||||
/**
|
||||
* Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)
|
||||
*
|
||||
* This is especially useful when polling high-volume comments and your checks rely on author/item filters
|
||||
*
|
||||
* * If `0` or `true` will cache indefinitely (not recommended)
|
||||
* * If `false` will not cache
|
||||
*
|
||||
* @examples [60]
|
||||
* @default 60
|
||||
* */
|
||||
filterCriteriaTTL?: number;
|
||||
filterCriteriaTTL?: number | boolean;
|
||||
}
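Every TTL above now accepts `number | boolean` with the same convention (`0` or `true` caches indefinitely, `false` disables caching). A hypothetical normalization helper, only to make that convention concrete:

const normalizeTTL = (val: number | boolean | undefined, defaultTTL: number): number | false => {
    if (val === undefined) {
        return defaultTTL;  // fall back to the documented default
    }
    if (val === false) {
        return false;       // caching disabled
    }
    if (val === true) {
        return 0;           // 0 is treated as "cache indefinitely"
    }
    return val;             // a number; 0 also means indefinite
};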
|
||||
|
||||
export interface SubredditCacheConfig extends TTLConfig {
|
||||
export interface CacheConfig extends TTLConfig {
|
||||
/**
|
||||
* The cache provider and, optionally, a custom configuration for that provider
|
||||
*
|
||||
* If not present or `null` provider will be `memory`.
|
||||
*
|
||||
* To specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`
|
||||
* */
|
||||
provider?: CacheProvider | CacheOptions
|
||||
|
||||
/**
|
||||
* The **maximum** number of Events that the cache should store triggered result summaries for
|
||||
*
|
||||
* These summaries are viewable through the Web UI.
|
||||
*
|
||||
* The value specified by a subreddit cannot be larger than the value set by the Operator for the global/bot config (if set)
|
||||
*
|
||||
* @default 25
|
||||
* @example [25]
|
||||
* */
|
||||
actionedEventsMax?: number
|
||||
}
|
||||
|
||||
export interface OperatorCacheConfig extends CacheConfig {
|
||||
/**
|
||||
* The **default** number of Events that the cache will store triggered result summaries for
|
||||
*
|
||||
* These summaries are viewable through the Web UI.
|
||||
*
|
||||
* The value specified cannot be larger than `actionedEventsMax` for the global/bot config (if set)
|
||||
*
|
||||
* @default 25
|
||||
* @example [25]
|
||||
* */
|
||||
actionedEventsDefault?: number
|
||||
}
|
||||
|
||||
export interface Footer {
|
||||
@@ -503,7 +786,7 @@ export interface ManagerOptions {
|
||||
/**
|
||||
* Per-subreddit config for caching TTL values. If set to `false` caching is disabled.
|
||||
* */
|
||||
caching?: SubredditCacheConfig
|
||||
caching?: CacheConfig
|
||||
|
||||
/**
|
||||
* Use this option to override the `dryRun` setting for all `Checks`
|
||||
@@ -549,6 +832,8 @@ export interface ManagerOptions {
|
||||
nickname?: string
|
||||
|
||||
notifications?: NotificationConfig
|
||||
|
||||
credentials?: ThirdPartyCredentialsJsonConfig
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -562,6 +847,22 @@ export interface ManagerOptions {
|
||||
* */
|
||||
export type CompareValue = string;
|
||||
|
||||
/**
|
||||
* A duration and how to compare it against a value
|
||||
*
|
||||
* The syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`
|
||||
*
|
||||
* * EX `> 100 days` => Passes if the date being compared is before 100 days ago
|
||||
* * EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months
|
||||
*
|
||||
* Unit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)
|
||||
*
|
||||
* See https://regexr.com/609n8 for an example
|
||||
*
|
||||
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\s*$
|
||||
* */
|
||||
export type DurationComparor = string;
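The `@pattern` above can be applied directly to validate and split a `DurationComparor`; a small parsing sketch (ContextMod's real parsing lives in its utilities and may differ):

const DURATION_COMPARISON = /^\s*(>|>=|<|<=)\s*(\d+)\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\s*$/;

const parseDurationComparor = (val: string) => {
    const match = val.match(DURATION_COMPARISON);
    if (match === null) {
        throw new Error(`'${val}' is not a valid duration comparison`);
    }
    const [, operator, value, unit] = match;
    return {operator, value: Number.parseInt(value, 10), unit};
};

// parseDurationComparor('> 100 days') => {operator: '>', value: 100, unit: 'days'}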
|
||||
|
||||
/**
|
||||
* A string containing a comparison operator and a value to compare against
|
||||
*
|
||||
@@ -607,6 +908,9 @@ export interface ActivityState {
|
||||
stickied?: boolean
|
||||
distinguished?: boolean
|
||||
approved?: boolean
|
||||
score?: CompareValue
|
||||
reports?: CompareValue
|
||||
age?: DurationComparor
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -630,6 +934,9 @@ export interface SubmissionState extends ActivityState {
|
||||
link_flair_css_class?: string
|
||||
}
|
||||
|
||||
// properties calculated/derived by CM -- not provided as plain values by reddit
|
||||
export const cmActivityProperties = ['submissionState','score','reports','removed','deleted','filtered','age','title'];
|
||||
|
||||
/**
|
||||
* Different attributes a `Comment` can be in. Only include a property if you want to check it.
|
||||
* @examples [{"op": true, "removed": false}]
|
||||
@@ -643,6 +950,51 @@ export interface CommentState extends ActivityState {
|
||||
* A list of SubmissionState attributes to test the Submission this comment is in
|
||||
* */
|
||||
submissionState?: SubmissionState[]
|
||||
|
||||
/**
|
||||
* The (nested) level of a comment.
|
||||
*
|
||||
* * 0 means the comment is at top-level (replying to submission)
|
||||
* * a non-zero value N means the comment has N parent comments
|
||||
* */
|
||||
depth?: DurationComparor
|
||||
}
|
||||
|
||||
/**
|
||||
* Different attributes a `Subreddit` can be in. Only include a property if you want to check it.
|
||||
* @examples [{"over18": true}]
|
||||
* */
|
||||
export interface SubredditState {
|
||||
/**
|
||||
* Is subreddit quarantined?
|
||||
* */
|
||||
quarantine?: boolean
|
||||
/**
|
||||
* Is subreddit NSFW/over 18?
|
||||
*
|
||||
* **Note**: This is **mod-controlled flag** so it is up to the mods of the subreddit to correctly mark their subreddit as NSFW
|
||||
* */
|
||||
over18?: boolean
|
||||
/**
|
||||
* The name of the subreddit.
|
||||
*
|
||||
* Can be a normal string (will check case-insensitive) or a regular expression
|
||||
*
|
||||
* EX `["mealtimevideos", "/onlyfans*\/i"]`
|
||||
*
|
||||
* @examples ["mealtimevideos", "/onlyfans*\/i"]
|
||||
* */
|
||||
name?: string | RegExp
|
||||
/**
|
||||
* A friendly description of what this State is trying to parse
|
||||
* */
|
||||
stateDescription?: string
|
||||
|
||||
isUserProfile?: boolean
|
||||
}
|
||||
|
||||
export interface StrongSubredditState extends SubredditState {
|
||||
name?: RegExp
|
||||
}
|
||||
|
||||
export type TypedActivityStates = SubmissionState[] | CommentState[];
|
||||
@@ -666,6 +1018,28 @@ export const STOPPED = 'stopped';
|
||||
export const RUNNING = 'running';
|
||||
export const PAUSED = 'paused';
|
||||
|
||||
export interface SearchAndReplaceRegExp {
|
||||
/**
|
||||
* The search value to test for
|
||||
*
|
||||
* Can be a normal string (converted to a case-sensitive literal) or a valid regular expression
|
||||
*
|
||||
* EX `["find this string", "/some string*\/ig"]`
|
||||
*
|
||||
* @examples ["find this string", "/some string*\/ig"]
|
||||
* */
|
||||
search: string
|
||||
|
||||
/**
|
||||
* The replacement string/value to use when search is found
|
||||
*
|
||||
* This can be a literal string like `'replace with this'`, an empty string to remove the search value (`''`), or a special regex value
|
||||
*
|
||||
* See replacement here for more information: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/replace
|
||||
* */
|
||||
replace: string
|
||||
}
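A minimal sketch of applying a single `SearchAndReplaceRegExp` entry, assuming the documented convention that a `/pattern/flags` value is treated as a regular expression and anything else as a case-sensitive literal (the helper itself is hypothetical):

const applySearchAndReplace = (content: string, op: SearchAndReplaceRegExp): string => {
    const regexParts = op.search.match(/^\/(.*)\/([a-z]*)$/i);
    const search = regexParts !== null ? new RegExp(regexParts[1], regexParts[2]) : op.search;
    return content.replace(search, op.replace);
};

// applySearchAndReplace('remove find this string here', {search: 'find this string ', replace: ''}) => 'remove here'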
|
||||
|
||||
export interface NamedGroup {
|
||||
[name: string]: string
|
||||
}
|
||||
@@ -692,13 +1066,16 @@ export type CacheProvider = 'memory' | 'redis' | 'none';
|
||||
// provider: CacheOptions
|
||||
// }
|
||||
export type StrongCache = {
|
||||
authorTTL: number,
|
||||
userNotesTTL: number,
|
||||
wikiTTL: number,
|
||||
submissionTTL: number,
|
||||
commentTTL: number,
|
||||
filterCriteriaTTL: number,
|
||||
authorTTL: number | boolean,
|
||||
userNotesTTL: number | boolean,
|
||||
wikiTTL: number | boolean,
|
||||
submissionTTL: number | boolean,
|
||||
commentTTL: number | boolean,
|
||||
subredditTTL: number | boolean,
|
||||
filterCriteriaTTL: number | boolean,
|
||||
provider: CacheOptions
|
||||
actionedEventsMax?: number,
|
||||
actionedEventsDefault: number,
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -757,6 +1134,8 @@ export interface CacheOptions {
|
||||
* @examples [500]
|
||||
* */
|
||||
max?: number
|
||||
|
||||
[key:string]: any
|
||||
}
|
||||
|
||||
export type NotificationProvider = 'discord';
|
||||
@@ -919,7 +1298,7 @@ export interface WebCredentials {
|
||||
*
|
||||
* */
|
||||
export interface BotInstanceJsonConfig {
|
||||
credentials?: RedditCredentials
|
||||
credentials?: BotCredentialsJsonConfig | RedditCredentials
|
||||
/*
|
||||
* The name to display for the bot. If not specified will use the name of the reddit account IE `u/TheBotName`
|
||||
* */
|
||||
@@ -1034,6 +1413,13 @@ export interface BotInstanceJsonConfig {
|
||||
* @default false
|
||||
* */
|
||||
sharedMod?: boolean,
|
||||
|
||||
/**
|
||||
* If sharing a mod stream, stagger pushing relevant Activities to individual subreddits.
|
||||
*
|
||||
* Useful when running many subreddits and rules are potentially cpu/memory/traffic heavy -- allows spreading out load
|
||||
* */
|
||||
stagger?: number,
|
||||
},
|
||||
/**
|
||||
* Settings related to default configurations for queue behavior for subreddits
|
||||
@@ -1053,60 +1439,11 @@ export interface BotInstanceJsonConfig {
|
||||
}
|
||||
|
||||
/**
|
||||
* Settings to configure the default caching behavior for each subreddit
|
||||
* Settings to configure the default caching behavior for this bot
|
||||
*
|
||||
* Every setting not specified will default to what is specified by the global operator caching config
|
||||
* */
|
||||
caching?: {
|
||||
/**
|
||||
* Amount of time, in seconds, author activity history (Comments/Submission) should be cached
|
||||
*
|
||||
* * ENV => `AUTHOR_TTL`
|
||||
* * ARG => `--authorTTL <sec>`
|
||||
* @examples [60]
|
||||
* @default 60
|
||||
* */
|
||||
authorTTL?: number;
|
||||
/**
|
||||
* Amount of time, in seconds, wiki content pages should be cached
|
||||
* @examples [300]
|
||||
* @default 300
|
||||
* */
|
||||
wikiTTL?: number;
|
||||
/**
|
||||
* Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached
|
||||
* @examples [300]
|
||||
* @default 300
|
||||
* */
|
||||
userNotesTTL?: number;
|
||||
/**
|
||||
* Amount of time, in seconds, a submission should be cached
|
||||
* @examples [60]
|
||||
* @default 60
|
||||
* */
|
||||
submissionTTL?: number;
|
||||
/**
|
||||
* Amount of time, in seconds, a comment should be cached
|
||||
* @examples [60]
|
||||
* @default 60
|
||||
* */
|
||||
commentTTL?: number;
|
||||
/**
|
||||
* Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)
|
||||
*
|
||||
* This is especially useful when polling high-volume comments and your checks rely on author/item filters
|
||||
*
|
||||
* @examples [60]
|
||||
* @default 60
|
||||
* */
|
||||
filterCriteriaTTL?: number;
|
||||
/**
|
||||
* The cache provider and, optionally, a custom configuration for that provider
|
||||
*
|
||||
* If not present or `null` provider will be `memory`.
|
||||
*
|
||||
* To specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`
|
||||
* */
|
||||
provider?: CacheProvider | CacheOptions
|
||||
}
|
||||
caching?: OperatorCacheConfig
|
||||
/**
|
||||
* Settings related to managing heavy API usage.
|
||||
* */
|
||||
@@ -1214,6 +1551,13 @@ export interface OperatorJsonConfig {
|
||||
path?: string,
|
||||
},
|
||||
|
||||
/**
|
||||
* Settings to configure the default caching behavior globally
|
||||
*
|
||||
* These settings will be used by each bot, and subreddit, that does not specify its own
|
||||
* */
|
||||
caching?: OperatorCacheConfig
|
||||
|
||||
bots?: BotInstanceJsonConfig[]
|
||||
|
||||
/**
|
||||
@@ -1230,23 +1574,30 @@ export interface OperatorJsonConfig {
|
||||
* @examples [8085]
|
||||
* */
|
||||
port?: number,
|
||||
|
||||
/**
|
||||
* Caching provider to use for session and invite data
|
||||
*
|
||||
* If none is provided the top-level caching provider is used
|
||||
* */
|
||||
caching?: 'memory' | 'redis' | CacheOptions
|
||||
/**
|
||||
* Settings to configure the behavior of user sessions -- the session is what the web interface uses to identify logged in users.
|
||||
* */
|
||||
session?: {
|
||||
/**
|
||||
* The cache provider to use.
|
||||
* Number of seconds a session should be valid for.
|
||||
*
|
||||
* The default should be sufficient for almost all use cases
|
||||
* Default is 1 day
|
||||
*
|
||||
* @default "memory"
|
||||
* @examples ["memory"]
|
||||
* @default 86400
|
||||
* @examples [86400]
|
||||
* */
|
||||
provider?: 'memory' | 'redis' | CacheOptions,
|
||||
maxAge?: number
|
||||
/**
|
||||
* The secret value used to encrypt session data
|
||||
*
|
||||
* If provider is persistent (redis) specifying a value here will ensure sessions are valid between application restarts
|
||||
* If provider is persistent (`redis`) specifying a value here will ensure sessions are valid between application restarts
|
||||
*
|
||||
* When not present or `null` a random string is generated on application start
|
||||
*
|
||||
@@ -1254,6 +1605,21 @@ export interface OperatorJsonConfig {
|
||||
* */
|
||||
secret?: string,
|
||||
}
|
||||
|
||||
/**
|
||||
* Settings related to oauth flow invites
|
||||
* */
|
||||
invites?: {
|
||||
/**
|
||||
* Number of seconds an invite should be valid for
|
||||
*
|
||||
* If `0` or not specified (default) invites do not expire
|
||||
*
|
||||
* @default 0
|
||||
* @examples [0]
|
||||
* */
|
||||
maxAge?: number
|
||||
}
|
||||
/**
|
||||
* The default log level to filter to in the web interface
|
||||
*
|
||||
@@ -1312,6 +1678,8 @@ export interface OperatorJsonConfig {
|
||||
* */
|
||||
friendly?: string,
|
||||
}
|
||||
|
||||
credentials?: ThirdPartyCredentialsJsonConfig
|
||||
}
|
||||
|
||||
export interface RequiredOperatorRedditCredentials extends RedditCredentials {
|
||||
@@ -1325,8 +1693,23 @@ export interface RequiredWebRedditCredentials extends RedditCredentials {
|
||||
redirectUri: string
|
||||
}
|
||||
|
||||
export interface ThirdPartyCredentialsJsonConfig {
|
||||
youtube?: {
|
||||
apiKey: string
|
||||
}
|
||||
[key: string]: any
|
||||
}
|
||||
|
||||
export interface BotCredentialsJsonConfig extends ThirdPartyCredentialsJsonConfig {
|
||||
reddit: RedditCredentials
|
||||
}
|
||||
|
||||
export interface BotCredentialsConfig extends ThirdPartyCredentialsJsonConfig {
|
||||
reddit: RequiredOperatorRedditCredentials
|
||||
}
|
||||
|
||||
export interface BotInstanceConfig extends BotInstanceJsonConfig {
|
||||
credentials: RequiredOperatorRedditCredentials
|
||||
credentials: BotCredentialsJsonConfig
|
||||
snoowrap: {
|
||||
proxy?: string,
|
||||
debug?: boolean,
|
||||
@@ -1340,6 +1723,7 @@ export interface BotInstanceConfig extends BotInstanceJsonConfig {
|
||||
},
|
||||
polling: {
|
||||
sharedMod: boolean,
|
||||
stagger?: number,
|
||||
limit: number,
|
||||
interval: number,
|
||||
},
|
||||
@@ -1364,12 +1748,17 @@ export interface OperatorConfig extends OperatorJsonConfig {
|
||||
level: LogLevel,
|
||||
path?: string,
|
||||
},
|
||||
caching: StrongCache,
|
||||
web: {
|
||||
port: number,
|
||||
caching: CacheOptions,
|
||||
session: {
|
||||
provider: CacheOptions,
|
||||
maxAge: number,
|
||||
secret: string,
|
||||
}
|
||||
},
|
||||
invites: {
|
||||
maxAge: number
|
||||
},
|
||||
logLevel?: LogLevel,
|
||||
maxLogs: number,
|
||||
clients: BotConnection[]
|
||||
@@ -1382,6 +1771,7 @@ export interface OperatorConfig extends OperatorJsonConfig {
|
||||
friendly?: string,
|
||||
}
|
||||
bots: BotInstanceConfig[]
|
||||
credentials: ThirdPartyCredentialsJsonConfig
|
||||
}
|
||||
|
||||
//export type OperatorConfig = Required<OperatorJsonConfig>;
|
||||
@@ -1410,3 +1800,157 @@ export interface LogInfo {
|
||||
labels?: string[]
|
||||
bot?: string
|
||||
}
|
||||
|
||||
export interface ActionResult {
|
||||
kind: string,
|
||||
name: string,
|
||||
run: boolean,
|
||||
runReason?: string,
|
||||
dryRun: boolean,
|
||||
success: boolean,
|
||||
result?: string,
|
||||
}
|
||||
|
||||
export interface ActionProcessResult {
|
||||
success: boolean,
|
||||
dryRun: boolean,
|
||||
result?: string
|
||||
}
|
||||
|
||||
export interface ActionedEvent {
|
||||
activity: {
|
||||
peek: string
|
||||
link: string
|
||||
}
|
||||
author: string
|
||||
timestamp: number
|
||||
check: string
|
||||
ruleSummary: string,
|
||||
subreddit: string,
|
||||
ruleResults: RuleResult[]
|
||||
actionResults: ActionResult[]
|
||||
}
|
||||
|
||||
export interface UserResultCache {
|
||||
result: boolean,
|
||||
ruleResults: RuleResult[]
|
||||
}
|
||||
|
||||
export type RedditEntityType = 'user' | 'subreddit';
|
||||
|
||||
export interface RedditEntity {
|
||||
name: string
|
||||
type: RedditEntityType
|
||||
}
|
||||
|
||||
export interface StatusCodeError extends Error {
|
||||
name: 'StatusCodeError',
|
||||
statusCode: number,
|
||||
message: string,
|
||||
response: IncomingMessage,
|
||||
error: Error
|
||||
}
|
||||
|
||||
export interface HistoricalStatsDisplay extends HistoricalStats {
|
||||
checksRunTotal: number
|
||||
checksFromCacheTotal: number
|
||||
checksTriggeredTotal: number
|
||||
rulesRunTotal: number
|
||||
rulesCachedTotal: number
|
||||
rulesTriggeredTotal: number
|
||||
actionsRunTotal: number
|
||||
}
|
||||
|
||||
export interface HistoricalStats {
|
||||
eventsCheckedTotal: number
|
||||
eventsActionedTotal: number
|
||||
checksRun: Map<string, number>
|
||||
checksFromCache: Map<string, number>
|
||||
checksTriggered: Map<string, number>
|
||||
rulesRun: Map<string, number>
|
||||
//rulesCached: Map<string, number>
|
||||
rulesCachedTotal: number
|
||||
rulesTriggered: Map<string, number>
|
||||
actionsRun: Map<string, number>
|
||||
[index: string]: any
|
||||
}

export interface SubredditHistoricalStats {
    allTime: HistoricalStats
    lastReload: HistoricalStats
}

export interface SubredditHistoricalStatsDisplay {
    allTime: HistoricalStatsDisplay
    lastReload: HistoricalStatsDisplay
}

export interface ManagerStats {
    // eventsCheckedTotal: number
    // eventsCheckedSinceStartTotal: number
    eventsAvg: number
    // checksRunTotal: number
    // checksRunSinceStartTotal: number
    // checksTriggered: number
    // checksTriggeredTotal: number
    // checksTriggeredSinceStart: number
    // checksTriggeredSinceStartTotal: number
    // rulesRunTotal: number
    // rulesRunSinceStartTotal: number
    // rulesCachedTotal: number
    // rulesCachedSinceStartTotal: number
    // rulesTriggeredTotal: number
    // rulesTriggeredSinceStartTotal: number
    rulesAvg: number
    // actionsRun: number
    // actionsRunTotal: number
    // actionsRunSinceStart: number,
    // actionsRunSinceStartTotal: number
    historical: SubredditHistoricalStatsDisplay
    cache: {
        provider: string,
        currentKeyCount: number,
        isShared: boolean,
        totalRequests: number,
        totalMiss: number,
        missPercent: string,
        requestRate: number,
        types: ResourceStats
    },
}

export interface HistoricalStatUpdateData {
    eventsCheckedTotal?: number
    eventsActionedTotal?: number
    checksRun: string[] | string
    checksTriggered: string[] | string
    checksFromCache: string[] | string
    actionsRun: string[] | string
    rulesRun: string[] | string
    rulesCachedTotal: number
    rulesTriggered: string[] | string
}

export type SearchFacetType = 'title' | 'url' | 'duplicates' | 'crossposts' | 'external';

export interface RepostItem {
    value: string
    createdOn?: number
    source: string
    sourceUrl?: string
    score?: number
    id: string
    itemType: string
    acquisitionType: SearchFacetType | 'comment'
    sourceObj?: any
    reqSameness?: number
}

export interface RepostItemResult extends RepostItem {
    sameness: number
}

export interface StringComparisonOptions {
    lengthWeight?: number,
    transforms?: ((str: string) => string)[]
}
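
// Illustrative sketch (assumption, not the project's implementation): one way StringComparisonOptions
// might be consumed -- apply each transform in order before comparing the normalized strings.
const normalizeForComparison = (str: string, opts: StringComparisonOptions = {}): string => {
    const {transforms = []} = opts;
    return transforms.reduce((acc, t) => t(acc), str);
};

// Example with hypothetical transforms (lowercase, then strip punctuation)
const cleaned = normalizeForComparison('Check out my NEW video!!', {
    transforms: [
        (s) => s.toLowerCase(),
        (s) => s.replace(/[^\w\s]/g, ''),
    ],
});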

@@ -13,9 +13,17 @@ import {ApproveActionJson} from "../Action/ApproveAction";
import {BanActionJson} from "../Action/BanAction";
import {RegexRuleJSONConfig} from "../Rule/RegexRule";
import {MessageActionJson} from "../Action/MessageAction";
import {RepostRuleJSONConfig} from "../Rule/RepostRule";

export type RuleJson = RecentActivityRuleJSONConfig | RepeatActivityJSONConfig | AuthorRuleJSONConfig | AttributionJSONConfig | HistoryJSONConfig | RegexRuleJSONConfig | string;
export type RuleJson = RecentActivityRuleJSONConfig | RepeatActivityJSONConfig | AuthorRuleJSONConfig | AttributionJSONConfig | HistoryJSONConfig | RegexRuleJSONConfig | RepostRuleJSONConfig | string;
export type RuleObjectJson = Exclude<RuleJson, string>

export type ActionJson = CommentActionJson | FlairActionJson | ReportActionJson | LockActionJson | RemoveActionJson | ApproveActionJson | BanActionJson | UserNoteActionJson | MessageActionJson | string;
export type ActionObjectJson = Exclude<ActionJson, string>;

// borrowed from https://github.com/jabacchetta/set-random-interval/blob/master/src/index.ts
export type SetRandomInterval = (
    intervalFunction: () => void,
    minDelay: number,
    maxDelay: number,
) => { clear: () => void };
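
// Illustrative sketch (assumption): a minimal implementation matching the SetRandomInterval type,
// in the spirit of the set-random-interval package the type is borrowed from. Each tick schedules
// the next run with a new random delay between minDelay and maxDelay (milliseconds).
const setRandomInterval: SetRandomInterval = (intervalFunction, minDelay, maxDelay) => {
    let timeout: ReturnType<typeof setTimeout> | undefined;
    const scheduleNext = () => {
        const delay = Math.floor(Math.random() * (maxDelay - minDelay + 1)) + minDelay;
        timeout = setTimeout(() => {
            intervalFunction();
            scheduleNext();
        }, delay);
    };
    scheduleNext();
    return {
        clear: () => {
            if (timeout !== undefined) {
                clearTimeout(timeout);
            }
        }
    };
};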

@@ -1,12 +1,12 @@
import {Logger} from "winston";
import {
    buildCacheOptionsFromProvider,
    buildCacheOptionsFromProvider, buildCachePrefix,
    createAjvFactory,
    mergeArr,
    normalizeName,
    overwriteMerge,
    parseBool, randomId,
    readJson,
    readConfigFile,
    removeUndefinedKeys
} from "./util";
import {CommentCheck} from "./Check/CommentCheck";
@@ -31,7 +31,7 @@ import {
    CacheOptions,
    BotInstanceJsonConfig,
    BotInstanceConfig,
    RequiredWebRedditCredentials
    RequiredWebRedditCredentials, RedditCredentials, BotCredentialsJsonConfig, BotCredentialsConfig
} from "./Common/interfaces";
import {isRuleSetJSON, RuleSetJson, RuleSetObjectJson} from "./Rule/RuleSet";
import deepEqual from "fast-deep-equal";
@@ -43,6 +43,7 @@ import {operatorConfig} from "./Utils/CommandConfig";
import merge from 'deepmerge';
import * as process from "process";
import {cacheOptDefaults, cacheTTLDefaults} from "./Common/defaults";
import objectHash from "object-hash";

export interface ConfigBuilderOptions {
    logger: Logger,
@@ -141,15 +142,30 @@ export const buildPollingOptions = (values: (string | PollingOptions)[]): Pollin
    let opts: PollingOptionsStrong[] = [];
    for (const v of values) {
        if (typeof v === 'string') {
            opts.push({pollOn: v as PollOn, interval: DEFAULT_POLLING_INTERVAL, limit: DEFAULT_POLLING_LIMIT});
            opts.push({
                pollOn: v as PollOn,
                interval: DEFAULT_POLLING_INTERVAL,
                limit: DEFAULT_POLLING_LIMIT,
                clearProcessed: {
                    size: DEFAULT_POLLING_LIMIT,
                    retain: DEFAULT_POLLING_LIMIT,
                }
            });
        } else {
            const {
                pollOn: p,
                interval = DEFAULT_POLLING_INTERVAL,
                limit = DEFAULT_POLLING_LIMIT,
                delayUntil,
                clearProcessed = {size: limit, retain: limit},
            } = v;
            opts.push({pollOn: p as PollOn, interval, limit, delayUntil});
            opts.push({
                pollOn: p as PollOn,
                interval,
                limit,
                delayUntil,
                clearProcessed
            });
        }
    }
    return opts;
@@ -291,10 +307,6 @@ export const parseDefaultBotInstanceFromArgs = (args: any): BotInstanceJsonConfi
        polling: {
            sharedMod,
        },
        caching: {
            provider: caching,
            authorTTL
        },
        nanny: {
            softLimit,
            hardLimit
@@ -316,6 +328,8 @@ export const parseOpConfigFromArgs = (args: any): OperatorJsonConfig => {
        sessionSecret,
        web,
        mode,
        caching,
        authorTTL,
    } = args || {};

    const data = {
@@ -328,6 +342,10 @@ export const parseOpConfigFromArgs = (args: any): OperatorJsonConfig => {
            level: logLevel,
            path: logDir === true ? `${process.cwd()}/logs` : undefined,
        },
        caching: {
            provider: caching,
            authorTTL
        },
        web: {
            enabled: web,
            port,
@@ -345,9 +363,9 @@ export const parseOpConfigFromArgs = (args: any): OperatorJsonConfig => {
    return removeUndefinedKeys(data) as OperatorJsonConfig;
}

const parseListFromEnv = (val: string|undefined) => {
const parseListFromEnv = (val: string | undefined) => {
    let listVals: undefined | string[];
    if(val === undefined) {
    if (val === undefined) {
        return listVals;
    }
    const trimmedVal = val.trim();
@@ -369,10 +387,13 @@ const parseListFromEnv = (val: string|undefined) => {
export const parseDefaultBotInstanceFromEnv = (): BotInstanceJsonConfig => {
    const data = {
        credentials: {
            clientId: process.env.CLIENT_ID,
            clientSecret: process.env.CLIENT_SECRET,
            accessToken: process.env.ACCESS_TOKEN,
            refreshToken: process.env.REFRESH_TOKEN,
            reddit: {
                clientId: process.env.CLIENT_ID,
                clientSecret: process.env.CLIENT_SECRET,
                accessToken: process.env.ACCESS_TOKEN,
                refreshToken: process.env.REFRESH_TOKEN,
            },
            youtube: process.env.YOUTUBE_API_KEY
        },
        subreddits: {
            names: parseListFromEnv(process.env.SUBREDDITS),
@@ -387,13 +408,6 @@ export const parseDefaultBotInstanceFromEnv = (): BotInstanceJsonConfig => {
        polling: {
            sharedMod: parseBool(process.env.SHARE_MOD),
        },
        caching: {
            provider: {
                // @ts-ignore
                store: process.env.CACHING as (CacheProvider | undefined)
            },
            authorTTL: process.env.AUTHOR_TTL !== undefined ? parseInt(process.env.AUTHOR_TTL) : undefined
        },
        nanny: {
            softLimit: process.env.SOFT_LIMIT !== undefined ? parseInt(process.env.SOFT_LIMIT) : undefined,
            hardLimit: process.env.HARD_LIMIT !== undefined ? parseInt(process.env.HARD_LIMIT) : undefined
@@ -405,7 +419,7 @@ export const parseDefaultBotInstanceFromEnv = (): BotInstanceJsonConfig => {
export const parseOpConfigFromEnv = (): OperatorJsonConfig => {
    const data = {
        mode: process.env.MODE !== undefined ? process.env.MODE as ('all' | 'server' | 'client') : undefined,
        operator: {
        operator: {
            name: parseListFromEnv(process.env.OPERATOR),
            display: process.env.OPERATOR_DISPLAY
        },
@@ -414,6 +428,13 @@ export const parseOpConfigFromEnv = (): OperatorJsonConfig => {
            level: process.env.LOG_LEVEL,
            path: process.env.LOG_DIR === 'true' ? `${process.cwd()}/logs` : undefined,
        },
        caching: {
            provider: {
                // @ts-ignore
                store: process.env.CACHING as (CacheProvider | undefined)
            },
            authorTTL: process.env.AUTHOR_TTL !== undefined ? parseInt(process.env.AUTHOR_TTL) : undefined
        },
        web: {
            port: process.env.PORT !== undefined ? parseInt(process.env.PORT) : undefined,
            session: {
@@ -425,6 +446,11 @@ export const parseOpConfigFromEnv = (): OperatorJsonConfig => {
                clientSecret: process.env.CLIENT_SECRET,
                redirectUri: process.env.REDIRECT_URI,
            },
        },
        credentials: {
            youtube: {
                apiKey: process.env.YOUTUBE_API_KEY
            }
        }
    }

|
||||
|
||||
@@ -458,7 +484,7 @@ export const parseOperatorConfigFromSources = async (args: any): Promise<Operato
|
||||
process.env[k] = v;
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
let msg = 'No .env file found at default location (./env)';
|
||||
if (envPath !== undefined) {
|
||||
msg = `${msg} or OPERATOR_ENV path (${envPath})`;
|
||||
@@ -473,15 +499,15 @@ export const parseOperatorConfigFromSources = async (args: any): Promise<Operato
|
||||
if (operatorConfig !== undefined) {
|
||||
let rawConfig;
|
||||
try {
|
||||
rawConfig = await readJson(operatorConfig, {log: initLogger});
|
||||
} catch (err) {
|
||||
rawConfig = await readConfigFile(operatorConfig, {log: initLogger}) as object;
|
||||
} catch (err: any) {
|
||||
initLogger.error('Cannot continue app startup because operator config file was not parseable.');
|
||||
err.logged = true;
|
||||
throw err;
|
||||
}
|
||||
try {
|
||||
configFromFile = validateJson(rawConfig, operatorSchema, initLogger) as OperatorJsonConfig;
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
initLogger.error('Cannot continue app startup because operator config file was not valid.');
|
||||
throw err;
|
||||
}
|
||||
@@ -493,21 +519,25 @@ export const parseOperatorConfigFromSources = async (args: any): Promise<Operato
|
||||
const defaultBotInstanceFromEnv = parseDefaultBotInstanceFromEnv();
|
||||
const {bots: botInstancesFromFile = [], ...restConfigFile} = configFromFile;
|
||||
|
||||
const defaultBotInstance = merge.all([defaultBotInstanceFromEnv, defaultBotInstanceFromArgs], {
|
||||
const mergedConfig = merge.all([opConfigFromEnv, restConfigFile, opConfigFromArgs], {
|
||||
arrayMerge: overwriteMerge,
|
||||
});
|
||||
|
||||
const defaultBotInstance = merge.all([defaultBotInstanceFromEnv, defaultBotInstanceFromArgs], {
|
||||
arrayMerge: overwriteMerge,
|
||||
}) as BotInstanceJsonConfig;
|
||||
|
||||
if (configFromFile.caching !== undefined) {
|
||||
defaultBotInstance.caching = configFromFile.caching;
|
||||
}
|
||||
|
||||
let botInstances = [];
|
||||
if(botInstancesFromFile.length === 0) {
|
||||
if (botInstancesFromFile.length === 0) {
|
||||
botInstances = [defaultBotInstance];
|
||||
} else {
|
||||
botInstances = botInstancesFromFile.map(x => merge.all([defaultBotInstance, x], {arrayMerge: overwriteMerge}));
|
||||
}
|
||||
|
||||
const mergedConfig = merge.all([opConfigFromEnv, restConfigFile, opConfigFromArgs], {
|
||||
arrayMerge: overwriteMerge,
|
||||
});
|
||||
|
||||
return removeUndefinedKeys({...mergedConfig, bots: botInstances}) as OperatorJsonConfig;
|
||||
}
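
// Illustrative sketch (not from the diff): with deepmerge's merge.all (imported above), later
// sources win on conflicting keys, so the effective precedence is ENV < config file < CLI args.
// Values below are hypothetical.
const merged = merge.all([
    {web: {port: 8085}},   // from environment
    {web: {port: 9000}},   // from the operator config file
    {web: {port: 9001}},   // from CLI args
]) as {web: {port: number}};
// merged.web.port === 9001 -- args win, mirroring merge.all([opConfigFromEnv, restConfigFile, opConfigFromArgs]) above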
|
||||
|
||||
@@ -522,12 +552,17 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
|
||||
level = 'verbose',
|
||||
path,
|
||||
} = {},
|
||||
caching: opCache,
|
||||
web: {
|
||||
port = 8085,
|
||||
maxLogs = 200,
|
||||
caching: webCaching = {},
|
||||
session: {
|
||||
secret = randomId(),
|
||||
provider: sessionProvider = { store: 'memory' },
|
||||
maxAge: sessionMaxAge = 86400,
|
||||
} = {},
|
||||
invites: {
|
||||
maxAge: inviteMaxAge = 0,
|
||||
} = {},
|
||||
clients,
|
||||
credentials: webCredentials,
|
||||
@@ -538,105 +573,204 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
|
||||
secret: apiSecret = randomId(),
|
||||
friendly,
|
||||
} = {},
|
||||
credentials = {},
|
||||
bots = [],
|
||||
} = data;
|
||||
|
||||
let hydratedBots: BotInstanceConfig[] = bots.map(x => {
|
||||
const {
|
||||
polling: {
|
||||
sharedMod = false,
|
||||
limit = 100,
|
||||
interval = 30,
|
||||
} = {},
|
||||
queue: {
|
||||
maxWorkers = 1,
|
||||
} = {},
|
||||
caching,
|
||||
nanny: {
|
||||
softLimit = 250,
|
||||
hardLimit = 50
|
||||
} = {},
|
||||
snoowrap = {},
|
||||
credentials: {
|
||||
clientId: ci,
|
||||
clientSecret: cs,
|
||||
...restCred
|
||||
} = {},
|
||||
subreddits: {
|
||||
names = [],
|
||||
exclude = [],
|
||||
wikiConfig = 'botconfig/contextbot',
|
||||
dryRun,
|
||||
heartbeatInterval = 300,
|
||||
} = {},
|
||||
} = x;
|
||||
|
||||
|
||||
let cache: StrongCache;
|
||||
let defaultProvider: CacheOptions;
|
||||
let opActionedEventsMax: number | undefined;
|
||||
let opActionedEventsDefault: number = 25;
|
||||
|
||||
if(caching === undefined) {
|
||||
if (opCache === undefined) {
|
||||
defaultProvider = {
|
||||
store: 'memory',
|
||||
...cacheOptDefaults
|
||||
};
|
||||
cache = {
|
||||
...cacheTTLDefaults,
|
||||
provider: {
|
||||
store: 'memory',
|
||||
...cacheOptDefaults
|
||||
}
|
||||
provider: defaultProvider,
|
||||
actionedEventsDefault: opActionedEventsDefault,
|
||||
};
|
||||
|
||||
} else {
|
||||
const {provider, ...restConfig} = caching;
|
||||
const {provider, actionedEventsMax, actionedEventsDefault = opActionedEventsDefault, ...restConfig} = opCache;
|
||||
|
||||
if (actionedEventsMax !== undefined && actionedEventsMax !== null) {
|
||||
opActionedEventsMax = actionedEventsMax;
|
||||
opActionedEventsDefault = Math.min(actionedEventsDefault, actionedEventsMax);
|
||||
}
|
||||
|
||||
if (typeof provider === 'string') {
|
||||
cache = {
|
||||
defaultProvider = {
|
||||
store: provider as CacheProvider,
|
||||
...cacheOptDefaults
|
||||
};
|
||||
} else {
|
||||
const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
|
||||
defaultProvider = {
|
||||
store,
|
||||
...cacheOptDefaults,
|
||||
...rest,
|
||||
};
|
||||
}
|
||||
cache = {
|
||||
...cacheTTLDefaults,
|
||||
...restConfig,
|
||||
actionedEventsMax: opActionedEventsMax,
|
||||
actionedEventsDefault: opActionedEventsDefault,
|
||||
provider: defaultProvider,
|
||||
}
|
||||
}
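
// Illustrative sketch (assumption, helper name hypothetical): the provider-normalization pattern
// used above pulled out on its own. A bare string becomes a store name with default options; an
// object keeps its overrides on top of the defaults.
const normalizeProvider = (provider: CacheProvider | CacheOptions | undefined): CacheOptions => {
    if (typeof provider === 'string') {
        return {store: provider as CacheProvider, ...cacheOptDefaults};
    }
    const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
    return {store, ...cacheOptDefaults, ...rest};
};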
|
||||
|
||||
let hydratedBots: BotInstanceConfig[] = bots.map(x => {
|
||||
const {
|
||||
name: botName,
|
||||
polling: {
|
||||
sharedMod = false,
|
||||
stagger,
|
||||
limit = 100,
|
||||
interval = 30,
|
||||
} = {},
|
||||
queue: {
|
||||
maxWorkers = 1,
|
||||
} = {},
|
||||
caching,
|
||||
nanny: {
|
||||
softLimit = 250,
|
||||
hardLimit = 50
|
||||
} = {},
|
||||
snoowrap = {},
|
||||
credentials = {},
|
||||
subreddits: {
|
||||
names = [],
|
||||
exclude = [],
|
||||
wikiConfig = 'botconfig/contextbot',
|
||||
dryRun,
|
||||
heartbeatInterval = 300,
|
||||
} = {},
|
||||
} = x;
|
||||
|
||||
let botCache: StrongCache;
|
||||
let botActionedEventsDefault: number;
|
||||
|
||||
if (caching === undefined) {
|
||||
|
||||
botCache = {
|
||||
...cacheTTLDefaults,
|
||||
...restConfig,
|
||||
provider: {
|
||||
store: provider as CacheProvider,
|
||||
...cacheOptDefaults
|
||||
actionedEventsDefault: opActionedEventsDefault,
|
||||
actionedEventsMax: opActionedEventsMax,
|
||||
provider: {...defaultProvider}
|
||||
};
|
||||
} else {
|
||||
const {
|
||||
provider,
|
||||
actionedEventsMax = opActionedEventsMax,
|
||||
actionedEventsDefault = opActionedEventsDefault,
|
||||
...restConfig
|
||||
} = caching;
|
||||
|
||||
botActionedEventsDefault = actionedEventsDefault;
|
||||
if (actionedEventsMax !== undefined) {
|
||||
botActionedEventsDefault = Math.min(actionedEventsDefault, actionedEventsMax);
|
||||
}
|
||||
|
||||
if (typeof provider === 'string') {
|
||||
botCache = {
|
||||
...cacheTTLDefaults,
|
||||
...restConfig,
|
||||
actionedEventsDefault: botActionedEventsDefault,
|
||||
provider: {
|
||||
store: provider as CacheProvider,
|
||||
...cacheOptDefaults
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
|
||||
botCache = {
|
||||
...cacheTTLDefaults,
|
||||
...restConfig,
|
||||
actionedEventsDefault: botActionedEventsDefault,
|
||||
actionedEventsMax,
|
||||
provider: {
|
||||
store,
|
||||
...cacheOptDefaults,
|
||||
...rest,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let botCreds: BotCredentialsConfig;
|
||||
|
||||
if((credentials as any).clientId !== undefined) {
|
||||
const creds = credentials as RedditCredentials;
|
||||
const {
|
||||
clientId: ci,
|
||||
clientSecret: cs,
|
||||
...restCred
|
||||
} = creds;
|
||||
botCreds = {
|
||||
reddit: {
|
||||
clientId: (ci as string),
|
||||
clientSecret: (cs as string),
|
||||
...restCred,
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
|
||||
cache = {
|
||||
...cacheTTLDefaults,
|
||||
...restConfig,
|
||||
provider: {
|
||||
store,
|
||||
...cacheOptDefaults,
|
||||
...rest,
|
||||
const creds = credentials as BotCredentialsJsonConfig;
|
||||
const {
|
||||
reddit: {
|
||||
clientId: ci,
|
||||
clientSecret: cs,
|
||||
...restRedditCreds
|
||||
},
|
||||
...rest
|
||||
} = creds;
|
||||
botCreds = {
|
||||
reddit: {
|
||||
clientId: (ci as string),
|
||||
clientSecret: (cs as string),
|
||||
...restRedditCreds,
|
||||
},
|
||||
...rest
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
snoowrap,
|
||||
subreddits: {
|
||||
names,
|
||||
exclude,
|
||||
wikiConfig,
|
||||
heartbeatInterval,
|
||||
dryRun,
|
||||
},
|
||||
credentials: {
|
||||
clientId: (ci as string),
|
||||
clientSecret: (cs as string),
|
||||
...restCred,
|
||||
},
|
||||
caching: cache,
|
||||
polling: {
|
||||
sharedMod,
|
||||
limit,
|
||||
interval,
|
||||
},
|
||||
queue: {
|
||||
maxWorkers,
|
||||
},
|
||||
nanny: {
|
||||
softLimit,
|
||||
hardLimit
|
||||
if (botCache.provider.prefix === undefined || botCache.provider.prefix === defaultProvider.prefix) {
|
||||
// need to provide unique prefix to bot
|
||||
botCache.provider.prefix = buildCachePrefix([botCache.provider.prefix, 'bot', (botName || objectHash.sha1(botCreds))]);
|
||||
}
|
||||
|
||||
return {
|
||||
name: botName,
|
||||
snoowrap,
|
||||
subreddits: {
|
||||
names,
|
||||
exclude,
|
||||
wikiConfig,
|
||||
heartbeatInterval,
|
||||
dryRun,
|
||||
},
|
||||
credentials: botCreds,
|
||||
caching: botCache,
|
||||
polling: {
|
||||
sharedMod,
|
||||
stagger,
|
||||
limit,
|
||||
interval,
|
||||
},
|
||||
queue: {
|
||||
maxWorkers,
|
||||
},
|
||||
nanny: {
|
||||
softLimit,
|
||||
hardLimit
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
const defaultOperators = typeof name === 'string' ? [name] : name;
|
||||
|
||||
const config: OperatorConfig = {
|
||||
@@ -649,19 +783,19 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
|
||||
level,
|
||||
path
|
||||
},
|
||||
caching: cache,
|
||||
web: {
|
||||
port,
|
||||
caching: {
|
||||
...defaultProvider,
|
||||
...webCaching
|
||||
},
|
||||
invites: {
|
||||
maxAge: inviteMaxAge,
|
||||
},
|
||||
session: {
|
||||
secret,
|
||||
provider: typeof sessionProvider === 'string' ? {
|
||||
...buildCacheOptionsFromProvider({
|
||||
ttl: 86400000,
|
||||
store: sessionProvider,
|
||||
})
|
||||
} : {
|
||||
...buildCacheOptionsFromProvider(sessionProvider),
|
||||
ttl: 86400000,
|
||||
},
|
||||
maxAge: sessionMaxAge,
|
||||
},
|
||||
maxLogs,
|
||||
clients: clients === undefined ? [{host: 'localhost:8095', secret: apiSecret}] : clients,
|
||||
@@ -674,6 +808,7 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
|
||||
friendly
|
||||
},
|
||||
bots: hydratedBots,
|
||||
credentials,
|
||||
};
|
||||
|
||||
return config;
|
||||
|
||||
@@ -1,19 +1,21 @@
|
||||
import {SubmissionRule, SubmissionRuleJSONConfig} from "./SubmissionRule";
|
||||
import {ActivityWindowType, DomainInfo, ReferenceSubmission} from "../Common/interfaces";
|
||||
import {ActivityWindowType, CommentState, DomainInfo, ReferenceSubmission, SubmissionState} from "../Common/interfaces";
|
||||
import {Rule, RuleOptions, RuleResult} from "./index";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import {getAttributionIdentifier} from "../Utils/SnoowrapUtils";
|
||||
import dayjs from "dayjs";
|
||||
import {
|
||||
comparisonTextOp,
|
||||
asSubmission,
|
||||
comparisonTextOp, convertSubredditsRawToStrong,
|
||||
FAIL,
|
||||
formatNumber,
|
||||
formatNumber, getActivitySubredditName, isSubmission,
|
||||
parseGenericValueOrPercentComparison,
|
||||
parseSubredditName,
|
||||
PASS
|
||||
} from "../util";
|
||||
import { Comment } from "snoowrap/dist/objects";
|
||||
import SimpleError from "../Utils/SimpleError";
|
||||
import as from "async";
|
||||
|
||||
|
||||
export interface AttributionCriteria {
|
||||
@@ -52,8 +54,6 @@ export interface AttributionCriteria {
|
||||
/**
|
||||
* A list of domains whose Activities will be tested against `threshold`.
|
||||
*
|
||||
* If this is present then `aggregateOn` is ignored.
|
||||
*
|
||||
* The values are tested as partial strings so you do not need to include full URLs, just the part that matters.
|
||||
*
|
||||
* EX `["youtube"]` will match submissions with the domain `https://youtube.com/c/aChannel`
|
||||
@@ -77,38 +77,55 @@ export interface AttributionCriteria {
|
||||
domainsCombined?: boolean,
|
||||
|
||||
/**
|
||||
* Only include Activities from this list of Subreddits (by name, case-insensitive)
|
||||
* When present, Activities WILL ONLY be counted if they are found in this list of Subreddits
|
||||
*
|
||||
* Each value in the list can be either:
|
||||
*
|
||||
* EX `["mealtimevideos","askscience"]`
|
||||
* @examples ["mealtimevideos","askscience"]
|
||||
* @minItems 1
|
||||
* * string (name of subreddit)
|
||||
* * regular expression to run on the subreddit name
|
||||
* * `SubredditState`
|
||||
*
|
||||
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
|
||||
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
|
||||
* */
|
||||
include?: string[],
|
||||
/**
|
||||
* Do not include Activities from this list of Subreddits (by name, case-insensitive)
|
||||
* When present, Activities WILL NOT be counted if they are found in this list of Subreddits
|
||||
*
|
||||
* Will be ignored if `include` is present.
|
||||
* Each value in the list can be either:
|
||||
*
|
||||
* EX `["mealtimevideos","askscience"]`
|
||||
* @examples ["mealtimevideos","askscience"]
|
||||
* @minItems 1
|
||||
* * string (name of subreddit)
|
||||
* * regular expression to run on the subreddit name
|
||||
* * `SubredditState`
|
||||
*
|
||||
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
|
||||
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
|
||||
* */
|
||||
exclude?: string[],
|
||||
|
||||
/**
|
||||
* If `domains` is not specified this list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`
|
||||
* When present, Submissions from `window` will only be counted if they meet this criteria
|
||||
* */
|
||||
submissionState?: SubmissionState
|
||||
/**
|
||||
* When present, Comments from `window` will only be counted if they meet this criteria
|
||||
* */
|
||||
commentState?: CommentState
|
||||
|
||||
/**
|
||||
* This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`
|
||||
*
|
||||
* * If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)
|
||||
* * If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)
|
||||
* * If `link` is included then aggregate author's submission history which is external links but not media
|
||||
* * If `redditMedia` is included then aggregate on author's submissions history which are media hosted on reddit: galleries, videos, and images (i.redd.it / v.redd.it)
|
||||
* * If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or domain is `reddit.com`
|
||||
* * If `link` is included then aggregate author's submission history which is external links and not recognized as `media` by reddit
|
||||
*
|
||||
* If nothing is specified or list is empty (default) all domains are aggregated
|
||||
* If nothing is specified or list is empty (default) rule will only aggregate on `link` and `media` (ignores reddit-hosted content and self-posts)
|
||||
*
|
||||
* @default undefined
|
||||
* @examples [[]]
|
||||
* */
|
||||
aggregateOn?: ('media' | 'self' | 'link')[],
|
||||
aggregateOn?: ('media' | 'redditMedia' | 'self' | 'link')[],
|
||||
|
||||
/**
|
||||
* Should the criteria consolidate recognized media domains into the parent domain?
|
||||
@@ -174,25 +191,40 @@ export class AttributionRule extends Rule {
|
||||
window,
|
||||
thresholdOn = 'all',
|
||||
minActivityCount = 10,
|
||||
aggregateOn = [],
|
||||
aggregateOn = ['link','media'],
|
||||
consolidateMediaDomains = false,
|
||||
domains = [],
|
||||
domainsCombined = false,
|
||||
include: includeRaw = [],
|
||||
exclude: excludeRaw = [],
|
||||
include = [],
|
||||
exclude = [],
|
||||
commentState,
|
||||
submissionState,
|
||||
} = criteria;
|
||||
|
||||
const include = includeRaw.map(x => parseSubredditName(x).toLowerCase());
|
||||
const exclude = excludeRaw.map(x => parseSubredditName(x).toLowerCase());
|
||||
|
||||
const {operator, value, isPercent, extra = ''} = parseGenericValueOrPercentComparison(threshold);
|
||||
|
||||
let activities = thresholdOn === 'submissions' ? await this.resources.getAuthorSubmissions(item.author, {window: window}) : await this.resources.getAuthorActivities(item.author, {window: window});
|
||||
activities = activities.filter(act => {
|
||||
if (include.length > 0) {
|
||||
return include.some(x => x === act.subreddit.display_name.toLowerCase());
|
||||
} else if (exclude.length > 0) {
|
||||
return !exclude.some(x => x === act.subreddit.display_name.toLowerCase())
|
||||
|
||||
if(include.length > 0 || exclude.length > 0) {
|
||||
const defaultOpts = {
|
||||
defaultFlags: 'i',
|
||||
generateDescription: true
|
||||
};
|
||||
if(include.length > 0) {
|
||||
const subStates = include.map(x => convertSubredditsRawToStrong(x, defaultOpts));
|
||||
activities = await this.resources.batchTestSubredditCriteria(activities, subStates);
|
||||
} else {
|
||||
const subStates = exclude.map(x => convertSubredditsRawToStrong(x, defaultOpts));
|
||||
const toExclude = (await this.resources.batchTestSubredditCriteria(activities, subStates)).map(x => x.id);
|
||||
activities = activities.filter(x => !toExclude.includes(x.id));
|
||||
}
|
||||
}
|
||||
|
||||
activities = await as.filter(activities, async (activity) => {
|
||||
if (asSubmission(activity) && submissionState !== undefined) {
|
||||
return await this.resources.testItemCriteria(activity, [submissionState]);
|
||||
} else if (commentState !== undefined) {
|
||||
return await this.resources.testItemCriteria(activity, [commentState]);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
@@ -219,7 +251,7 @@ export class AttributionRule extends Rule {
|
||||
|
||||
const realDomains: DomainInfo[] = domains.map(x => {
|
||||
if(x === SUBMISSION_DOMAIN) {
|
||||
if(!(item instanceof Submission)) {
|
||||
if(!(asSubmission(item))) {
|
||||
throw new SimpleError('Cannot run Attribution Rule with the domain SELF:AGG on a Comment');
|
||||
}
|
||||
return getAttributionIdentifier(item, consolidateMediaDomains);
|
||||
@@ -228,21 +260,28 @@ export class AttributionRule extends Rule {
|
||||
});
|
||||
const realDomainIdents = realDomains.map(x => x.aliases).flat(1).map(x => x.toLowerCase());
|
||||
|
||||
const submissions: Submission[] = thresholdOn === 'submissions' ? activities as Submission[] : activities.filter(x => x instanceof Submission) as Submission[];
|
||||
const submissions: Submission[] = thresholdOn === 'submissions' ? activities as Submission[] : activities.filter(x => isSubmission(x)) as Submission[];
|
||||
const aggregatedSubmissions = submissions.reduce((acc: Map<string, DomainAgg>, sub) => {
|
||||
const domainInfo = getAttributionIdentifier(sub, consolidateMediaDomains)
|
||||
|
||||
let domainType = 'link';
|
||||
if(sub.secure_media !== undefined && sub.secure_media !== null) {
|
||||
domainType = 'media';
|
||||
} else if((sub.is_self || sub.is_video || sub.domain === 'i.redd.it')) {
|
||||
if(sub.is_video || ['i.redd.it','v.redd.it'].includes(sub.domain)
|
||||
// @ts-ignore
|
||||
|| sub.gallery_data !== undefined) {
|
||||
domainType = 'redditMedia';
|
||||
} else if(sub.is_self || sub.domain === 'reddit.com') {
|
||||
domainType = 'self';
|
||||
} else if(sub.secure_media !== undefined && sub.secure_media !== null) {
|
||||
domainType = 'media';
|
||||
}
|
||||
|
||||
if(realDomains.length === 0 && aggregateOn.length !== 0) {
|
||||
if(aggregateOn.length !== 0) {
|
||||
if(domainType === 'media' && !aggregateOn.includes('media')) {
|
||||
return acc;
|
||||
}
|
||||
if(domainType === 'redditMedia' && !aggregateOn.includes('redditMedia')) {
|
||||
return acc;
|
||||
}
|
||||
if(domainType === 'self' && !aggregateOn.includes('self')) {
|
||||
return acc;
|
||||
}
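
// Illustrative sketch (not part of the diff): the domain classification above, expressed as a
// standalone helper. Property access mirrors the snoowrap Submission fields used in the rule.
const classifyDomainType = (sub: Submission): 'redditMedia' | 'self' | 'media' | 'link' => {
    // reddit-hosted media: native video, i.redd.it / v.redd.it, or galleries
    // @ts-ignore gallery_data is not in the snoowrap typings
    if (sub.is_video || ['i.redd.it', 'v.redd.it'].includes(sub.domain) || sub.gallery_data !== undefined) {
        return 'redditMedia';
    }
    // self posts or links back to reddit.com
    if (sub.is_self || sub.domain === 'reddit.com') {
        return 'self';
    }
    // externally hosted media recognized by reddit (youtube, vimeo, etc.)
    if (sub.secure_media !== undefined && sub.secure_media !== null) {
        return 'media';
    }
    return 'link';
};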
|
||||
@@ -385,7 +424,7 @@ export class AttributionRule extends Rule {
|
||||
|
||||
}
|
||||
|
||||
interface AttributionConfig extends ReferenceSubmission {
|
||||
interface AttributionConfig {
|
||||
|
||||
/**
|
||||
* A list threshold-window values to test attribution against
|
||||
|
||||
@@ -12,11 +12,11 @@ export interface AuthorRuleConfig {
|
||||
/**
|
||||
* Will "pass" if any set of AuthorCriteria passes
|
||||
* */
|
||||
include: AuthorCriteria[];
|
||||
include?: AuthorCriteria[];
|
||||
/**
|
||||
* Only runs if include is not present. Will "pass" if any set of the AuthorCriteria does not pass
|
||||
* */
|
||||
exclude: AuthorCriteria[];
|
||||
exclude?: AuthorCriteria[];
|
||||
}
|
||||
|
||||
export interface AuthorRuleOptions extends AuthorRuleConfig, RuleOptions {
|
||||
@@ -34,8 +34,13 @@ export class AuthorRule extends Rule {
|
||||
constructor(options: AuthorRuleOptions) {
|
||||
super(options);
|
||||
|
||||
this.include = options.include.map(x => new Author(x));
|
||||
this.exclude = options.exclude.map(x => new Author(x));
|
||||
const {
|
||||
include,
|
||||
exclude,
|
||||
} = options;
|
||||
|
||||
this.include = include !== undefined ? include.map(x => new Author(x)) : [];
|
||||
this.exclude = exclude !== undefined ? exclude.map(x => new Author(x)) : [];
|
||||
|
||||
if(this.include.length === 0 && this.exclude.length === 0) {
|
||||
throw new Error('At least one of the properties [include,exclude] on Author Rule must not be empty');
|
||||
|
||||
@@ -1,17 +1,19 @@
|
||||
|
||||
import {ActivityWindowType, CompareValueOrPercent, ThresholdCriteria} from "../Common/interfaces";
|
||||
import {ActivityWindowType, CompareValueOrPercent, SubredditState, ThresholdCriteria} from "../Common/interfaces";
|
||||
import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import {getAuthorActivities} from "../Utils/SnoowrapUtils";
|
||||
import dayjs from "dayjs";
|
||||
import {
|
||||
asSubmission,
|
||||
comparisonTextOp,
|
||||
FAIL,
|
||||
formatNumber,
|
||||
formatNumber, getActivitySubredditName, isSubmission,
|
||||
parseGenericValueOrPercentComparison, parseSubredditName,
|
||||
PASS,
|
||||
percentFromString
|
||||
percentFromString, toStrongSubredditState
|
||||
} from "../util";
|
||||
import {Comment} from "snoowrap";
|
||||
|
||||
export interface CommentThresholdCriteria extends ThresholdCriteria {
|
||||
/**
|
||||
@@ -23,42 +25,56 @@ export interface CommentThresholdCriteria extends ThresholdCriteria {
|
||||
asOp?: boolean
|
||||
}
|
||||
/**
|
||||
* If both `submission` and `comment` are defined then criteria will only trigger if BOTH thresholds are met
|
||||
* Criteria will only trigger if ALL present thresholds (comment, submission, total) are met
|
||||
* */
|
||||
export interface HistoryCriteria {
|
||||
|
||||
/**
|
||||
* A string containing a comparison operator and a value to compare submissions against
|
||||
* A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) submissions against
|
||||
*
|
||||
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign]`
|
||||
*
|
||||
* * EX `> 100` => greater than 100 submissions
|
||||
* * EX `<= 75%` => submissions are equal to or less than 75% of all Activities
|
||||
* * EX `> 100` => greater than 100 filtered submissions
|
||||
* * EX `<= 75%` => filtered submissions are equal to or less than 75% of unfiltered Activities
|
||||
*
|
||||
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
|
||||
* */
|
||||
submission?: CompareValueOrPercent
|
||||
/**
|
||||
* A string containing a comparison operator and a value to compare comments against
|
||||
* A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) comments against
|
||||
*
|
||||
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`
|
||||
*
|
||||
* * EX `> 100` => greater than 100 comments
|
||||
* * EX `<= 75%` => comments are equal to or less than 75% of all Activities
|
||||
* * EX `<= 75%` => comments are equal to or less than 75% of unfiltered Activities
|
||||
*
|
||||
* If your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:
|
||||
*
|
||||
* * EX `> 100 OP` => greater than 100 comments as OP
|
||||
* EX `<= 25% as OP` => Comments as OP were less than or equal to 25% of **all Comments**
* EX `> 100 OP` => greater than 100 filtered comments as OP
* EX `<= 25% as OP` => **Filtered** comments as OP were less than or equal to 25% of **unfiltered Comments**
|
||||
*
|
||||
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
|
||||
* */
|
||||
comment?: CompareValueOrPercent
|
||||
|
||||
/**
|
||||
* A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`) activities against
|
||||
*
|
||||
* **Note:** This is only useful if using `include` or `exclude` otherwise percent will always be 100% and total === activityTotal
|
||||
*
|
||||
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`
|
||||
*
|
||||
* * EX `> 100` => greater than 100 filtered activities
|
||||
* * EX `<= 75%` => filtered activities are equal to or less than 75% of all Activities
|
||||
*
|
||||
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
|
||||
* */
|
||||
total?: CompareValueOrPercent
|
||||
|
||||
window: ActivityWindowType
|
||||
|
||||
/**
|
||||
* The minimum number of activities that must exist from the `window` results for this criteria to run
|
||||
* The minimum number of **filtered** activities that must exist from the `window` results for this criteria to run
|
||||
* @default 5
|
||||
* */
|
||||
minActivityCount?: number
|
||||
@@ -68,8 +84,9 @@ export interface HistoryCriteria {
|
||||
export class HistoryRule extends Rule {
|
||||
criteria: HistoryCriteria[];
|
||||
condition: 'AND' | 'OR';
|
||||
include: string[];
|
||||
exclude: string[];
|
||||
include: (string | SubredditState)[];
|
||||
exclude: (string | SubredditState)[];
|
||||
activityFilterFunc: (x: Submission|Comment) => Promise<boolean> = async (x) => true;
|
||||
|
||||
constructor(options: HistoryOptions) {
|
||||
super(options);
|
||||
@@ -85,8 +102,41 @@ export class HistoryRule extends Rule {
|
||||
if (this.criteria.length === 0) {
|
||||
throw new Error('Must provide at least one HistoryCriteria');
|
||||
}
|
||||
this.include = include.map(x => parseSubredditName(x).toLowerCase());
|
||||
this.exclude = exclude.map(x => parseSubredditName(x).toLowerCase());
|
||||
|
||||
this.include = include;
|
||||
this.exclude = exclude;
|
||||
|
||||
if(this.include.length > 0) {
|
||||
const subStates = include.map((x) => {
|
||||
if(typeof x === 'string') {
|
||||
return toStrongSubredditState({name: x, stateDescription: x}, {defaultFlags: 'i', generateDescription: true});
|
||||
}
|
||||
return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
|
||||
});
|
||||
this.activityFilterFunc = async (x: Submission|Comment) => {
|
||||
for(const ss of subStates) {
|
||||
if(await this.resources.testSubredditCriteria(x, ss)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
} else if(this.exclude.length > 0) {
|
||||
const subStates = exclude.map((x) => {
|
||||
if(typeof x === 'string') {
|
||||
return toStrongSubredditState({name: x, stateDescription: x}, {defaultFlags: 'i', generateDescription: true});
|
||||
}
|
||||
return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
|
||||
});
|
||||
this.activityFilterFunc = async (x: Submission|Comment) => {
|
||||
for(const ss of subStates) {
|
||||
if(await this.resources.testSubredditCriteria(x, ss)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
};
|
||||
}
|
||||
}
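
// Illustrative criteria sketch (values hypothetical): a HistoryCriteria entry combining the new
// `total` comparison with a comment threshold, e.g.
//   {
//     window: 100,            // look at the author's last 100 activities
//     comment: '> 20%',       // filtered comments must exceed 20% of unfiltered activities
//     total: '<= 75%',        // filtered activities must be at most 75% of all activities
//     minActivityCount: 5
//   }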
|
||||
|
||||
getKind(): string {
|
||||
@@ -107,25 +157,23 @@ export class HistoryRule extends Rule {
|
||||
|
||||
for (const criteria of this.criteria) {
|
||||
|
||||
const {comment, window, submission, minActivityCount = 5} = criteria;
|
||||
const {comment, window, submission, total, minActivityCount = 5} = criteria;
|
||||
|
||||
let activities = await this.resources.getAuthorActivities(item.author, {window: window});
|
||||
activities = activities.filter(act => {
|
||||
if (this.include.length > 0) {
|
||||
return this.include.some(x => x === act.subreddit.display_name.toLowerCase());
|
||||
} else if (this.exclude.length > 0) {
|
||||
return !this.exclude.some(x => x === act.subreddit.display_name.toLowerCase())
|
||||
const filteredActivities = [];
|
||||
for(const a of activities) {
|
||||
if(await this.activityFilterFunc(a)) {
|
||||
filteredActivities.push(a);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
if (activities.length < minActivityCount) {
|
||||
if (filteredActivities.length < minActivityCount) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const activityTotal = activities.length;
|
||||
const {submissionTotal, commentTotal, opTotal} = activities.reduce((acc, act) => {
|
||||
if(act instanceof Submission) {
|
||||
if(asSubmission(act)) {
|
||||
return {...acc, submissionTotal: acc.submissionTotal + 1};
|
||||
}
|
||||
let a = {...acc, commentTotal: acc.commentTotal + 1};
|
||||
@@ -134,6 +182,24 @@ export class HistoryRule extends Rule {
|
||||
}
|
||||
return a;
|
||||
},{submissionTotal: 0, commentTotal: 0, opTotal: 0});
|
||||
let fSubmissionTotal = submissionTotal;
|
||||
let fCommentTotal = commentTotal;
|
||||
let fOpTotal = opTotal;
|
||||
if(activities.length !== filteredActivities.length) {
|
||||
const filteredCounts = filteredActivities.reduce((acc, act) => {
|
||||
if(asSubmission(act)) {
|
||||
return {...acc, submissionTotal: acc.submissionTotal + 1};
|
||||
}
|
||||
let a = {...acc, commentTotal: acc.commentTotal + 1};
|
||||
if(act.is_submitter) {
|
||||
a.opTotal = a.opTotal + 1;
|
||||
}
|
||||
return a;
|
||||
},{submissionTotal: 0, commentTotal: 0, opTotal: 0});
|
||||
fSubmissionTotal = filteredCounts.submissionTotal;
|
||||
fCommentTotal = filteredCounts.commentTotal;
|
||||
fOpTotal = filteredCounts.opTotal;
|
||||
}
|
||||
|
||||
let commentTrigger = undefined;
|
||||
if(comment !== undefined) {
|
||||
@@ -142,15 +208,15 @@ export class HistoryRule extends Rule {
|
||||
if(isPercent) {
|
||||
const per = value / 100;
|
||||
if(asOp) {
|
||||
commentTrigger = comparisonTextOp(opTotal / commentTotal, operator, per);
|
||||
commentTrigger = comparisonTextOp(fOpTotal / commentTotal, operator, per);
|
||||
} else {
|
||||
commentTrigger = comparisonTextOp(commentTotal / activityTotal, operator, per);
|
||||
commentTrigger = comparisonTextOp(fCommentTotal / activityTotal, operator, per);
|
||||
}
|
||||
} else {
|
||||
if(asOp) {
|
||||
commentTrigger = comparisonTextOp(opTotal, operator, value);
|
||||
commentTrigger = comparisonTextOp(fOpTotal, operator, value);
|
||||
} else {
|
||||
commentTrigger = comparisonTextOp(commentTotal, operator, value);
|
||||
commentTrigger = comparisonTextOp(fCommentTotal, operator, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -160,27 +226,40 @@ export class HistoryRule extends Rule {
|
||||
const {operator, value, isPercent} = parseGenericValueOrPercentComparison(submission);
|
||||
if(isPercent) {
|
||||
const per = value / 100;
|
||||
submissionTrigger = comparisonTextOp(submissionTotal / activityTotal, operator, per);
|
||||
submissionTrigger = comparisonTextOp(fSubmissionTotal / activityTotal, operator, per);
|
||||
} else {
|
||||
submissionTrigger = comparisonTextOp(submissionTotal, operator, value);
|
||||
submissionTrigger = comparisonTextOp(fSubmissionTotal, operator, value);
|
||||
}
|
||||
}
|
||||
|
||||
let totalTrigger = undefined;
|
||||
if(total !== undefined) {
|
||||
const {operator, value, isPercent} = parseGenericValueOrPercentComparison(total);
|
||||
if(isPercent) {
|
||||
const per = value / 100;
|
||||
totalTrigger = comparisonTextOp(filteredActivities.length / activityTotal, operator, per);
|
||||
} else {
|
||||
totalTrigger = comparisonTextOp(filteredActivities.length, operator, value);
|
||||
}
|
||||
}
|
||||
|
||||
const firstActivity = activities[0];
|
||||
const lastActivity = activities[activities.length - 1];
|
||||
|
||||
const activityTotalWindow = dayjs.duration(dayjs(firstActivity.created_utc * 1000).diff(dayjs(lastActivity.created_utc * 1000)));
|
||||
const activityTotalWindow = activities.length === 0 ? dayjs.duration(0, 's') : dayjs.duration(dayjs(firstActivity.created_utc * 1000).diff(dayjs(lastActivity.created_utc * 1000)));
|
||||
|
||||
criteriaResults.push({
|
||||
criteria,
|
||||
activityTotal,
|
||||
activityTotalWindow,
|
||||
submissionTotal,
|
||||
commentTotal,
|
||||
opTotal,
|
||||
submissionTotal: fSubmissionTotal,
|
||||
commentTotal: fCommentTotal,
|
||||
opTotal: fOpTotal,
|
||||
filteredTotal: filteredActivities.length,
|
||||
submissionTrigger,
|
||||
commentTrigger,
|
||||
triggered: (submissionTrigger === undefined || submissionTrigger === true) && (commentTrigger === undefined || commentTrigger === true)
|
||||
totalTrigger,
|
||||
triggered: (submissionTrigger === undefined || submissionTrigger === true) && (commentTrigger === undefined || commentTrigger === true) && (totalTrigger === undefined || totalTrigger === true)
|
||||
});
|
||||
}
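
// Illustrative sketch (not the util implementation): how a CompareValueOrPercent string such as
// '<= 75%' drives the trigger checks above -- percent values compare a ratio, plain values compare
// the raw count. parseGenericValueOrPercentComparison and comparisonTextOp come from ../util.
const evaluateThreshold = (input: string, filteredCount: number, totalCount: number): boolean => {
    const {operator, value, isPercent} = parseGenericValueOrPercentComparison(input);
    if (isPercent) {
        return comparisonTextOp(filteredCount / totalCount, operator, value / 100);
    }
    return comparisonTextOp(filteredCount, operator, value);
};
// e.g. evaluateThreshold('<= 75%', filteredActivities.length, activities.length)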
|
||||
|
||||
@@ -223,36 +302,50 @@ export class HistoryRule extends Rule {
|
||||
activityTotalWindow,
|
||||
submissionTotal,
|
||||
commentTotal,
|
||||
filteredTotal,
|
||||
opTotal,
|
||||
criteria: {
|
||||
comment,
|
||||
submission,
|
||||
total,
|
||||
window,
|
||||
},
|
||||
criteria,
|
||||
triggered,
|
||||
submissionTrigger,
|
||||
commentTrigger,
|
||||
totalTrigger,
|
||||
} = results;
|
||||
|
||||
const data: any = {
|
||||
activityTotal,
|
||||
submissionTotal,
|
||||
commentTotal,
|
||||
filteredTotal,
|
||||
opTotal,
|
||||
commentPercent: formatNumber((commentTotal/activityTotal)*100),
|
||||
submissionPercent: formatNumber((submissionTotal/activityTotal)*100),
|
||||
opPercent: formatNumber((opTotal/commentTotal)*100),
|
||||
filteredPercent: formatNumber((filteredTotal/activityTotal)*100),
|
||||
criteria,
|
||||
window: typeof window === 'number' ? `${activityTotal} Items` : activityTotalWindow.humanize(true),
|
||||
window: typeof window === 'number' || activityTotal === 0 ? `${activityTotal} Items` : activityTotalWindow.humanize(true),
|
||||
triggered,
|
||||
submissionTrigger,
|
||||
commentTrigger,
|
||||
totalTrigger,
|
||||
};
|
||||
|
||||
let thresholdSummary = [];
|
||||
let totalSummary;
|
||||
let submissionSummary;
|
||||
let commentSummary;
|
||||
if(total !== undefined) {
|
||||
const {operator, value, isPercent, displayText} = parseGenericValueOrPercentComparison(total);
|
||||
const suffix = !isPercent ? 'Items' : `(${formatNumber((filteredTotal/activityTotal)*100)}%) of ${activityTotal} Total`;
|
||||
totalSummary = `${includePassFailSymbols ? `${totalTrigger ? PASS : FAIL} ` : ''}Filtered Activities (${filteredTotal}) were${totalTrigger ? '' : ' not'} ${displayText} ${suffix}`;
|
||||
data.totalSummary = totalSummary;
|
||||
thresholdSummary.push(totalSummary);
|
||||
}
|
||||
if(submission !== undefined) {
|
||||
const {operator, value, isPercent, displayText} = parseGenericValueOrPercentComparison(submission);
|
||||
const suffix = !isPercent ? 'Items' : `(${formatNumber((submissionTotal/activityTotal)*100)}%) of ${activityTotal} Total`;
|
||||
@@ -298,21 +391,45 @@ interface HistoryConfig {
|
||||
condition?: 'AND' | 'OR'
|
||||
|
||||
/**
|
||||
* Only include Submissions from this list of Subreddits (by name, case-insensitive)
|
||||
* If present, activities will be counted only if they are found in this list of Subreddits.
|
||||
*
|
||||
* EX `["mealtimevideos","askscience"]`
|
||||
* @examples ["mealtimevideos","askscience"]
|
||||
* @minItems 1
|
||||
* Each value in the list can be either:
|
||||
*
|
||||
* * string (name of subreddit)
|
||||
* * regular expression to run on the subreddit name
|
||||
* * `SubredditState`
|
||||
*
|
||||
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
|
||||
*
|
||||
* **Note:** This affects **post-window retrieval** activities. So that:
|
||||
*
|
||||
* * `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering
|
||||
* * all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**
|
||||
* * -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`
|
||||
*
|
||||
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
|
||||
* */
|
||||
include?: string[],
|
||||
include?: (string | SubredditState)[],
|
||||
/**
|
||||
* Do not include Submissions from this list of Subreddits (by name, case-insensitive)
|
||||
* If present, activities will be counted only if they are **NOT** found in this list of Subreddits
|
||||
*
|
||||
* EX `["mealtimevideos","askscience"]`
|
||||
* @examples ["mealtimevideos","askscience"]
|
||||
* @minItems 1
|
||||
* Each value in the list can be either:
|
||||
*
|
||||
* * string (name of subreddit)
|
||||
* * regular expression to run on the subreddit name
|
||||
* * `SubredditState`
|
||||
*
|
||||
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
|
||||
*
|
||||
* **Note:** This affects **post-window retrieval** activities. So that:
|
||||
*
|
||||
* * `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering
|
||||
* * all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**
|
||||
* * -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`
|
||||
*
|
||||
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
|
||||
* */
|
||||
exclude?: string[],
|
||||
exclude?: (string | SubredditState)[],
|
||||
}
|
||||
|
||||
export interface HistoryOptions extends HistoryConfig, RuleOptions {
|
||||
|
||||
@@ -1,27 +1,50 @@
|
||||
import {Rule, RuleJSONConfig, RuleOptions, RulePremise, RuleResult} from "./index";
|
||||
import {Comment, VoteableContent} from "snoowrap";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import as from 'async';
|
||||
import pMap from 'p-map';
|
||||
// @ts-ignore
|
||||
import subImageMatch from 'matches-subimage';
|
||||
import {
|
||||
activityWindowText,
|
||||
comparisonTextOp, FAIL, formatNumber,
|
||||
parseGenericValueOrPercentComparison, parseSubredditName,
|
||||
asSubmission, bitsToHexLength,
|
||||
// blockHashImage,
|
||||
compareImages,
|
||||
comparisonTextOp, convertSubredditsRawToStrong,
|
||||
FAIL,
|
||||
formatNumber,
|
||||
getActivitySubredditName, imageCompareMaxConcurrencyGuess,
|
||||
//getImageDataFromUrl,
|
||||
isSubmission,
|
||||
isValidImageURL,
|
||||
objectToStringSummary,
|
||||
parseGenericValueOrPercentComparison,
|
||||
parseStringToRegex,
|
||||
parseSubredditName,
|
||||
parseUsableLinkIdentifier,
|
||||
PASS
|
||||
PASS, sleep,
|
||||
toStrongSubredditState
|
||||
} from "../util";
|
||||
import {
|
||||
ActivityWindow,
|
||||
ActivityWindowCriteria,
|
||||
ActivityWindowType,
|
||||
ReferenceSubmission,
|
||||
SubredditCriteria
|
||||
ActivityWindowType, CommentState,
|
||||
//ImageData,
|
||||
ImageDetection,
|
||||
ReferenceSubmission, StrongImageDetection, StrongSubredditState, SubmissionState,
|
||||
SubredditCriteria, SubredditState
|
||||
} from "../Common/interfaces";
|
||||
import ImageData from "../Common/ImageData";
|
||||
import {blockhash, hammingDistance} from "../Common/blockhash/blockhash";
|
||||
import leven from "leven";
|
||||
|
||||
const parseLink = parseUsableLinkIdentifier();
|
||||
|
||||
export class RecentActivityRule extends Rule {
|
||||
window: ActivityWindowType;
|
||||
thresholds: SubThreshold[];
|
||||
thresholds: ActivityThreshold[];
|
||||
useSubmissionAsReference: boolean;
|
||||
imageDetection: StrongImageDetection
|
||||
lookAt?: 'comments' | 'submissions';
|
||||
|
||||
constructor(options: RecentActivityRuleOptions) {
|
||||
@@ -29,8 +52,47 @@ export class RecentActivityRule extends Rule {
|
||||
const {
|
||||
window = 15,
|
||||
useSubmissionAsReference = true,
|
||||
imageDetection,
|
||||
lookAt,
|
||||
} = options || {};
|
||||
|
||||
const {
|
||||
enable = false,
|
||||
fetchBehavior = 'extension',
|
||||
threshold = 5,
|
||||
hash = {},
|
||||
pixel = {},
|
||||
} = imageDetection || {};
|
||||
|
||||
const {
|
||||
enable: hEnable = true,
|
||||
bits = 16,
|
||||
ttl = 60,
|
||||
hardThreshold = threshold,
|
||||
softThreshold
|
||||
} = hash || {};
|
||||
|
||||
const {
|
||||
enable: pEnable = true,
|
||||
threshold: pThreshold = threshold,
|
||||
} = pixel || {};
|
||||
|
||||
this.imageDetection = {
|
||||
enable,
|
||||
fetchBehavior,
|
||||
threshold,
|
||||
hash: {
|
||||
enable: hEnable,
|
||||
hardThreshold,
|
||||
softThreshold,
|
||||
bits,
|
||||
ttl,
|
||||
},
|
||||
pixel: {
|
||||
enable: pEnable,
|
||||
threshold: pThreshold
|
||||
}
|
||||
};
|
||||
this.lookAt = lookAt;
|
||||
this.useSubmissionAsReference = useSubmissionAsReference;
|
||||
this.window = window;
|
||||
@@ -67,46 +129,226 @@ export class RecentActivityRule extends Rule {
|
||||
|
||||
let viableActivity = activities;
|
||||
if (this.useSubmissionAsReference) {
|
||||
if (!(item instanceof Submission)) {
|
||||
if (!asSubmission(item)) {
|
||||
this.logger.warn('Cannot use post as reference because triggered item is not a Submission');
|
||||
} else if (item.is_self) {
|
||||
this.logger.warn('Cannot use post as reference because triggered Submission is not a link type');
|
||||
} else {
|
||||
const usableUrl = parseLink(await item.url);
|
||||
viableActivity = viableActivity.filter((x) => {
|
||||
if (!(x instanceof Submission)) {
|
||||
return false;
|
||||
const itemId = item.id;
|
||||
const referenceUrl = await item.url;
|
||||
const usableUrl = parseLink(referenceUrl);
|
||||
let filteredActivity: (Submission|Comment)[] = [];
|
||||
let analysisTimes: number[] = [];
|
||||
let referenceImage: ImageData | undefined;
|
||||
if (this.imageDetection.enable) {
|
||||
try {
|
||||
referenceImage = ImageData.fromSubmission(item);
|
||||
referenceImage.setPreferredResolutionByWidth(800);
|
||||
if(this.imageDetection.hash.enable) {
|
||||
let refHash: string | undefined;
|
||||
if(this.imageDetection.hash.ttl !== undefined) {
|
||||
refHash = await this.resources.getImageHash(referenceImage);
|
||||
if(refHash === undefined) {
|
||||
refHash = await referenceImage.hash(this.imageDetection.hash.bits);
|
||||
await this.resources.setImageHash(referenceImage, refHash, this.imageDetection.hash.ttl);
|
||||
} else if(refHash.length !== bitsToHexLength(this.imageDetection.hash.bits)) {
|
||||
this.logger.warn('Reference image hash length did not correspond to bits specified in config. Recomputing...');
|
||||
refHash = await referenceImage.hash(this.imageDetection.hash.bits);
|
||||
await this.resources.setImageHash(referenceImage, refHash, this.imageDetection.hash.ttl);
|
||||
}
|
||||
} else {
|
||||
refHash = await referenceImage.hash(this.imageDetection.hash.bits);
|
||||
}
|
||||
}
|
||||
//await referenceImage.sharp();
|
||||
// await referenceImage.hash();
|
||||
// if (referenceImage.preferredResolution !== undefined) {
|
||||
// await (referenceImage.getSimilarResolutionVariant(...referenceImage.preferredResolution) as ImageData).sharp();
|
||||
// }
|
||||
} catch (err: any) {
|
||||
this.logger.verbose(err.message);
|
||||
}
|
||||
}
|
||||
let longRun;
|
||||
if (referenceImage !== undefined) {
|
||||
const l = this.logger;
|
||||
longRun = setTimeout(() => {
|
||||
l.verbose('FYI: Image processing is causing rule to take longer than normal');
|
||||
}, 2500);
|
||||
}
|
||||
// @ts-ignore
|
||||
const ci = async (x: (Submission|Comment)) => {
|
||||
if (!asSubmission(x) || x.id === itemId) {
|
||||
return null;
|
||||
}
|
||||
if (x.url === undefined) {
|
||||
return false;
|
||||
return null;
|
||||
}
|
||||
return parseLink(x.url) === usableUrl;
|
||||
});
|
||||
if (parseLink(x.url) === usableUrl) {
|
||||
return x;
|
||||
}
|
||||
// only do image detection if regular URL comparison and other conditions fail first
|
||||
// to reduce CPU/bandwidth usage
|
||||
if (referenceImage !== undefined) {
|
||||
try {
|
||||
let imgData = ImageData.fromSubmission(x);
|
||||
imgData.setPreferredResolutionByWidth(800);
|
||||
if(this.imageDetection.hash.enable) {
|
||||
let compareHash: string | undefined;
|
||||
if(this.imageDetection.hash.ttl !== undefined) {
|
||||
compareHash = await this.resources.getImageHash(imgData);
|
||||
}
|
||||
if(compareHash === undefined)
|
||||
{
|
||||
compareHash = await imgData.hash(this.imageDetection.hash.bits);
|
||||
if(this.imageDetection.hash.ttl !== undefined) {
|
||||
await this.resources.setImageHash(imgData, compareHash, this.imageDetection.hash.ttl);
|
||||
}
|
||||
}
|
||||
const refHash = await referenceImage.hash(this.imageDetection.hash.bits);
|
||||
if(refHash.length !== compareHash.length) {
|
||||
this.logger.debug(`Hash lengths were not the same! Will need to recompute compare hash to match reference.\n\nReference: ${referenceImage.baseUrl} hash is ${refHash.length} chars long | Comparing: ${imgData.baseUrl} hash is ${compareHash} (${compareHash.length} chars long)`);
|
||||
compareHash = await imgData.hash(this.imageDetection.hash.bits)
|
||||
}
|
||||
const distance = leven(refHash, compareHash);
|
||||
const diff = (distance/refHash.length)*100;
|
||||
|
||||
|
||||
// return image if hard is defined and diff is less
|
||||
if(null !== this.imageDetection.hash.hardThreshold && diff <= this.imageDetection.hash.hardThreshold) {
|
||||
return x;
|
||||
}
|
||||
// hard is either not defined or diff was greater than hard
|
||||
|
||||
// if soft is defined
|
||||
if (this.imageDetection.hash.softThreshold !== undefined) {
|
||||
// and diff is greater than soft allowance
|
||||
if(diff > this.imageDetection.hash.softThreshold) {
|
||||
// not similar enough
|
||||
return null;
|
||||
}
|
||||
// similar enough, will continue on to pixel (if enabled!)
|
||||
} else {
|
||||
// only hard was defined and did not pass
|
||||
return null;
|
||||
}
|
||||
}
|
||||
// at this point either hash was not enabled or it was and we hit soft threshold but not hard
|
||||
if(this.imageDetection.pixel.enable) {
|
||||
try {
|
||||
const [compareResult, sameImage] = await compareImages(referenceImage, imgData, this.imageDetection.pixel.threshold / 100);
|
||||
analysisTimes.push(compareResult.analysisTime);
|
||||
if (sameImage) {
|
||||
return x;
|
||||
}
|
||||
} catch (err: any) {
|
||||
this.logger.warn(`Unexpected error encountered while pixel-comparing images, will skip comparison => ${err.message}`);
|
||||
}
|
||||
}
|
||||
} catch (err: any) {
|
||||
if(!err.message.includes('did not end with a valid image extension')) {
|
||||
this.logger.warn(`Will not compare image from Submission ${x.id} due to error while parsing image URL => ${err.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
// parallel all the things
|
||||
this.logger.profile('asyncCompare');
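// pMap runs the comparison callback over all candidate activities concurrently, capped at
// imageCompareMaxConcurrencyGuess simultaneous comparisons to keep CPU/bandwidth usage in check.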
|
||||
const results = await pMap(viableActivity, ci, {concurrency: imageCompareMaxConcurrencyGuess});
|
||||
this.logger.profile('asyncCompare', {level: 'debug', message: 'Total time for image comparison (incl download/cache calls)'});
|
||||
const totalAnalysisTime = analysisTimes.reduce((acc, x) => acc + x,0);
|
||||
if(analysisTimes.length > 0) {
|
||||
this.logger.debug(`Reference image pixel-compared ${analysisTimes.length} times. Timings: Avg ${formatNumber(totalAnalysisTime / analysisTimes.length, {toFixed: 0})}ms | Max: ${Math.max(...analysisTimes)}ms | Min: ${Math.min(...analysisTimes)}ms | Total: ${totalAnalysisTime}ms (${formatNumber(totalAnalysisTime/1000)}s)`);
|
||||
}
|
||||
filteredActivity = filteredActivity.concat(results.filter(x => x !== null));
|
||||
if (longRun !== undefined) {
|
||||
clearTimeout(longRun);
|
||||
}
|
||||
viableActivity = filteredActivity;
|
||||
}
|
||||
}
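// Group the remaining candidate activities by lower-cased subreddit name so each threshold set
// below can look up how many of them landed in the subreddits it cares about.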
|
||||
const groupedActivity = viableActivity.reduce((grouped, activity) => {
|
||||
const s = activity.subreddit.display_name.toLowerCase();
|
||||
grouped[s] = (grouped[s] || []).concat(activity);
|
||||
return grouped;
|
||||
}, {} as Record<string, (Submission | Comment)[]>);
const summaries = [];
|
||||
let totalTriggeredOn;
|
||||
for (const triggerSet of this.thresholds) {
|
||||
let currCount = 0;
|
||||
const presentSubs = [];
|
||||
const {threshold = '>= 1', subreddits = []} = triggerSet;
|
||||
for (const sub of subreddits.map(x => parseSubredditName(x))) {
|
||||
const isub = sub.toLowerCase();
|
||||
const {[isub]: tSub = []} = groupedActivity;
|
||||
if (tSub.length > 0) {
|
||||
currCount += tSub.length;
|
||||
presentSubs.push(sub);
|
||||
const presentSubs: string[] = [];
|
||||
let combinedKarma = 0;
|
||||
const {
|
||||
threshold = '>= 1',
|
||||
subreddits = [],
|
||||
karma: karmaThreshold,
|
||||
commentState,
|
||||
submissionState,
|
||||
} = triggerSet;
|
||||
|
||||
// convert subreddits array into entirely StrongSubredditState
|
||||
const defaultOpts = {
|
||||
defaultFlags: 'i',
|
||||
generateDescription: true
|
||||
};
|
||||
const subStates: StrongSubredditState[] = subreddits.map((x) => convertSubredditsRawToStrong(x, defaultOpts));
|
||||
|
||||
let validActivity: (Comment | Submission)[] = await as.filter(viableActivity, async (activity) => {
|
||||
if (asSubmission(activity) && submissionState !== undefined) {
|
||||
return await this.resources.testItemCriteria(activity, [submissionState]);
|
||||
} else if (commentState !== undefined) {
|
||||
return await this.resources.testItemCriteria(activity, [commentState]);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
|
||||
validActivity = await this.resources.batchTestSubredditCriteria(validActivity, subStates);
|
||||
for (const activity of validActivity) {
|
||||
currCount++;
|
||||
// @ts-ignore
|
||||
combinedKarma += activity.score;
|
||||
const pSub = getActivitySubredditName(activity);
|
||||
if (!presentSubs.includes(pSub)) {
|
||||
presentSubs.push(pSub);
|
||||
}
|
||||
}
|
||||
|
||||
for (const activity of viableActivity) {
|
||||
if (asSubmission(activity) && submissionState !== undefined) {
|
||||
if (!(await this.resources.testItemCriteria(activity, [submissionState]))) {
|
||||
continue;
|
||||
}
|
||||
} else if (commentState !== undefined) {
|
||||
if (!(await this.resources.testItemCriteria(activity, [commentState]))) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
let inSubreddits = false;
|
||||
for (const ss of subStates) {
|
||||
const res = await this.resources.testSubredditCriteria(activity, ss);
|
||||
if (res) {
|
||||
inSubreddits = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (inSubreddits) {
|
||||
currCount++;
|
||||
combinedKarma += activity.score;
|
||||
const pSub = getActivitySubredditName(activity);
|
||||
if (!presentSubs.includes(pSub)) {
|
||||
presentSubs.push(pSub);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const {operator, value, isPercent} = parseGenericValueOrPercentComparison(threshold);
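// parseGenericValueOrPercentComparison turns a threshold string like '>= 3' or '> 10%' into an
// operator, a numeric value, and an isPercent flag; percent thresholds are tested against the share
// of the activity window rather than a raw count (handled below).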
|
||||
let sum = {subsWithActivity: presentSubs, subreddits, count: currCount, threshold, triggered: false, testValue: currCount.toString()};
|
||||
let sum = {
|
||||
subsWithActivity: presentSubs,
|
||||
combinedKarma,
|
||||
karmaThreshold,
|
||||
subreddits: subStates.map(x => x.stateDescription),
|
||||
count: currCount,
|
||||
threshold,
|
||||
triggered: false,
|
||||
testValue: currCount.toString()
|
||||
};
|
||||
if (isPercent) {
|
||||
sum.testValue = `${formatNumber((currCount / viableActivity.length) * 100)}%`;
|
||||
if (comparisonTextOp(currCount / viableActivity.length, operator, value / 100)) {
|
||||
@@ -117,6 +359,15 @@ export class RecentActivityRule extends Rule {
|
||||
sum.triggered = true;
|
||||
totalTriggeredOn = sum;
|
||||
}
|
||||
// if we would trigger on threshold need to also test for karma
|
||||
if (totalTriggeredOn !== undefined && karmaThreshold !== undefined) {
|
||||
const {operator: opKarma, value: valueKarma} = parseGenericValueOrPercentComparison(karmaThreshold);
|
||||
if (!comparisonTextOp(combinedKarma, opKarma, valueKarma)) {
|
||||
sum.triggered = false;
|
||||
totalTriggeredOn = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
summaries.push(sum);
|
||||
// if either trigger condition is hit end the iteration early
|
||||
if (totalTriggeredOn !== undefined) {
|
||||
@@ -129,7 +380,7 @@ export class RecentActivityRule extends Rule {
|
||||
result = `${PASS} ${resultData.result}`;
|
||||
this.logger.verbose(result);
|
||||
return Promise.resolve([true, this.getResult(true, resultData)]);
|
||||
} else if(summaries.length === 1) {
|
||||
} else if (summaries.length === 1) {
|
||||
// can display result if it's only one summary, otherwise need to log to debug
|
||||
const res = this.generateResultData(summaries[0], viableActivity);
|
||||
result = `${FAIL} ${res.result}`;
|
||||
@@ -142,7 +393,7 @@ export class RecentActivityRule extends Rule {
|
||||
|
||||
return Promise.resolve([false, this.getResult(false, {result})]);
|
||||
}
|
||||
|
||||
|
||||
generateResultData(summary: any, activities: (Submission | Comment)[] = []) {
|
||||
const {
|
||||
count,
|
||||
@@ -150,10 +401,15 @@ export class RecentActivityRule extends Rule {
|
||||
subreddits = [],
|
||||
subsWithActivity = [],
|
||||
threshold,
|
||||
triggered
|
||||
triggered,
|
||||
combinedKarma,
|
||||
karmaThreshold,
|
||||
} = summary;
|
||||
const relevantSubs = subsWithActivity.length === 0 ? subreddits : subsWithActivity;
|
||||
const totalSummary = `${testValue} activities over ${relevantSubs.length} subreddits ${triggered ? 'met' : 'did not meet'} threshold of ${threshold}`;
|
||||
let totalSummary = `${testValue} activities over ${relevantSubs.length} subreddits${karmaThreshold !== undefined ? ` with ${combinedKarma} combined karma` : ''} ${triggered ? 'met' : 'did not meet'} threshold of ${threshold}${karmaThreshold !== undefined ? ` and ${karmaThreshold} combined karma` : ''}`;
|
||||
if (triggered && subsWithActivity.length > 0) {
|
||||
totalSummary = `${totalSummary} -- subreddits: ${subsWithActivity.join(', ')}`;
|
||||
}
|
||||
return {
|
||||
result: totalSummary,
|
||||
data: {
|
||||
@@ -163,7 +419,8 @@ export class RecentActivityRule extends Rule {
|
||||
subCount: relevantSubs.length,
|
||||
totalCount: count,
|
||||
threshold,
|
||||
testValue
|
||||
testValue,
|
||||
karmaThreshold,
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -175,7 +432,16 @@ export class RecentActivityRule extends Rule {
|
||||
* @minProperties 1
|
||||
* @additionalProperties false
|
||||
* */
|
||||
export interface SubThreshold extends SubredditCriteria {
|
||||
export interface ActivityThreshold {
|
||||
/**
|
||||
* When present, a Submission will only be counted if it meets this criteria
|
||||
* */
|
||||
submissionState?: SubmissionState
|
||||
/**
|
||||
* When present, a Comment will only be counted if it meets this criteria
|
||||
* */
|
||||
commentState?: CommentState
|
||||
|
||||
/**
|
||||
* A string containing a comparison operator and a value to compare recent activities against
|
||||
*
|
||||
@@ -191,6 +457,35 @@ export interface SubThreshold extends SubredditCriteria {
|
||||
* @examples [">= 1"]
|
||||
* */
|
||||
threshold?: string
|
||||
|
||||
/**
|
||||
* Test the **combined karma** from Activities found in the specified subreddits
|
||||
*
|
||||
* Value is a string containing a comparison operator and a number of **combined karma** to compare against
|
||||
*
|
||||
* If specified then both `threshold` and `karma` must be met for this `SubThreshold` to be satisfied
|
||||
*
|
||||
* The syntax is `(< OR > OR <= OR >=) <number>`
|
||||
*
|
||||
* * EX `> 50` => greater than 50 combined karma for all found Activities in specified subreddits
|
||||
*
|
||||
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
|
||||
* */
|
||||
karma?: string
|
||||
|
||||
/**
|
||||
* Activities will be counted if they are found in this list of Subreddits
|
||||
*
|
||||
* Each value in the list can be either:
|
||||
*
|
||||
* * string (name of subreddit)
|
||||
* * regular expression to run on the subreddit name
|
||||
* * `SubredditState`
|
||||
*
|
||||
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
|
||||
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
|
||||
* */
|
||||
subreddits?: (string | SubredditState)[]
|
||||
}
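// Illustrative ActivityThreshold (hypothetical values): satisfied when 3 or more activities,
// totalling more than 100 combined karma, are found in the listed subreddits.
const exampleActivityThreshold: ActivityThreshold = {
    threshold: '>= 3',
    karma: '> 100',
    subreddits: ['mealtimevideos', '/onlyfans*/i', {over18: true}]
};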
|
||||
|
||||
interface RecentActivityConfig extends ActivityWindow, ReferenceSubmission {
|
||||
@@ -203,7 +498,9 @@ interface RecentActivityConfig extends ActivityWindow, ReferenceSubmission {
|
||||
* A list of subreddits/count criteria that may trigger this rule. ANY SubThreshold will trigger this rule.
|
||||
* @minItems 1
|
||||
* */
|
||||
thresholds: SubThreshold[],
|
||||
thresholds: ActivityThreshold[],
|
||||
|
||||
imageDetection?: ImageDetection
|
||||
}
|
||||
|
||||
export interface RecentActivityRuleOptions extends RecentActivityConfig, RuleOptions {
|
||||
|
||||
@@ -2,14 +2,16 @@ import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
|
||||
import {Comment} from "snoowrap";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import {
|
||||
comparisonTextOp, FAIL, isExternalUrlSubmission, parseGenericValueComparison,
|
||||
parseGenericValueOrPercentComparison, parseRegex,
|
||||
PASS
|
||||
asSubmission,
|
||||
comparisonTextOp, FAIL, isExternalUrlSubmission, isSubmission, parseGenericValueComparison,
|
||||
parseGenericValueOrPercentComparison, parseRegex, parseStringToRegex,
|
||||
PASS, triggeredIndicator
|
||||
} from "../util";
|
||||
import {
|
||||
ActivityWindowType, JoinOperands,
|
||||
} from "../Common/interfaces";
|
||||
import dayjs from 'dayjs';
|
||||
import SimpleError from "../Utils/SimpleError";
|
||||
|
||||
export interface RegexCriteria {
|
||||
/**
|
||||
@@ -21,17 +23,11 @@ export interface RegexCriteria {
|
||||
/**
|
||||
* A valid Regular Expression to test content against
|
||||
*
|
||||
* Do not wrap expression in forward slashes
|
||||
* If no flags are specified then the **global** flag is used by default
|
||||
*
|
||||
* EX For the expression `/reddit|FoxxMD/` use the value should be `reddit|FoxxMD`
|
||||
*
|
||||
* @examples ["reddit|FoxxMD"]
|
||||
* @examples ["/reddit|FoxxMD/ig"]
|
||||
* */
|
||||
regex: string,
|
||||
/**
|
||||
* Regex flags to use
|
||||
* */
|
||||
regexFlags?: string,
|
||||
|
||||
/**
|
||||
* Which content from an Activity to test the regex against
|
||||
@@ -134,12 +130,11 @@ export class RegexRule extends Rule {
|
||||
|
||||
let criteriaResults = [];
|
||||
|
||||
for (const criteria of this.criteria) {
|
||||
for (const [index, criteria] of this.criteria.entries()) {
|
||||
|
||||
const {
|
||||
name,
|
||||
name = (index + 1),
|
||||
regex,
|
||||
regexFlags,
|
||||
testOn: testOnVals = ['title', 'body'],
|
||||
lookAt = 'all',
|
||||
matchThreshold = '> 0',
|
||||
@@ -157,7 +152,10 @@ export class RegexRule extends Rule {
|
||||
}, []);
|
||||
|
||||
// check regex
|
||||
const reg = new RegExp(regex);
|
||||
const reg = parseStringToRegex(regex, 'g');
|
||||
if(reg === undefined) {
|
||||
throw new SimpleError(`Value given for regex on Criteria ${name} was not valid: ${regex}`);
|
||||
}
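// parseStringToRegex accepts either a bare pattern or a /pattern/flags literal; the 'g' argument is
// the fallback flag set applied when the value specifies none (see the regex property docs above).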
|
||||
// ok cool, it's a valid regex
|
||||
|
||||
const matchComparison = parseGenericValueComparison(matchThreshold);
|
||||
@@ -176,7 +174,7 @@ export class RegexRule extends Rule {
|
||||
|
||||
// first lets see if the activity we are checking satisfies thresholds
|
||||
// since we may be able to avoid api calls to get history
|
||||
let actMatches = this.getMatchesFromActivity(item, testOn, reg, regexFlags);
|
||||
let actMatches = this.getMatchesFromActivity(item, testOn, reg);
|
||||
matches = matches.concat(actMatches).slice(0, 100);
|
||||
matchCount += actMatches.length;
|
||||
|
||||
@@ -226,7 +224,7 @@ export class RegexRule extends Rule {
|
||||
|
||||
for (const h of history) {
|
||||
activitiesTested++;
|
||||
const aMatches = this.getMatchesFromActivity(h, testOn, reg, regexFlags);
|
||||
const aMatches = this.getMatchesFromActivity(h, testOn, reg);
|
||||
matches = matches.concat(aMatches).slice(0, 100);
|
||||
matchCount += aMatches.length;
|
||||
const matched = comparisonTextOp(aMatches.length, matchComparison.operator, matchComparison.value);
|
||||
@@ -298,32 +296,38 @@ export class RegexRule extends Rule {
|
||||
|
||||
const logSummary: string[] = [];
|
||||
let index = 0;
|
||||
let matchSample = undefined;
|
||||
for (const c of criteriaResults) {
|
||||
index++;
|
||||
let msg = `Crit ${c.criteria.name || index} ${c.triggered ? PASS : FAIL}`;
|
||||
let msg = `Criteria ${c.criteria.name || `#${index}`} ${triggeredIndicator(c.triggered)}`;
|
||||
if (c.activityThresholdMet !== undefined) {
|
||||
msg = `${msg} -- Activity Match=> ${c.activityThresholdMet ? PASS : FAIL} ${c.activitiesMatchedCount} ${c.criteria.activityMatchThreshold} (Threshold ${c.criteria.matchThreshold})`;
|
||||
msg = `${msg} -- Activity Match ${triggeredIndicator(c.activityThresholdMet)} => ${c.activitiesMatchedCount} ${c.criteria.activityMatchThreshold} (Threshold ${c.criteria.matchThreshold})`;
|
||||
}
|
||||
if (c.totalThresholdMet !== undefined) {
|
||||
msg = `${msg} -- Total Matches=> ${c.totalThresholdMet ? PASS : FAIL} ${c.matchCount} ${c.criteria.totalMatchThreshold}`;
|
||||
msg = `${msg} -- Total Matches ${triggeredIndicator(c.totalThresholdMet)} => ${c.matchCount} ${c.criteria.totalMatchThreshold}`;
|
||||
} else {
|
||||
msg = `${msg} and ${c.matchCount} Total Matches`;
|
||||
}
|
||||
msg = `${msg} (Window: ${c.criteria.window})`;
|
||||
logSummary.push(msg);
|
||||
if(c.matches.length > 0) {
|
||||
matchSample = `${c.matches.slice(0, 3).map(x => `"${x}"`).join(', ')}${c.matches.length > 3 ? `, and ${c.matches.length - 3} more...` : ''}`;
|
||||
logSummary.push(`${msg} -- Matched Values: ${matchSample}`);
|
||||
} else {
|
||||
logSummary.push(msg);
|
||||
}
|
||||
}
|
||||
|
||||
const result = `${criteriaMet ? PASS : FAIL} ${logSummary.join(' || ')}`;
|
||||
const result = `${triggeredIndicator(criteriaMet)} ${logSummary.join(' || ')}`;
|
||||
this.logger.verbose(result);
|
||||
|
||||
return Promise.resolve([criteriaMet, this.getResult(criteriaMet, {result, data: criteriaResults})]);
|
||||
return Promise.resolve([criteriaMet, this.getResult(criteriaMet, {result, data: {results: criteriaResults, matchSample }})]);
|
||||
}
|
||||
|
||||
protected getMatchesFromActivity(a: (Submission | Comment), testOn: string[], reg: RegExp, flags?: string): string[] {
|
||||
protected getMatchesFromActivity(a: (Submission | Comment), testOn: string[], reg: RegExp): string[] {
|
||||
let m: string[] = [];
|
||||
// determine what content we are testing
|
||||
let contents: string[] = [];
|
||||
if (a instanceof Submission) {
|
||||
if (asSubmission(a)) {
|
||||
for (const l of testOn) {
|
||||
switch (l) {
|
||||
case 'title':
|
||||
@@ -346,7 +350,7 @@ export class RegexRule extends Rule {
|
||||
}
|
||||
|
||||
for (const c of contents) {
|
||||
const results = parseRegex(reg, c, flags);
|
||||
const results = parseRegex(reg, c);
|
||||
if (results.matched) {
|
||||
m = m.concat(results.matches);
|
||||
}
|
||||
|
||||
@@ -1,12 +1,18 @@
|
||||
import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
|
||||
import {Comment} from "snoowrap";
|
||||
import {
|
||||
activityWindowText,
|
||||
comparisonTextOp, FAIL, isExternalUrlSubmission, isRedditMedia,
|
||||
activityWindowText, asSubmission,
|
||||
comparisonTextOp, FAIL, getActivitySubredditName, isExternalUrlSubmission, isRedditMedia,
|
||||
parseGenericValueComparison, parseSubredditName,
|
||||
parseUsableLinkIdentifier as linkParser, PASS
|
||||
parseUsableLinkIdentifier as linkParser, PASS, subredditStateIsNameOnly, toStrongSubredditState
|
||||
} from "../util";
|
||||
import {ActivityWindow, ActivityWindowType, ReferenceSubmission} from "../Common/interfaces";
|
||||
import {
|
||||
ActivityWindow,
|
||||
ActivityWindowType,
|
||||
ReferenceSubmission,
|
||||
StrongSubredditState,
|
||||
SubredditState
|
||||
} from "../Common/interfaces";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import dayjs from "dayjs";
|
||||
import Fuse from 'fuse.js'
|
||||
@@ -25,7 +31,7 @@ interface RepeatActivityReducer {
|
||||
|
||||
const getActivityIdentifier = (activity: (Submission | Comment), length = 200) => {
|
||||
let identifier: string;
|
||||
if (activity instanceof Submission) {
|
||||
if (asSubmission(activity)) {
|
||||
if (activity.is_self) {
|
||||
identifier = `${activity.title}${activity.selftext.slice(0, length)}`;
|
||||
} else if(isRedditMedia(activity)) {
|
||||
@@ -50,8 +56,10 @@ export class RepeatActivityRule extends Rule {
|
||||
gapAllowance?: number;
|
||||
useSubmissionAsReference: boolean;
|
||||
lookAt: 'submissions' | 'all';
|
||||
include: string[];
|
||||
exclude: string[];
|
||||
include: (string | SubredditState)[];
|
||||
exclude: (string | SubredditState)[];
|
||||
hasFullSubredditCrits: boolean = false;
|
||||
activityFilterFunc: (x: Submission|Comment) => Promise<boolean> = async (x) => true;
|
||||
keepRemoved: boolean;
|
||||
minWordCount: number;
|
||||
|
||||
@@ -74,8 +82,42 @@ export class RepeatActivityRule extends Rule {
|
||||
this.window = window;
|
||||
this.gapAllowance = gapAllowance;
|
||||
this.useSubmissionAsReference = useSubmissionAsReference;
|
||||
this.include = include.map(x => parseSubredditName(x).toLowerCase());
|
||||
this.exclude = exclude.map(x => parseSubredditName(x).toLowerCase());
|
||||
this.include = include;
|
||||
this.exclude = exclude;
|
||||
|
||||
if(this.include.length > 0) {
|
||||
const subStates = include.map((x) => {
|
||||
if(typeof x === 'string') {
|
||||
return toStrongSubredditState({name: x, stateDescription: x}, {defaultFlags: 'i', generateDescription: true});
|
||||
}
|
||||
return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
|
||||
});
|
||||
this.hasFullSubredditCrits = !subStates.every(x => subredditStateIsNameOnly(x));
|
||||
this.activityFilterFunc = async (x: Submission|Comment) => {
|
||||
for(const ss of subStates) {
|
||||
if(await this.resources.testSubredditCriteria(x, ss)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
} else if(this.exclude.length > 0) {
|
||||
const subStates = exclude.map((x) => {
|
||||
if(typeof x === 'string') {
|
||||
return toStrongSubredditState({name: x, stateDescription: x}, {defaultFlags: 'i', generateDescription: true});
|
||||
}
|
||||
return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
|
||||
});
|
||||
this.hasFullSubredditCrits = !subStates.every(x => subredditStateIsNameOnly(x));
|
||||
this.activityFilterFunc = async (x: Submission|Comment) => {
|
||||
for(const ss of subStates) {
|
||||
if(await this.resources.testSubredditCriteria(x, ss)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
};
|
||||
}
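// Note: include takes precedence over exclude -- only one filter function is ever installed. When every
// entry is a plain subreddit name, hasFullSubredditCrits stays false and the rule can skip pre-caching
// full subreddit data later on.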
|
||||
this.lookAt = lookAt;
|
||||
}
|
||||
|
||||
@@ -96,17 +138,10 @@ export class RepeatActivityRule extends Rule {
|
||||
|
||||
async process(item: Submission|Comment): Promise<[boolean, RuleResult]> {
|
||||
let referenceUrl;
|
||||
if(item instanceof Submission && this.useSubmissionAsReference) {
|
||||
if(asSubmission(item) && this.useSubmissionAsReference) {
|
||||
referenceUrl = await item.url;
|
||||
}
|
||||
|
||||
let filterFunc = (x: any) => true;
|
||||
if(this.include.length > 0) {
|
||||
filterFunc = (x: Submission|Comment) => this.include.includes(x.subreddit.display_name.toLowerCase());
|
||||
} else if(this.exclude.length > 0) {
|
||||
filterFunc = (x: Submission|Comment) => !this.exclude.includes(x.subreddit.display_name.toLowerCase());
|
||||
}
|
||||
|
||||
let activities: (Submission | Comment)[] = [];
|
||||
switch (this.lookAt) {
|
||||
case 'submissions':
|
||||
@@ -117,13 +152,20 @@ export class RepeatActivityRule extends Rule {
|
||||
break;
|
||||
}
|
||||
|
||||
const condensedActivities = activities.reduce((acc: RepeatActivityReducer, activity: (Submission | Comment), index: number) => {
|
||||
if(this.hasFullSubredditCrits) {
|
||||
// go ahead and cache subreddits now
|
||||
// because we can't use batch test since testing activities in order is important for this rule
|
||||
await this.resources.cacheSubreddits(activities.map(x => x.subreddit));
|
||||
}
|
||||
|
||||
const condensedActivities = await activities.reduce(async (accProm: Promise<RepeatActivityReducer>, activity: (Submission | Comment), index: number) => {
|
||||
const acc = await accProm;
|
||||
const {openSets = [], allSets = []} = acc;
|
||||
|
||||
let identifier = getActivityIdentifier(activity);
|
||||
const isUrl = isExternalUrlSubmission(activity);
|
||||
let fu = new Fuse([identifier], !isUrl ? fuzzyOptions : {...fuzzyOptions, distance: 5});
|
||||
const validSub = filterFunc(activity);
|
||||
const validSub = await this.activityFilterFunc(activity);
|
||||
let minMet = identifier.length >= this.minWordCount;
|
||||
|
||||
let updatedAllSets = [...allSets];
|
||||
@@ -174,7 +216,7 @@ export class RepeatActivityRule extends Rule {
|
||||
|
||||
return {openSets: updatedOpenSets, allSets: updatedAllSets};
|
||||
|
||||
}, {openSets: [], allSets: []});
|
||||
}, Promise.resolve({openSets: [], allSets: []}));
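// Because the reducer is async, each iteration awaits the accumulator promise first, so activities are
// still processed strictly in order -- which the repeat/gap detection in this rule depends on.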
|
||||
|
||||
const allRepeatSets = [...condensedActivities.allSets, ...condensedActivities.openSets];
|
||||
|
||||
@@ -223,7 +265,7 @@ export class RepeatActivityRule extends Rule {
|
||||
};
|
||||
for (let set of value) {
|
||||
const test = comparisonTextOp(set.length, operator, thresholdValue);
|
||||
const md = set.map((x: (Comment | Submission)) => `[${x instanceof Submission ? x.title : getActivityIdentifier(x, 50)}](https://reddit.com${x.permalink}) in ${x.subreddit_name_prefixed} on ${dayjs(x.created_utc * 1000).utc().format()}`);
|
||||
const md = set.map((x: (Comment | Submission)) => `[${asSubmission(x) ? x.title : getActivityIdentifier(x, 50)}](https://reddit.com${x.permalink}) in ${x.subreddit_name_prefixed} on ${dayjs(x.created_utc * 1000).utc().format()}`);
|
||||
|
||||
summaryData.sets.push(set);
|
||||
summaryData.largestTrigger = Math.max(summaryData.largestTrigger, set.length);
|
||||
@@ -294,21 +336,31 @@ interface RepeatActivityConfig extends ActivityWindow, ReferenceSubmission {
|
||||
* */
|
||||
gapAllowance?: number,
|
||||
/**
|
||||
* Only include Submissions from this list of Subreddits (by name, case-insensitive)
|
||||
* If present, activities will be counted only if they are found in this list of Subreddits
|
||||
*
|
||||
* EX `["mealtimevideos","askscience"]`
|
||||
* @examples ["mealtimevideos","askscience"]
|
||||
* @minItems 1
|
||||
* Each value in the list can be either:
|
||||
*
|
||||
* * string (name of subreddit)
|
||||
* * regular expression to run on the subreddit name
|
||||
* * `SubredditState`
|
||||
*
|
||||
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
|
||||
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
|
||||
* */
|
||||
include?: string[],
|
||||
include?: (string | SubredditState)[],
|
||||
/**
|
||||
* Do not include Submissions from this list of Subreddits (by name, case-insensitive)
|
||||
* If present, activities will be counted only if they are **NOT** found in this list of Subreddits
|
||||
*
|
||||
* EX `["mealtimevideos","askscience"]`
|
||||
* @examples ["mealtimevideos","askscience"]
|
||||
* @minItems 1
|
||||
* Each value in the list can be either:
|
||||
*
|
||||
* * string (name of subreddit)
|
||||
* * regular expression to run on the subreddit name
|
||||
* * `SubredditState`
|
||||
*
|
||||
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
|
||||
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
|
||||
* */
|
||||
exclude?: string[],
|
||||
exclude?: (string | SubredditState)[],
|
||||
|
||||
/**
|
||||
* If present determines which activities to consider for gapAllowance.
|
||||
|
||||
src/Rule/RepostRule.ts (new file, 897 lines)
@@ -0,0 +1,897 @@
|
||||
import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
|
||||
import {Listing, SearchOptions} from "snoowrap";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import Comment from "snoowrap/dist/objects/Comment";
|
||||
import {
|
||||
compareDurationValue,
|
||||
comparisonTextOp,
|
||||
FAIL, formatNumber,
|
||||
isRepostItemResult, parseDurationComparison, parseGenericValueComparison,
|
||||
parseUsableLinkIdentifier,
|
||||
PASS, searchAndReplace, stringSameness, triggeredIndicator, windowToActivityWindowCriteria, wordCount
|
||||
} from "../util";
|
||||
import {
|
||||
ActivityWindow,
|
||||
ActivityWindowType,
|
||||
CompareValue, DurationComparor,
|
||||
JoinOperands,
|
||||
RepostItem,
|
||||
RepostItemResult,
|
||||
SearchAndReplaceRegExp,
|
||||
SearchFacetType,
|
||||
} from "../Common/interfaces";
|
||||
import objectHash from "object-hash";
|
||||
import {getActivities, getAttributionIdentifier} from "../Utils/SnoowrapUtils";
|
||||
import Fuse from "fuse.js";
|
||||
import leven from "leven";
|
||||
import {YoutubeClient, commentsAsRepostItems} from "../Utils/ThirdParty/YoutubeClient";
|
||||
import dayjs from "dayjs";
|
||||
import {rest} from "lodash";
|
||||
|
||||
const parseYtIdentifier = parseUsableLinkIdentifier();
|
||||
|
||||
export interface TextMatchOptions {
|
||||
/**
|
||||
* The percentage, as a whole number, of a repost title/comment that must match the title/comment being checked in order to consider both a match
|
||||
*
|
||||
* Note: Setting to 0 will make every candidate considered a match -- useful if you want to match if the URL has been reposted anywhere
|
||||
*
|
||||
* Defaults to `85` (85%)
|
||||
*
|
||||
* @default 85
|
||||
* @example [85]
|
||||
* */
|
||||
matchScore?: number
|
||||
|
||||
/**
|
||||
* The minimum number of words in the activity being checked for which this rule will run on
|
||||
*
|
||||
* If the word count is below the minimum the rule fails
|
||||
*
|
||||
* Defaults to 2
|
||||
*
|
||||
* @default 2
|
||||
* @example [2]
|
||||
* */
|
||||
minWordCount?: number
|
||||
|
||||
/**
|
||||
* Should text matching be case sensitive?
|
||||
*
|
||||
* Defaults to false
|
||||
*
|
||||
* @default false
|
||||
* @example [false]
|
||||
**/
|
||||
caseSensitive?: boolean
|
||||
}
|
||||
|
||||
export interface TextTransformOptions {
|
||||
/**
|
||||
* A set of search-and-replace operations to perform on text values before performing a match. Transformations are performed in the order they are defined.
|
||||
*
|
||||
* * If `transformationsActivity` IS NOT defined then these transformations will be performed on BOTH the activity text (submission title or comment) AND the repost candidate text
|
||||
* * If `transformationsActivity` IS defined then these transformations are only performed on repost candidate text
|
||||
* */
|
||||
transformations?: SearchAndReplaceRegExp[]
|
||||
|
||||
/**
|
||||
* Specify a separate set of transformations for the activity text (submission title or comment)
|
||||
*
|
||||
* To perform no transformations when `transformations` is defined set this to an empty array (`[]`)
|
||||
* */
|
||||
transformationsActivity?: SearchAndReplaceRegExp[]
|
||||
}
|
||||
|
||||
export interface SearchFacetJSONConfig extends TextMatchOptions, TextTransformOptions, ActivityWindow {
|
||||
kind: SearchFacetType | SearchFacetType[]
|
||||
}
|
||||
|
||||
export interface SearchFacet extends SearchFacetJSONConfig {
|
||||
kind: SearchFacetType
|
||||
}
|
||||
|
||||
export type TimeBasedSelector = "newest" | "oldest" | "any" | "all";
|
||||
|
||||
export interface OccurredAt {
|
||||
/**
|
||||
* Which repost to test on
|
||||
*
|
||||
* * `any` -- ANY repost passing `condition` will cause this criteria to be true
|
||||
* * `all` -- ALL reposts must pass `condition` for this criteria to be true
|
||||
* */
|
||||
"testOn": TimeBasedSelector,
|
||||
"condition": DurationComparor
|
||||
}
|
||||
|
||||
export interface OccurrenceTests {
|
||||
count?: {
|
||||
condition?: JoinOperands
|
||||
/**
|
||||
* An array of strings containing a comparison operator and the number of repost occurrences to compare against
|
||||
*
|
||||
* Examples:
|
||||
*
|
||||
* * `">= 7"` -- TRUE if 7 or more reposts were found
|
||||
* * `"< 1"` -- TRUE if less than 0 reposts were found
|
||||
* */
|
||||
test: CompareValue[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Test the time the reposts occurred at
|
||||
* */
|
||||
time?: {
|
||||
/**
|
||||
* How to test all the specified comparisons
|
||||
*
|
||||
* * AND -- All criteria must be true
|
||||
* * OR -- Any criteria must be true
|
||||
*
|
||||
* Defaults to AND
|
||||
*
|
||||
* @default AND
|
||||
* @example ["AND", "OR"]
|
||||
* */
|
||||
condition?: JoinOperands
|
||||
/**
|
||||
* An array of time-based conditions to test against found reposts (test when a repost was made)
|
||||
* */
|
||||
test: OccurredAt[]
|
||||
}
|
||||
}
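// Illustrative OccurrenceTests (hypothetical values): true when more than 2 but fewer than 10
// reposts are found AND the newest repost was made within the last 30 days.
const exampleOccurrenceTest: OccurrenceTests = {
    count: {
        condition: 'AND',
        test: ['> 2', '< 10']
    },
    time: {
        test: [{testOn: 'newest', condition: '< 30 days'}]
    }
};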
|
||||
|
||||
/**
|
||||
* A set of criteria used to find reposts
|
||||
*
|
||||
* Contains options and conditions used to define how candidate reposts are retrieved and if they are a match.
|
||||
*
|
||||
* */
|
||||
export interface RepostCriteria extends ActivityWindow, TextMatchOptions, TextTransformOptions {
|
||||
/**
|
||||
* Define how to find candidate reposts
|
||||
*
|
||||
* * **title** -- search reddit for submissions with the same title
|
||||
* * **url** -- search reddit for submissions with the same url
|
||||
* * **external** -- WHEN ACTIVITY IS A COMMENT - tries to get comments from external source (youtube, twitter, etc...)
|
||||
* */
|
||||
searchOn?: (SearchFacetType | SearchFacetJSONConfig)[]
|
||||
|
||||
/**
|
||||
* A set of comparisons to test against the number of reposts found
|
||||
*
|
||||
* If not specified the default is "AND [occurrences] > 0" IE any reposts makes this test pass
|
||||
* */
|
||||
occurrences?: {
|
||||
/**
|
||||
* How to test all the specified comparisons
|
||||
*
|
||||
* * AND -- All criteria must be true
|
||||
* * OR -- Any criteria must be true
|
||||
*
|
||||
* Defaults to AND
|
||||
*
|
||||
* @default AND
|
||||
* @example ["AND", "OR"]
|
||||
* */
|
||||
condition?: JoinOperands
|
||||
|
||||
criteria?: OccurrenceTests[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Test the time the reposts occurred at
|
||||
* */
|
||||
occurredAt?: {
|
||||
/**
|
||||
* How to test all the specified comparisons
|
||||
*
|
||||
* * AND -- All criteria must be true
|
||||
* * OR -- Any criteria must be true
|
||||
*
|
||||
* Defaults to AND
|
||||
*
|
||||
* @default AND
|
||||
* @example ["AND", "OR"]
|
||||
* */
|
||||
condition?: JoinOperands
|
||||
/**
|
||||
* An array of time-based conditions to test against found reposts (test when a repost was made)
|
||||
* */
|
||||
criteria: OccurredAt[]
|
||||
}
|
||||
|
||||
/**
|
||||
* The maximum number of comments/submissions to check
|
||||
*
|
||||
* In both cases this list is gathered from sorting all submissions or all comments from all submission by number of votes and taking the "top" maximum specified
|
||||
*
|
||||
* For comment checks this is the number of comments cached
|
||||
*
|
||||
* @default 50
|
||||
* @example [50]
|
||||
* */
|
||||
maxRedditItems?: number
|
||||
|
||||
/**
|
||||
* The maximum number of external items (youtube comments) to check (and cache for comment checks)
|
||||
*
|
||||
* @default 50
|
||||
* @example [50]
|
||||
* */
|
||||
maxExternalItems?: number
|
||||
}
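// Illustrative RepostCriteria (hypothetical values): search by title and url, require a 90% title
// match, and pass when at least one repost is found.
const exampleRepostCriteria: RepostCriteria = {
    searchOn: ['title', 'url'],
    matchScore: 90,
    occurrences: {
        condition: 'OR',
        criteria: [{count: {test: ['> 0']}}]
    },
    maxRedditItems: 50
};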
|
||||
|
||||
export interface CriteriaResult {
|
||||
passed: boolean
|
||||
conditionsSummary: string
|
||||
items: RepostItemResult[]
|
||||
}
|
||||
|
||||
const parentSubmissionSearchFacetDefaults = {
|
||||
title: {
|
||||
matchScore: 85,
|
||||
minWordCount: 3
|
||||
},
|
||||
url: {
|
||||
matchScore: 0, // when looking for submissions to find repost comments on automatically include any with exact same url
|
||||
},
|
||||
duplicates: {
|
||||
matchScore: 0, // when looking for submissions to find repost comments on automatically include any that reddit thinks are duplicates
|
||||
},
|
||||
crossposts: {
|
||||
matchScore: 0, // when looking for submissions to find repost comments on automatically include any that reddit thinks are crossposts
|
||||
},
|
||||
external: {}
|
||||
}
|
||||
|
||||
const isSearchFacetType = (val: any): val is SearchFacetType => {
|
||||
if (typeof val === 'string') {
|
||||
return ['title', 'url', 'duplicates', 'crossposts', 'external'].includes(val);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
const generateSearchFacet = (val: SearchFacetType | SearchFacetJSONConfig): SearchFacet[] => {
|
||||
let facets: SearchFacet[] = [];
|
||||
if (isSearchFacetType(val)) {
|
||||
facets.push({
|
||||
kind: val
|
||||
});
|
||||
} else if (Array.isArray(val.kind)) {
|
||||
facets = facets.concat(val.kind.map(x => ({...val, kind: x})));
|
||||
} else {
|
||||
facets.push(val as SearchFacet);
|
||||
}
|
||||
|
||||
return facets.map(x => {
|
||||
return {
|
||||
...parentSubmissionSearchFacetDefaults[x.kind],
|
||||
...x,
|
||||
}
|
||||
});
|
||||
}
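// e.g. generateSearchFacet('url') yields [{kind: 'url', matchScore: 0}]: the per-kind defaults above are
// merged in first, so any options supplied in a SearchFacetJSONConfig override them.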
|
||||
|
||||
export class RepostRule extends Rule {
|
||||
criteria: RepostCriteria[]
|
||||
condition: JoinOperands;
|
||||
|
||||
submission?: Submission;
|
||||
|
||||
constructor(options: RepostRuleOptions) {
|
||||
super(options);
|
||||
const {
|
||||
criteria = [{}],
|
||||
condition = 'OR'
|
||||
} = options || {};
|
||||
if (criteria.length < 1) {
|
||||
throw new Error('Must provide at least one RepostCriteria');
|
||||
}
|
||||
this.criteria = criteria;
|
||||
this.condition = condition;
|
||||
}
|
||||
|
||||
getKind(): string {
|
||||
return 'Repost';
|
||||
}
|
||||
|
||||
protected getSpecificPremise(): object {
|
||||
return {
|
||||
criteria: this.criteria,
|
||||
condition: this.condition
|
||||
}
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
protected async getSubmission(item: Submission | Comment) {
|
||||
if (item instanceof Comment) {
|
||||
// @ts-ignore
|
||||
return await this.client.getSubmission(item.link_id).fetch();
|
||||
}
|
||||
return item;
|
||||
}
|
||||
|
||||
protected async process(item: Submission | Comment): Promise<[boolean, RuleResult]> {
|
||||
|
||||
let criteriaResults: CriteriaResult[] = [];
|
||||
let ytClient: YoutubeClient | undefined = undefined;
|
||||
let criteriaMatchedResults: RepostItemResult[] = [];
|
||||
let totalSubs = 0;
|
||||
let totalCommentSubs = 0;
|
||||
let totalComments = 0;
|
||||
let totalExternal = new Map<string,number>();
|
||||
let fromCache = false;
|
||||
let andFail = false;
|
||||
|
||||
for (const rCriteria of this.criteria) {
|
||||
criteriaMatchedResults = [];
|
||||
const {
|
||||
searchOn = (item instanceof Submission ? ['title', 'url', 'duplicates', 'crossposts'] : ['external', 'title', 'url', 'duplicates', 'crossposts']),
|
||||
//criteria = {},
|
||||
maxRedditItems = 50,
|
||||
maxExternalItems = 50,
|
||||
window = 20,
|
||||
...restCriteria
|
||||
} = rCriteria;
|
||||
|
||||
const searchFacets = searchOn.map(x => generateSearchFacet(x)).flat(1) as SearchFacet[];
|
||||
|
||||
const includeCrossposts = searchFacets.some(x => x.kind === 'crossposts');
|
||||
|
||||
// in getDuplicate() options add "crossposts_only=1" to get only crossposts https://www.reddit.com/r/redditdev/comments/b4t5g4/get_all_the_subreddits_that_a_post_has_been/
|
||||
// if a submission is a crosspost it has "crosspost_parent" attribute https://www.reddit.com/r/redditdev/comments/l46y2l/check_if_post_is_a_crosspost/
|
||||
|
||||
const strongWindow = windowToActivityWindowCriteria(window);
|
||||
|
||||
const candidateHash = `repostItems-${item instanceof Submission ? item.id : item.link_id}-${objectHash.sha1({
|
||||
window,
|
||||
searchOn
|
||||
})}`;
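// Candidate lists are cached per parent submission; hashing the window and searchOn settings into the
// key keeps criteria with different search configs from reusing each other's cached candidates.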
|
||||
let items: (RepostItem|RepostItemResult)[] = [];
|
||||
let cacheRes = undefined;
|
||||
if (item instanceof Comment) {
|
||||
cacheRes = await this.resources.cache.get(candidateHash) as ((RepostItem|RepostItemResult)[] | undefined | null);
|
||||
}
|
||||
|
||||
if (cacheRes === undefined || cacheRes === null) {
|
||||
|
||||
const sub = await this.getSubmission(item);
|
||||
let dups: (Submission[] | undefined) = undefined;
|
||||
|
||||
for (const sf of searchFacets) {
|
||||
|
||||
const {
|
||||
matchScore = 85,
|
||||
minWordCount = 3,
|
||||
transformations = [],
|
||||
} = sf;
|
||||
|
||||
if (sf.kind === 'external') {
|
||||
const attribution = getAttributionIdentifier(sub);
|
||||
switch (attribution.provider) {
|
||||
case 'YouTube':
|
||||
const ytCreds = this.resources.getThirdPartyCredentials('youtube')
|
||||
if (ytCreds === undefined) {
|
||||
throw new Error('Cannot extract comments from Youtube because a Youtube Data API key was not provided in configuration');
|
||||
}
|
||||
if (ytClient === undefined) {
|
||||
ytClient = new YoutubeClient(ytCreds.apiKey);
|
||||
}
|
||||
const ytComments = commentsAsRepostItems(await ytClient.getVideoTopComments(sub.url, maxExternalItems));
|
||||
items = items.concat(ytComments)
|
||||
totalExternal.set('Youtube comments', (totalExternal.get('Youtube comments') ?? 0) + ytComments.length);
|
||||
break;
|
||||
default:
|
||||
if (attribution.provider === undefined) {
|
||||
this.logger.debug('Unable to determine external provider');
|
||||
continue;
|
||||
} else {
|
||||
this.logger.debug(`External parsing of ${attribution.provider} is not supported yet.`);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let subs: Submission[];
|
||||
|
||||
if (['title', 'url'].includes(sf.kind)) {
|
||||
let query: string;
|
||||
let searchFunc: (limit: number) => Promise<Listing<Submission | Comment>>;
|
||||
if (sf.kind === 'title') {
|
||||
query = (await this.getSubmission(item)).title;
|
||||
searchFunc = (limit: number) => {
|
||||
let opts: SearchOptions = {
|
||||
query,
|
||||
limit,
|
||||
sort: 'relevance'
|
||||
};
|
||||
if (strongWindow.subreddits?.include !== undefined && strongWindow.subreddits?.include.length > 0) {
|
||||
opts.restrictSr = true;
|
||||
opts.subreddit = strongWindow.subreddits?.include.join('+');
|
||||
}
|
||||
return this.client.search(opts);
|
||||
}
|
||||
} else {
|
||||
const attr = getAttributionIdentifier(sub);
|
||||
if (attr.provider === 'YouTube') {
|
||||
const ytId = parseYtIdentifier(sub.url);
|
||||
query = `url:https://youtu.be/${ytId}`;
|
||||
} else {
|
||||
query = `url:${sub.url}`;
|
||||
}
|
||||
searchFunc = (limit: number) => {
|
||||
let opts: SearchOptions = {
|
||||
query,
|
||||
limit,
|
||||
sort: 'top'
|
||||
};
|
||||
if (strongWindow.subreddits?.include !== undefined && strongWindow.subreddits?.include.length > 0) {
|
||||
opts.restrictSr = true;
|
||||
opts.subreddit = strongWindow.subreddits?.include.join('+');
|
||||
}
|
||||
return this.client.search(opts);
|
||||
}
|
||||
}
|
||||
subs = await getActivities(searchFunc, {window: strongWindow}) as Submission[];
|
||||
} else {
|
||||
|
||||
if (dups === undefined) {
|
||||
let searchFunc: (limit: number) => Promise<Listing<Submission | Comment>> = (limit: number) => {
|
||||
// this does not work correctly
|
||||
// see https://github.com/not-an-aardvark/snoowrap/issues/320
|
||||
// searchFunc = (limit: number) => {
|
||||
// return sub.getDuplicates({crossposts_only: 0, limit});
|
||||
// };
|
||||
return this.client.oauthRequest({
|
||||
uri: `duplicates/${sub.id}`,
|
||||
qs: {
|
||||
limit,
|
||||
}
|
||||
}).then(x => {
|
||||
return Promise.resolve(x.comments) as Promise<Listing<Submission>>
|
||||
});
|
||||
};
|
||||
subs = await getActivities(searchFunc, {window: strongWindow}) as Submission[];
|
||||
dups = subs;
|
||||
} else {
|
||||
subs = dups;
|
||||
}
|
||||
|
||||
if (sf.kind === 'duplicates') {
|
||||
// @ts-ignore
|
||||
subs = subs.filter(x => x.crosspost_parent === undefined)
|
||||
} else {
|
||||
// @ts-ignore
|
||||
subs = subs.filter(x => x.crosspost_parent !== undefined && x.crosspost_parent === sub.id)
|
||||
}
|
||||
}
|
||||
|
||||
// filter by minimum word count
|
||||
subs = subs.filter(x => wordCount(x.title) > minWordCount);
|
||||
|
||||
items = items.concat(subs.map(x => ({
|
||||
value: searchAndReplace(x.title, transformations),
|
||||
createdOn: x.created,
|
||||
source: 'reddit',
|
||||
sourceUrl: x.permalink,
|
||||
id: x.id,
|
||||
score: x.score,
|
||||
itemType: 'submission',
|
||||
acquisitionType: sf.kind,
|
||||
sourceObj: x,
|
||||
reqSameness: matchScore,
|
||||
})));
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
if (!includeCrossposts) {
|
||||
const sub = await this.getSubmission(item);
|
||||
// remove submissions if they are official crossposts of the submission being checked and searchOn did not include 'crossposts'
|
||||
items = items.filter(x => x.itemType !== 'submission' || !(x.sourceObj.crosspost_parent !== undefined && x.sourceObj.crosspost_parent === sub.id))
|
||||
}
|
||||
|
||||
let sourceTitle = searchAndReplace(sub.title, restCriteria.transformationsActivity ?? []);
|
||||
|
||||
// do submission scoring BEFORE pruning duplicates bc...
|
||||
// might end up in a situation where we get same submission for both title and url
|
||||
// -- url is always a repost but title is not guaranteed, and if we removed the url item but not the title we could potentially filter the title submission out and miss this repost
|
||||
items = items.reduce((acc: (RepostItem|RepostItemResult)[], x) => {
|
||||
if(x.itemType === 'submission') {
|
||||
totalSubs++;
|
||||
const sf = searchFacets.find(y => y.kind === x.acquisitionType) as SearchFacet;
|
||||
|
||||
let cleanTitle = x.value;
|
||||
if (!(sf.caseSensitive ?? false)) {
|
||||
cleanTitle = cleanTitle.toLowerCase();
|
||||
}
|
||||
const strMatchResults = stringSameness(sourceTitle, cleanTitle);
|
||||
if(strMatchResults.highScoreWeighted >= (x.reqSameness as number)) {
|
||||
return acc.concat({
|
||||
...x,
|
||||
sameness: Math.min(strMatchResults.highScoreWeighted, 100),
|
||||
});
|
||||
}
|
||||
return acc;
|
||||
}
|
||||
return acc.concat(x);
|
||||
}, []);
|
||||
|
||||
// now remove duplicate submissions
|
||||
items = items.reduce((acc: RepostItem[], curr) => {
|
||||
if(curr.itemType !== 'submission') {
|
||||
return acc.concat(curr);
|
||||
}
|
||||
const subId = curr.sourceObj.id;
|
||||
if (sub.id !== subId && !acc.some(x => x.itemType === 'submission' && x.sourceObj.id === subId)) {
|
||||
return acc.concat(curr);
|
||||
}
|
||||
return acc;
|
||||
}, []);
|
||||
|
||||
|
||||
if (item instanceof Comment) {
|
||||
// we need to gather comments from submissions
|
||||
|
||||
// first cut down the number of submissions to retrieve because we don't care about having ALL submissions,
|
||||
// just most popular comments (which will be in the most popular submissions)
|
||||
let subs = items.filter(x => x.itemType === 'submission').map(x => x.sourceObj) as Submission[];
|
||||
totalCommentSubs += subs.length;
|
||||
|
||||
const nonSubItems = items.filter(x => x.itemType !== 'submission' && wordCount(x.value) > (restCriteria.minWordCount ?? 3));
|
||||
|
||||
subs.sort((a, b) => a.score - b.score).reverse();
|
||||
// take top 10 submissions
|
||||
subs = subs.slice(0, 10);
|
||||
|
||||
let comments: Comment[] = [];
|
||||
for (const sub of subs) {
|
||||
|
||||
const commFunc = (limit: number) => {
|
||||
return this.client.oauthRequest({
|
||||
uri: `${sub.subreddit_name_prefixed}/comments/${sub.id}`,
|
||||
// get ONLY top-level comments, sorted by Top
|
||||
qs: {
|
||||
sort: 'top',
|
||||
depth: 0,
|
||||
limit,
|
||||
}
|
||||
}).then(x => {
|
||||
return x.comments as Promise<Listing<Comment>>
|
||||
});
|
||||
}
|
||||
// and return the top 20 most popular
|
||||
const subComments = await getActivities(commFunc, {window: {count: 20}, skipReplies: true}) as Listing<Comment>;
|
||||
comments = comments.concat(subComments);
|
||||
}
|
||||
|
||||
// sort by highest scores
|
||||
comments.sort((a, b) => a.score - b.score).reverse();
|
||||
// filter out all comments with fewer words than required (prevent false negatives)
|
||||
comments = comments.filter(x => wordCount(x.body) > (restCriteria.minWordCount ?? 3));
|
||||
totalComments += Math.min(comments.length, maxRedditItems);
|
||||
|
||||
// and take the user-defined maximum number of items
|
||||
items = nonSubItems.concat(comments.slice(0, maxRedditItems).map(x => ({
|
||||
value: searchAndReplace(x.body, restCriteria.transformations ?? []),
|
||||
createdOn: x.created,
|
||||
source: 'reddit',
|
||||
id: x.id,
|
||||
sourceUrl: x.permalink,
|
||||
score: x.score,
|
||||
itemType: 'comment',
|
||||
acquisitionType: 'comment'
|
||||
})));
|
||||
}
|
||||
|
||||
// cache items for 20 minutes
|
||||
await this.resources.cache.set(candidateHash, items, {ttl: 1200});
|
||||
} else {
|
||||
items = cacheRes;
|
||||
totalExternal = items.reduce((acc, curr) => {
|
||||
if(curr.acquisitionType === 'external') {
|
||||
acc.set(`${curr.source} comments`, (acc.get(`${curr.source} comments`) ?? 0 ) + 1);
|
||||
return acc;
|
||||
}
|
||||
return acc;
|
||||
}, new Map<string, number>());
|
||||
//totalSubs = items.filter(x => x.itemType === 'submission').length;
|
||||
//totalCommentSubs = totalSubs;
|
||||
totalComments = items.filter(x => x.itemType === 'comment' && x.source === 'reddit').length;
|
||||
fromCache = true;
|
||||
}
|
||||
|
||||
const {
|
||||
matchScore = 85,
|
||||
caseSensitive = false,
|
||||
transformations = [],
|
||||
transformationsActivity = transformations,
|
||||
occurrences = {
|
||||
condition: 'AND',
|
||||
criteria: [
|
||||
{
|
||||
count: {
|
||||
test: ['> 0']
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
} = restCriteria;
|
||||
|
||||
if(item instanceof Submission) {
|
||||
// we've already done difference calculations in the searchFacet phase
|
||||
// and when the check is for a sub it means we are only checking if the submissions has been reposted which means either:
|
||||
// * very similar title (default sameness of 85% or more)
|
||||
// * duplicate/same URL -- which is a repost, duh
|
||||
// so just add all items to critMatches at this point
|
||||
criteriaMatchedResults = criteriaMatchedResults.concat(items.filter(x => "sameness" in x) as RepostItemResult[]);
|
||||
} else {
|
||||
let sourceContent = searchAndReplace(item.body, transformationsActivity);
|
||||
if (!caseSensitive) {
|
||||
sourceContent = sourceContent.toLowerCase();
|
||||
}
|
||||
|
||||
for (const i of items) {
|
||||
const itemContent = !caseSensitive ? i.value.toLowerCase() : i.value;
|
||||
const strMatchResults = stringSameness(sourceContent, itemContent);
|
||||
if(strMatchResults.highScoreWeighted >= matchScore) {
|
||||
criteriaMatchedResults.push({
|
||||
...i,
|
||||
// @ts-ignore
|
||||
reqSameness: matchScore,
|
||||
sameness: Math.min(strMatchResults.highScoreWeighted, 100)
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// now do occurrence and time tests
|
||||
|
||||
const {
|
||||
condition: occCondition = 'AND',
|
||||
criteria: occCriteria = [
|
||||
{
|
||||
count: {
|
||||
test: ['> 0']
|
||||
}
|
||||
}
|
||||
]
|
||||
} = occurrences;
|
||||
|
||||
let orPass = false;
|
||||
let occurrenceReason = null;
|
||||
|
||||
for(const occurrenceTest of occCriteria) {
|
||||
|
||||
const {
|
||||
count:{
|
||||
condition: oCondition = 'AND',
|
||||
test: oCriteria = []
|
||||
} = {},
|
||||
time: {
|
||||
condition: tCondition = 'AND',
|
||||
test: tCriteria = [],
|
||||
} = {}
|
||||
} = occurrenceTest;
|
||||
|
||||
let conditionFailSummaries = [];
|
||||
|
||||
const passedConditions = [];
|
||||
const failedConditions = [];
|
||||
|
||||
for (const oc of oCriteria) {
|
||||
const ocCompare = parseGenericValueComparison(oc);
|
||||
const ocMatch = comparisonTextOp(criteriaMatchedResults.length, ocCompare.operator, ocCompare.value);
|
||||
if (ocMatch) {
|
||||
passedConditions.push(oc);
|
||||
} else {
|
||||
failedConditions.push(oc);
|
||||
if (oCondition === 'AND') {
|
||||
conditionFailSummaries.push(`(AND) ${oc} occurrences was not true`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (passedConditions.length === 0 && oCriteria.length > 0) {
|
||||
conditionFailSummaries.push('(OR) No occurrence tests passed');
|
||||
}
|
||||
|
||||
const existingPassed = passedConditions.length;
|
||||
if (conditionFailSummaries.length === 0) {
|
||||
const timeAwareReposts = [...criteriaMatchedResults].filter(x => x.createdOn !== undefined).sort((a, b) => (a.createdOn as number) - (b.createdOn as number));
|
||||
for (const tc of tCriteria) {
|
||||
let toTest: RepostItemResult[] = [];
|
||||
const durationCompare = parseDurationComparison(tc.condition);
|
||||
switch (tc.testOn) {
|
||||
case 'newest':
|
||||
case 'oldest':
|
||||
if (tc.testOn === 'newest') {
|
||||
toTest = timeAwareReposts.slice(-1);
|
||||
} else {
|
||||
toTest = timeAwareReposts.slice(0, 1);
|
||||
}
|
||||
break;
|
||||
case 'any':
|
||||
case 'all':
|
||||
toTest = timeAwareReposts;
|
||||
break;
|
||||
}
|
||||
const timePass = tc.testOn === 'any' ? toTest.some(x => compareDurationValue(durationCompare, dayjs.unix(x.createdOn as number))) : toTest.every(x => compareDurationValue(durationCompare, dayjs.unix(x.createdOn as number)));
|
||||
if (timePass) {
|
||||
passedConditions.push(tc.condition);
|
||||
} else {
|
||||
failedConditions.push(tc.condition);
|
||||
if (tCondition === 'AND') {
|
||||
conditionFailSummaries.push(`(AND) ${tc.condition} was not true`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (tCriteria.length > 0 && passedConditions.length === existingPassed) {
|
||||
conditionFailSummaries.push('(OR) No time-based tests passed');
|
||||
}
|
||||
}
|
||||
|
||||
if(conditionFailSummaries.length !== 0 && occCondition === 'AND') {
|
||||
// failed occurrence tests (high-level)
|
||||
occurrenceReason = conditionFailSummaries.join(' | ');
|
||||
break;
|
||||
}
|
||||
|
||||
if(passedConditions.length > 0 && occCondition === 'OR') {
|
||||
occurrenceReason = passedConditions.join(' | ');
|
||||
orPass = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let passed = occCriteria.length === 0;
|
||||
|
||||
if(occCriteria.length > 0) {
|
||||
if(occCondition === 'OR') {
|
||||
passed = orPass;
|
||||
occurrenceReason = occurrenceReason === null ? 'No occurrence test sets passed' : occurrenceReason;
|
||||
} else if(occCondition === 'AND') {
|
||||
passed = occurrenceReason === null;
|
||||
occurrenceReason = occurrenceReason === null ? 'All tests passed' : occurrenceReason;
|
||||
}
|
||||
//passed = (occCondition === 'OR' && orPass) || (occurrenceFailureReason === null && occCondition === 'AND')
|
||||
}
|
||||
|
||||
const results = {
|
||||
passed,
|
||||
conditionsSummary: occurrenceReason as string,
|
||||
items: criteriaMatchedResults
|
||||
};
|
||||
criteriaResults.push(results)
|
||||
|
||||
|
||||
if(!results.passed) {
|
||||
if(this.condition === 'AND') {
|
||||
andFail = true;
|
||||
break;
|
||||
}
|
||||
} else if(this.condition === 'OR') {
|
||||
break;
|
||||
}
|
||||
if (!results.passed && this.condition === 'AND') {
|
||||
andFail = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// get all repost items for stats and SCIENCE
|
||||
const repostItemResults = [...criteriaResults
|
||||
// only want reposts from criteria that passed
|
||||
.filter(x => x.passed).map(x => x.items)
|
||||
.flat()
|
||||
// make sure we are only accumulating unique reposts
|
||||
.reduce((acc, curr) => {
|
||||
const hash = `${curr.source}-${curr.itemType}-${curr.id}`;
|
||||
if (!acc.has(hash)) {
|
||||
acc.set(hash, curr);
|
||||
}
|
||||
return acc;
|
||||
}, new Map<string, RepostItemResult>()).values()];
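// The Map keyed on source/itemType/id de-duplicates reposts that matched under more than one passing criteria set.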
|
||||
|
||||
repostItemResults.sort((a, b) => a.sameness - b.sameness).reverse();
|
||||
const foundRepost = criteriaResults.length > 0;
|
||||
|
||||
|
||||
let avgSameness = null;
|
||||
let closestSummary = null;
|
||||
let closestSameness = null;
|
||||
let searchCandidateSummary = '';
|
||||
|
||||
if(item instanceof Comment) {
|
||||
searchCandidateSummary = `Searched top ${totalComments} comments in top 10 ${fromCache ? '' : `of ${totalCommentSubs} `}most popular submissions`;
|
||||
if(totalExternal.size > 0) {
|
||||
searchCandidateSummary += ", ";
|
||||
const extSumm: string[] = [];
|
||||
totalExternal.forEach((v, k) => {
|
||||
extSumm.push(`${v} ${k}`);
|
||||
});
|
||||
searchCandidateSummary += extSumm.join(', ');
|
||||
}
|
||||
} else {
|
||||
searchCandidateSummary = `Searched ${totalSubs} submissions`
|
||||
}
|
||||
|
||||
let summary = `${searchCandidateSummary} and found ${repostItemResults.length} reposts.`;
|
||||
|
||||
if(repostItemResults.length > 0) {
|
||||
avgSameness = formatNumber(repostItemResults.reduce((acc, curr) => acc + curr.sameness, 0) / repostItemResults.length);
|
||||
const closest = repostItemResults[0];
|
||||
summary += ` --- Closest Match => >> ${closest.value} << from ${closest.source} (${closest.sourceUrl}) with ${formatNumber(closest.sameness)}% sameness.`
|
||||
closestSummary = `matched a ${closest.itemType} from ${closest.source}`;
|
||||
closestSameness = closest.sameness;
|
||||
if(criteriaResults.length > 1) {
|
||||
summary += ` Avg ${formatNumber(avgSameness)}%`;
|
||||
}
|
||||
}
|
||||
|
||||
let passed;
|
||||
|
||||
if(this.condition === 'AND') {
|
||||
const failedCrit = criteriaResults.find(x => !x.passed);
|
||||
if(failedCrit !== undefined) {
|
||||
summary += `BUT a criteria failed >> ${failedCrit.conditionsSummary} << and rule has AND condition.`;
|
||||
passed = false;
|
||||
} else {
|
||||
passed = true;
|
||||
}
|
||||
} else {
|
||||
const passedCrit = criteriaResults.find(x => x.passed);
|
||||
if(passedCrit === undefined) {
|
||||
summary += `BUT all criteria failed`;
|
||||
passed = false;
|
||||
} else {
|
||||
passed = true;
|
||||
}
|
||||
}
|
||||
|
||||
const result = `${passed ? PASS : FAIL} ${summary}`;
|
||||
this.logger.verbose(result);
|
||||
|
||||
return [passed, this.getResult(passed, {
|
||||
result,
|
||||
data: {
|
||||
allResults: criteriaResults,
|
||||
closestSameness: passed ? formatNumber(closestSameness as number) : undefined,
|
||||
closestSummary: passed ? closestSummary : undefined,
|
||||
}
|
||||
})];
|
||||
}
|
||||
}
|
||||
|
||||
interface RepostConfig {
|
||||
/**
|
||||
* A list of Regular Expressions and conditions under which tested Activity(ies) are matched
|
||||
* @minItems 1
|
||||
* @examples [{"regex": "/reddit/", "matchThreshold": "> 3"}]
|
||||
* */
|
||||
criteria?: RepostCriteria[]
|
||||
/**
|
||||
* * If `OR` then any set of Criteria that pass will trigger the Rule
|
||||
* * If `AND` then all Criteria sets must pass to trigger the Rule
|
||||
*
|
||||
* @default "OR"
|
||||
* */
|
||||
condition?: 'AND' | 'OR'
|
||||
}
|
||||
|
||||
export interface RepostRuleOptions extends RepostConfig, RuleOptions {
|
||||
}
|
||||
|
||||
/**
|
||||
* Search for reposts of a Submission or Comment
|
||||
*
|
||||
* * For submissions the title or URL can searched and matched against
|
||||
* * For comments, candidate comments are gathered from similar reddit submissions and/or external sources (youtube, twitter, etc..) and then matched against
|
||||
*
|
||||
* */
|
||||
export interface RepostRuleJSONConfig extends RepostConfig, RuleJSONConfig {
|
||||
/**
|
||||
* @examples ["repost"]
|
||||
* */
|
||||
kind: 'repost'
|
||||
}
|
||||
@@ -8,6 +8,7 @@ import HistoryRule, {HistoryJSONConfig} from "./HistoryRule";
import RegexRule, {RegexRuleJSONConfig} from "./RegexRule";
import {SubredditResources} from "../Subreddit/SubredditResources";
import Snoowrap from "snoowrap";
import {RepostRule, RepostRuleJSONConfig} from "./RepostRule";

export function ruleFactory
(config: RuleJSONConfig, logger: Logger, subredditName: string, resources: SubredditResources, client: Snoowrap): Rule {
@@ -31,6 +32,9 @@ export function ruleFactory
        case 'regex':
            cfg = config as RegexRuleJSONConfig;
            return new RegexRule({...cfg, logger, subredditName, resources, client});
        case 'repost':
            cfg = config as RepostRuleJSONConfig;
            return new RepostRule({...cfg, logger, subredditName, resources, client});
        default:
            throw new Error('rule "kind" was not recognized.');
    }

@@ -28,10 +28,16 @@ interface ResultContext {
|
||||
|
||||
export interface RuleResult extends ResultContext {
|
||||
premise: RulePremise
|
||||
kind: string
|
||||
name: string
|
||||
triggered: (boolean | null)
|
||||
}
|
||||
|
||||
export type FormattedRuleResult = RuleResult & {
|
||||
triggered: string
|
||||
result: string
|
||||
}
|
||||
|
||||
export interface RuleSetResult {
|
||||
results: RuleResult[],
|
||||
condition: 'OR' | 'AND',
|
||||
@@ -111,13 +117,13 @@ export abstract class Rule implements IRule, Triggerable {
|
||||
this.logger.verbose('(Skipped) Exclusive author criteria not matched');
|
||||
return Promise.resolve([null, this.getResult(null, {result: 'Exclusive author criteria not matched'})]);
|
||||
}
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
this.logger.error('Error occurred during Rule pre-process checks');
|
||||
throw err;
|
||||
}
|
||||
try {
|
||||
return this.process(item);
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
this.logger.error('Error occurred while processing rule');
|
||||
throw err;
|
||||
}
|
||||
@@ -148,6 +154,7 @@ export abstract class Rule implements IRule, Triggerable {
|
||||
protected getResult(triggered: (boolean | null) = null, context: ResultContext = {}): RuleResult {
|
||||
return {
|
||||
premise: this.getPremise(),
|
||||
kind: this.getKind(),
|
||||
name: this.name,
|
||||
triggered,
|
||||
...context,
|
||||
@@ -204,22 +211,6 @@ export interface UserNoteCriteria {
|
||||
search?: 'current' | 'consecutive' | 'total'
|
||||
}
|
||||
|
||||
/**
|
||||
* A duration and how to compare it against a value
|
||||
*
|
||||
* The syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`
|
||||
*
|
||||
* * EX `> 100 days` => Passes if the date being compared is before 100 days ago
|
||||
* * EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months
|
||||
*
|
||||
* Unit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)
|
||||
*
|
||||
* [See] https://regexr.com/609n8 for example
|
||||
*
|
||||
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\s*$
|
||||
* */
|
||||
export type DurationComparor = string;
|
||||
|
||||
export interface IRule extends ChecksActivityState {
|
||||
/**
|
||||
* An optional, but highly recommended, friendly name for this rule. If not present will default to `kind`.
|
||||
@@ -249,6 +240,6 @@ export interface RuleJSONConfig extends IRule {
|
||||
* The kind of rule to run
|
||||
* @examples ["recentActivity", "repeatActivity", "author", "attribution", "history"]
|
||||
*/
|
||||
kind: 'recentActivity' | 'repeatActivity' | 'author' | 'attribution' | 'history' | 'regex'
|
||||
kind: 'recentActivity' | 'repeatActivity' | 'author' | 'attribution' | 'history' | 'regex' | 'repost'
|
||||
}
|
||||
|
||||
|
||||
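The comparison syntax documented for DurationComparor above can be validated with the same pattern the annotation declares; a minimal, self-contained sketch:

const DURATION_COMPARE = /^\s*(>|>=|<|<=)\s*(\d+)\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\s*$/;
const example = '> 100 days';
const match = example.match(DURATION_COMPARE);
if (match !== null) {
    const [, operator, value, unit] = match;
    // operator => '>', value => '100', unit => 'days'
}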
@@ -29,6 +29,26 @@
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"anyOf": [
|
||||
{
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"description": "An (array of) string/regular expression to test contents of an Author's profile description against\n\nIf no flags are specified then the **insensitive** flag is used by default\n\nIf using an array then if **any** value in the array passes the description test passes",
|
||||
"examples": [
|
||||
[
|
||||
"/test$/i",
|
||||
"look for this string literal"
|
||||
]
|
||||
]
|
||||
},
|
||||
"flairCssClass": {
|
||||
"description": "A list of (user) flair css class values from the subreddit to match against",
|
||||
"examples": [
|
||||
@@ -69,6 +89,10 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"shadowBanned": {
|
||||
"description": "Is the author shadowbanned?\n\nThis is determined by trying to retrieve the author's profile. If a 404 is returned it is likely they are shadowbanned",
|
||||
"type": "boolean"
|
||||
},
|
||||
"totalKarma": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
@@ -132,12 +156,22 @@
|
||||
}
|
||||
],
|
||||
"properties": {
|
||||
"age": {
|
||||
"description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"approved": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"deleted": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"depth": {
|
||||
"description": "The (nested) level of a comment.\n\n* 0 mean the comment is at top-level (replying to submission)\n* non-zero, Nth value means the comment has N parent comments",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"distinguished": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -154,6 +188,16 @@
|
||||
"removed": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"reports": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"spam": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -179,6 +223,11 @@
|
||||
}
|
||||
],
|
||||
"properties": {
|
||||
"age": {
|
||||
"description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
|
||||
"type": "string"
|
||||
},
|
||||
"approved": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -213,6 +262,16 @@
|
||||
"removed": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"reports": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
|
||||
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
|
||||
"type": "string"
|
||||
},
|
||||
"spam": {
|
||||
"type": "boolean"
|
||||
},
|
||||
|
||||
src/Schema/App.json (1083): file diff suppressed because it is too large
@@ -19,89 +19,42 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"BotCredentialsJsonConfig": {
|
||||
"properties": {
|
||||
"reddit": {
|
||||
"$ref": "#/definitions/RedditCredentials"
|
||||
},
|
||||
"youtube": {
|
||||
"properties": {
|
||||
"apiKey": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"apiKey"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"reddit"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"BotInstanceJsonConfig": {
|
||||
"description": "The configuration for an **individual reddit account** ContextMod will run as a bot.\n\nMultiple bot configs may be specified (one per reddit account).\n\n**NOTE:** If `bots` is not specified in a `FILE` then a default `bot` is generated using `ENV/ARG` values IE `CLIENT_ID`, etc...but if `bots` IS specified the default is not generated.",
|
||||
"properties": {
|
||||
"caching": {
|
||||
"description": "Settings to configure the default caching behavior for each suberddit",
|
||||
"properties": {
|
||||
"authorTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, author activity history (Comments/Submission) should be cached\n\n* ENV => `AUTHOR_TTL`\n* ARG => `--authorTTL <sec>`",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"commentTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, a comment should be cached",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"filterCriteriaTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)\n\nThis is especially useful if when polling high-volume comments and your checks rely on author/item filters",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/CacheOptions"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"memory",
|
||||
"none",
|
||||
"redis"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"description": "The cache provider and, optionally, a custom configuration for that provider\n\nIf not present or `null` provider will be `memory`.\n\nTo specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`"
|
||||
},
|
||||
"submissionTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, a submission should be cached",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"userNotesTTL": {
|
||||
"default": 300,
|
||||
"description": "Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached",
|
||||
"examples": [
|
||||
300
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"wikiTTL": {
|
||||
"default": 300,
|
||||
"description": "Amount of time, in seconds, wiki content pages should be cached",
|
||||
"examples": [
|
||||
300
|
||||
],
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
"$ref": "#/definitions/OperatorCacheConfig",
|
||||
"description": "Settings to configure the default caching behavior for this bot\n\nEvery setting not specified will default to what is specified by the global operator caching config"
|
||||
},
|
||||
"credentials": {
|
||||
"$ref": "#/definitions/RedditCredentials",
|
||||
"description": "Credentials required for the bot to interact with Reddit's API\n\nThese credentials will provided to both the API and Web interface unless otherwise specified with the `web.credentials` property\n\nRefer to the [required credentials table](https://github.com/FoxxMD/context-mod/blob/master/docs/operatorConfiguration.md#minimum-required-configuration) to see what is necessary to run the bot.",
|
||||
"examples": [
|
||||
"anyOf": [
|
||||
{
|
||||
"accessToken": "p75_1c467b2",
|
||||
"clientId": "f4b4df1_9oiu",
|
||||
"clientSecret": "34v5q1c564_yt7",
|
||||
"redirectUri": "http://localhost:8085/callback",
|
||||
"refreshToken": "34_f1w1v4"
|
||||
"$ref": "#/definitions/RedditCredentials"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/BotCredentialsJsonConfig"
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -145,6 +98,10 @@
|
||||
"default": false,
|
||||
"description": "If set to `true` all subreddits polling unmoderated/modqueue with default polling settings will share a request to \"r/mod\"\notherwise each subreddit will poll its own mod view\n\n* ENV => `SHARE_MOD`\n* ARG => `--shareMod`",
|
||||
"type": "boolean"
|
||||
},
|
||||
"stagger": {
|
||||
"description": "If sharing a mod stream stagger pushing relevant Activities to individual subreddits.\n\nUseful when running many subreddits and rules are potentially cpu/memory/traffic heavy -- allows spreading out load",
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
@@ -243,6 +200,8 @@
|
||||
"type": "object"
|
||||
},
|
||||
"CacheOptions": {
|
||||
"additionalProperties": {
|
||||
},
|
||||
"description": "Configure granular settings for a cache provider with this object",
|
||||
"properties": {
|
||||
"auth_pass": {
|
||||
@@ -394,6 +353,114 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"OperatorCacheConfig": {
|
||||
"properties": {
|
||||
"actionedEventsDefault": {
|
||||
"default": 25,
|
||||
"description": "The **default** number of Events that the cache will store triggered result summaries for\n\nThese summaries are viewable through the Web UI.\n\nThe value specified cannot be larger than `actionedEventsMax` for the global/bot config (if set)",
|
||||
"type": "number"
|
||||
},
|
||||
"actionedEventsMax": {
|
||||
"default": 25,
|
||||
"description": "The **maximum** number of Events that the cache should store triggered result summaries for\n\nThese summaries are viewable through the Web UI.\n\nThe value specified by a subreddit cannot be larger than the value set by the Operator for the global/bot config (if set)",
|
||||
"type": "number"
|
||||
},
|
||||
"authorTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, author activity history (Comments/Submission) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache\n\n* ENV => `AUTHOR_TTL`\n* ARG => `--authorTTL <sec>`",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"commentTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, a comment should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"filterCriteriaTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)\n\nThis is especially useful if when polling high-volume comments and your checks rely on author/item filters\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/CacheOptions"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"memory",
|
||||
"none",
|
||||
"redis"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"description": "The cache provider and, optionally, a custom configuration for that provider\n\nIf not present or `null` provider will be `memory`.\n\nTo specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`"
|
||||
},
|
||||
"submissionTTL": {
|
||||
"default": 60,
|
||||
"description": "Amount of time, in seconds, a submission should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
60
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"subredditTTL": {
|
||||
"default": 600,
|
||||
"description": "Amount of time, in seconds, a subreddit (attributes) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
600
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"userNotesTTL": {
|
||||
"default": 300,
|
||||
"description": "Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
300
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
},
|
||||
"wikiTTL": {
|
||||
"default": 300,
|
||||
"description": "Amount of time, in seconds, wiki content pages should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
|
||||
"examples": [
|
||||
300
|
||||
],
|
||||
"type": [
|
||||
"number",
|
||||
"boolean"
|
||||
]
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"PollingDefaults": {
|
||||
"properties": {
|
||||
"delayUntil": {
|
||||
@@ -462,6 +529,24 @@
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"ThirdPartyCredentialsJsonConfig": {
|
||||
"additionalProperties": {
|
||||
},
|
||||
"properties": {
|
||||
"youtube": {
|
||||
"properties": {
|
||||
"apiKey": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"apiKey"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"WebCredentials": {
|
||||
"description": "Separate credentials for the web interface can be provided when also running the api.\n\nAll properties not specified will default to values given in ENV/ARG credential properties\n\nRefer to the [required credentials table](https://github.com/FoxxMD/context-mod/blob/master/docs/operatorConfiguration.md#minimum-required-configuration) to see what is necessary for the web interface.",
|
||||
"examples": [
|
||||
@@ -527,6 +612,13 @@
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"caching": {
|
||||
"$ref": "#/definitions/OperatorCacheConfig",
|
||||
"description": "Settings to configure the default caching behavior globally\n\nThese settings will be used by each bot, and subreddit, that does not specify their own"
|
||||
},
|
||||
"credentials": {
|
||||
"$ref": "#/definitions/ThirdPartyCredentialsJsonConfig"
|
||||
},
|
||||
"logging": {
|
||||
"description": "Settings to configure global logging defaults",
|
||||
"properties": {
|
||||
@@ -605,6 +697,21 @@
|
||||
"web": {
|
||||
"description": "Settings for the web interface",
|
||||
"properties": {
|
||||
"caching": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/CacheOptions"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"memory",
|
||||
"redis"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"description": "Caching provider to use for session and invite data\n\nIf none is provided the top-level caching provider is used"
|
||||
},
|
||||
"clients": {
|
||||
"description": "A list of CM Servers this Client should connect to.\n\nIf not specified a default `BotConnection` for this instance is generated",
|
||||
"examples": [
|
||||
@@ -631,6 +738,20 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"invites": {
|
||||
"description": "Settings related to oauth flow invites",
|
||||
"properties": {
|
||||
"maxAge": {
|
||||
"default": 0,
|
||||
"description": "Number of seconds an invite should be valid for\n\n If `0` or not specified (default) invites do not expire",
|
||||
"examples": [
|
||||
0
|
||||
],
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"logLevel": {
|
||||
"description": "The default log level to filter to in the web interface\n\nIf not specified or `null` will be same as global `logLevel`",
|
||||
"enum": [
|
||||
@@ -674,27 +795,16 @@
|
||||
"session": {
|
||||
"description": "Settings to configure the behavior of user sessions -- the session is what the web interface uses to identify logged in users.",
|
||||
"properties": {
|
||||
"provider": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/CacheOptions"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"memory",
|
||||
"redis"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"default": "memory",
|
||||
"description": "The cache provider to use.\n\nThe default should be sufficient for almost all use cases",
|
||||
"maxAge": {
|
||||
"default": 86400,
|
||||
"description": "Number of seconds a session should be valid for.\n\nDefault is 1 day",
|
||||
"examples": [
|
||||
"memory"
|
||||
]
|
||||
86400
|
||||
],
|
||||
"type": "number"
|
||||
},
|
||||
"secret": {
|
||||
"description": "The secret value used to encrypt session data\n\nIf provider is persistent (redis) specifying a value here will ensure sessions are valid between application restarts\n\nWhen not present or `null` a random string is generated on application start",
|
||||
"description": "The secret value used to encrypt session data\n\nIf provider is persistent (`redis`) specifying a value here will ensure sessions are valid between application restarts\n\nWhen not present or `null` a random string is generated on application start",
|
||||
"examples": [
|
||||
"definitelyARandomString"
|
||||
],
|
||||
|
||||
(two additional file diffs suppressed because they are too large)
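To make the caching schema above concrete, here is a minimal sketch of an object shaped like the OperatorCacheConfig definition, using the documented defaults (all values are illustrative):

const caching = {
    provider: 'redis',         // or 'memory', 'none', or a CacheOptions object
    authorTTL: 60,             // seconds; `0` or `true` caches indefinitely, `false` disables caching
    submissionTTL: 60,
    commentTTL: 60,
    filterCriteriaTTL: 60,
    subredditTTL: 600,
    userNotesTTL: 300,
    wikiTTL: 300,
    actionedEventsDefault: 25,
    actionedEventsMax: 25
};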
@@ -3,19 +3,21 @@ import {Logger} from "winston";
|
||||
import {SubmissionCheck} from "../Check/SubmissionCheck";
|
||||
import {CommentCheck} from "../Check/CommentCheck";
|
||||
import {
|
||||
cacheStats,
|
||||
cacheStats, createHistoricalStatsDisplay,
|
||||
createRetryHandler,
|
||||
determineNewResults, formatNumber,
|
||||
mergeArr, parseFromJsonOrYamlToObject, pollingInfo, sleep, totalFromMapStats,
|
||||
determineNewResults, findLastIndex, formatNumber,
|
||||
mergeArr, parseFromJsonOrYamlToObject, pollingInfo, resultsSummary, sleep, totalFromMapStats, triggeredIndicator,
|
||||
} from "../util";
|
||||
import {Poll} from "snoostorm";
|
||||
import pEvent from "p-event";
|
||||
import {RuleResult} from "../Rule";
|
||||
import {ConfigBuilder, buildPollingOptions} from "../ConfigBuilder";
|
||||
import {
|
||||
ActionedEvent,
|
||||
ActionResult,
|
||||
DEFAULT_POLLING_INTERVAL,
|
||||
DEFAULT_POLLING_LIMIT, Invokee,
|
||||
ManagerOptions, ManagerStateChangeOption, PAUSED,
|
||||
ManagerOptions, ManagerStateChangeOption, ManagerStats, PAUSED,
|
||||
PollingOptionsStrong, ResourceStats, RUNNING, RunState, STOPPED, SYSTEM, USER
|
||||
} from "../Common/interfaces";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
@@ -36,6 +38,10 @@ import {queue, QueueObject} from 'async';
|
||||
import {JSONConfig} from "../JsonConfig";
|
||||
import {CheckStructuredJson} from "../Check";
|
||||
import NotificationManager from "../Notification/NotificationManager";
|
||||
import action from "../Web/Server/routes/authenticated/user/action";
|
||||
import {createHistoricalDefaults, historicalDefaults} from "../Common/defaults";
|
||||
import {ExtendedSnoowrap} from "../Utils/SnoowrapClients";
|
||||
import {isRateLimitError} from "../Utils/Errors";
|
||||
|
||||
export interface RunningState {
|
||||
state: RunState,
|
||||
@@ -46,6 +52,7 @@ export interface runCheckOptions {
|
||||
checkNames?: string[],
|
||||
delayUntil?: number,
|
||||
dryRun?: boolean,
|
||||
refresh?: boolean,
|
||||
}
|
||||
|
||||
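// Illustrative sketch (not part of this diff): how runCheckOptions might be populated
// for a targeted, delayed, dry run against a single activity.
const exampleRunOptions: runCheckOptions = {
    checkNames: ['myCheck'],  // only run these checks (matched case-insensitively)
    delayUntil: 60,           // wait until the activity is at least 60 seconds old
    dryRun: true,
    refresh: false,           // set to true to force refreshing activity data before processing
};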
export interface CheckTask {
|
||||
@@ -61,42 +68,15 @@ export interface RuntimeManagerOptions extends ManagerOptions {
|
||||
maxWorkers: number;
|
||||
}
|
||||
|
||||
export interface ManagerStats {
|
||||
eventsCheckedTotal: number
|
||||
eventsCheckedSinceStartTotal: number
|
||||
eventsAvg: number
|
||||
checksRunTotal: number
|
||||
checksRunSinceStartTotal: number
|
||||
checksTriggered: number
|
||||
checksTriggeredTotal: number
|
||||
checksTriggeredSinceStart: number
|
||||
checksTriggeredSinceStartTotal: number
|
||||
rulesRunTotal: number
|
||||
rulesRunSinceStartTotal: number
|
||||
rulesCachedTotal: number
|
||||
rulesCachedSinceStartTotal: number
|
||||
rulesTriggeredTotal: number
|
||||
rulesTriggeredSinceStartTotal: number
|
||||
rulesAvg: number
|
||||
actionsRun: number
|
||||
actionsRunTotal: number
|
||||
actionsRunSinceStart: number,
|
||||
actionsRunSinceStartTotal: number
|
||||
cache: {
|
||||
provider: string,
|
||||
currentKeyCount: number,
|
||||
isShared: boolean,
|
||||
totalRequests: number,
|
||||
totalMiss: number,
|
||||
missPercent: string,
|
||||
requestRate: number,
|
||||
types: ResourceStats
|
||||
},
|
||||
interface QueuedIdentifier {
|
||||
id: string,
|
||||
shouldRefresh: boolean
|
||||
state: 'queued' | 'processing'
|
||||
}
|
||||
|
||||
export class Manager {
|
||||
export class Manager extends EventEmitter {
|
||||
subreddit: Subreddit;
|
||||
client: Snoowrap;
|
||||
client: ExtendedSnoowrap;
|
||||
logger: Logger;
|
||||
botName: string;
|
||||
pollOptions: PollingOptionsStrong[] = [];
|
||||
@@ -115,8 +95,16 @@ export class Manager {
|
||||
sharedModqueue: boolean;
|
||||
cacheManager: BotResourcesManager;
|
||||
globalDryRun?: boolean;
|
||||
emitter: EventEmitter = new EventEmitter();
|
||||
queue: QueueObject<CheckTask>;
|
||||
// firehose is used to ensure all activities from different polling streams are unique
|
||||
// that is -- if the same activities is in both modqueue and unmoderated we don't want to process the activity twice or use stale data
|
||||
//
|
||||
// so all activities get queued to firehose, it keeps track of items by id (using queuedItemsMeta)
|
||||
// and ensures that if any activities are ingested while they are ALSO currently queued or working then they are properly handled by either
|
||||
// 1) if queued, do not re-queue but instead tell worker to refresh before processing
|
||||
// 2) if currently processing then re-queue but also refresh before processing
|
||||
firehose: QueueObject<CheckTask>;
|
||||
queuedItemsMeta: QueuedIdentifier[] = [];
|
||||
globalMaxWorkers: number;
|
||||
subMaxWorkers?: number;
|
||||
|
||||
@@ -145,49 +133,22 @@ export class Manager {
|
||||
// use by api nanny to slow event consumption
|
||||
delayBy?: number;
|
||||
|
||||
eventsCheckedTotal: number = 0;
|
||||
eventsCheckedSinceStartTotal: number = 0;
|
||||
eventsSample: number[] = [];
|
||||
eventsSampleInterval: any;
|
||||
eventsRollingAvg: number = 0;
|
||||
checksRunTotal: number = 0;
|
||||
checksRunSinceStartTotal: number = 0;
|
||||
checksTriggered: Map<string, number> = new Map();
|
||||
checksTriggeredSinceStart: Map<string, number> = new Map();
|
||||
rulesRunTotal: number = 0;
|
||||
rulesRunSinceStartTotal: number = 0;
|
||||
rulesCachedTotal: number = 0;
|
||||
rulesCachedSinceStartTotal: number = 0;
|
||||
rulesTriggeredTotal: number = 0;
|
||||
rulesTriggeredSinceStartTotal: number = 0;
|
||||
rulesUniqueSample: number[] = [];
|
||||
rulesUniqueSampleInterval: any;
|
||||
rulesUniqueRollingAvg: number = 0;
|
||||
actionsRun: Map<string, number> = new Map();
|
||||
actionsRunSinceStart: Map<string, number> = new Map();
|
||||
actionedEvents: ActionedEvent[] = [];
|
||||
|
||||
getStats = async (): Promise<ManagerStats> => {
|
||||
const data: any = {
|
||||
eventsCheckedTotal: this.eventsCheckedTotal,
|
||||
eventsCheckedSinceStartTotal: this.eventsCheckedSinceStartTotal,
|
||||
eventsAvg: formatNumber(this.eventsRollingAvg),
|
||||
checksRunTotal: this.checksRunTotal,
|
||||
checksRunSinceStartTotal: this.checksRunSinceStartTotal,
|
||||
checksTriggered: this.checksTriggered,
|
||||
checksTriggeredTotal: totalFromMapStats(this.checksTriggered),
|
||||
checksTriggeredSinceStart: this.checksTriggeredSinceStart,
|
||||
checksTriggeredSinceStartTotal: totalFromMapStats(this.checksTriggeredSinceStart),
|
||||
rulesRunTotal: this.rulesRunTotal,
|
||||
rulesRunSinceStartTotal: this.rulesRunSinceStartTotal,
|
||||
rulesCachedTotal: this.rulesCachedTotal,
|
||||
rulesCachedSinceStartTotal: this.rulesCachedSinceStartTotal,
|
||||
rulesTriggeredTotal: this.rulesTriggeredTotal,
|
||||
rulesTriggeredSinceStartTotal: this.rulesTriggeredSinceStartTotal,
|
||||
rulesAvg: formatNumber(this.rulesUniqueRollingAvg),
|
||||
actionsRun: this.actionsRun,
|
||||
actionsRunTotal: totalFromMapStats(this.actionsRun),
|
||||
actionsRunSinceStart: this.actionsRunSinceStart,
|
||||
actionsRunSinceStartTotal: totalFromMapStats(this.actionsRunSinceStart),
|
||||
historical: {
|
||||
lastReload: createHistoricalStatsDisplay(createHistoricalDefaults()),
|
||||
allTime: createHistoricalStatsDisplay(createHistoricalDefaults()),
|
||||
},
|
||||
cache: {
|
||||
provider: 'none',
|
||||
currentKeyCount: 0,
|
||||
@@ -203,6 +164,7 @@ export class Manager {
|
||||
if (this.resources !== undefined) {
|
||||
const resStats = await this.resources.getStats();
|
||||
|
||||
data.historical = this.resources.getHistoricalDisplayStats();
|
||||
data.cache = resStats.cache;
|
||||
data.cache.currentKeyCount = await this.resources.getCacheKeyCount();
|
||||
data.cache.isShared = this.resources.cacheSettingsHash === 'default';
|
||||
@@ -219,7 +181,9 @@ export class Manager {
|
||||
return this.displayLabel;
|
||||
}
|
||||
|
||||
constructor(sub: Subreddit, client: Snoowrap, logger: Logger, cacheManager: BotResourcesManager, opts: RuntimeManagerOptions = {botName: 'ContextMod', maxWorkers: 1}) {
|
||||
constructor(sub: Subreddit, client: ExtendedSnoowrap, logger: Logger, cacheManager: BotResourcesManager, opts: RuntimeManagerOptions = {botName: 'ContextMod', maxWorkers: 1}) {
|
||||
super();
|
||||
|
||||
const {dryRun, sharedModqueue = false, wikiLocation = 'botconfig/contextbot', botName, maxWorkers} = opts;
|
||||
this.displayLabel = opts.nickname || `${sub.display_name_prefixed}`;
|
||||
const getLabels = this.getCurrentLabels;
|
||||
@@ -246,11 +210,13 @@ export class Manager {
|
||||
|
||||
this.queue = this.generateQueue(this.getMaxWorkers(this.globalMaxWorkers));
|
||||
this.queue.pause();
|
||||
this.firehose = this.generateFirehose();
|
||||
|
||||
this.eventsSampleInterval = setInterval((function(self) {
|
||||
return function() {
|
||||
const et = self.resources !== undefined ? self.resources.stats.historical.allTime.eventsCheckedTotal : 0;
|
||||
const rollingSample = self.eventsSample.slice(0, 7)
|
||||
rollingSample.unshift(self.eventsCheckedTotal)
|
||||
rollingSample.unshift(et)
|
||||
self.eventsSample = rollingSample;
|
||||
const diff = self.eventsSample.reduceRight((acc: number[], curr, index) => {
|
||||
if(self.eventsSample[index + 1] !== undefined) {
|
||||
@@ -270,7 +236,8 @@ export class Manager {
|
||||
this.rulesUniqueSampleInterval = setInterval((function(self) {
|
||||
return function() {
|
||||
const rollingSample = self.rulesUniqueSample.slice(0, 7)
|
||||
rollingSample.unshift(self.rulesRunTotal - self.rulesCachedTotal);
|
||||
const rt = self.resources !== undefined ? self.resources.stats.historical.allTime.rulesRunTotal - self.resources.stats.historical.allTime.rulesCachedTotal : 0;
|
||||
rollingSample.unshift(rt);
|
||||
self.rulesUniqueSample = rollingSample;
|
||||
const diff = self.rulesUniqueSample.reduceRight((acc: number[], curr, index) => {
|
||||
if(self.rulesUniqueSample[index + 1] !== undefined) {
|
||||
@@ -306,6 +273,32 @@ export class Manager {
|
||||
return maxWorkers;
|
||||
}
|
||||
|
||||
protected generateFirehose() {
|
||||
return queue(async (task: CheckTask, cb) => {
|
||||
// items in queuedItemsMeta are processed FIFO so earlier elements (by index) are older
|
||||
//
|
||||
// if we insert the same item again because it is currently being processed AND THEN we get the item AGAIN we only want to update the newest meta
|
||||
// so search the array backwards to get the newest only
|
||||
const queuedItemIndex = findLastIndex(this.queuedItemsMeta, x => x.id === task.activity.id);
|
||||
if(queuedItemIndex !== -1) {
|
||||
const itemMeta = this.queuedItemsMeta[queuedItemIndex];
|
||||
let msg = `Item ${itemMeta.id} is already ${itemMeta.state}.`;
|
||||
if(itemMeta.state === 'queued') {
|
||||
this.logger.debug(`${msg} Flagging to refresh data before processing.`);
|
||||
this.queuedItemsMeta.splice(queuedItemIndex, 1, {...itemMeta, shouldRefresh: true});
|
||||
} else {
|
||||
this.logger.debug(`${msg} Re-queuing item but will also refresh data before processing.`);
|
||||
this.queuedItemsMeta.push({id: task.activity.id, shouldRefresh: true, state: 'queued'});
|
||||
this.queue.push(task);
|
||||
}
|
||||
} else {
|
||||
this.queuedItemsMeta.push({id: task.activity.id, shouldRefresh: false, state: 'queued'});
|
||||
this.queue.push(task);
|
||||
}
|
||||
}
|
||||
, 1);
|
||||
}
|
||||
|
||||
protected generateQueue(maxWorkers: number) {
|
||||
if (maxWorkers > 1) {
|
||||
this.logger.warn(`Setting max queue workers above 1 (specified: ${maxWorkers}) may have detrimental effects to log readability and api usage. Consult the documentation before using this advanced/experimental feature.`);
|
||||
@@ -316,7 +309,16 @@ export class Manager {
|
||||
this.logger.debug(`SOFT API LIMIT MODE: Delaying Event run by ${this.delayBy} seconds`);
|
||||
await sleep(this.delayBy * 1000);
|
||||
}
|
||||
await this.runChecks(task.checkType, task.activity, task.options);
|
||||
|
||||
const queuedItemIndex = this.queuedItemsMeta.findIndex(x => x.id === task.activity.id);
|
||||
try {
|
||||
const itemMeta = this.queuedItemsMeta[queuedItemIndex];
|
||||
this.queuedItemsMeta.splice(queuedItemIndex, 1, {...itemMeta, state: 'processing'});
|
||||
await this.runChecks(task.checkType, task.activity, {...task.options, refresh: itemMeta.shouldRefresh});
|
||||
} finally {
|
||||
// always remove item meta regardless of success or failure since we are done with it meow
|
||||
this.queuedItemsMeta.splice(queuedItemIndex, 1);
|
||||
}
|
||||
}
|
||||
, maxWorkers);
|
||||
q.error((err, task) => {
|
||||
@@ -331,7 +333,7 @@ export class Manager {
|
||||
return q;
|
||||
}
|
||||
|
||||
protected parseConfigurationFromObject(configObj: object) {
|
||||
protected async parseConfigurationFromObject(configObj: object) {
|
||||
try {
|
||||
const configBuilder = new ConfigBuilder({logger: this.logger});
|
||||
const validJson = configBuilder.validateJson(configObj);
|
||||
@@ -339,6 +341,7 @@ export class Manager {
|
||||
const {
|
||||
polling = [{pollOn: 'unmoderated', limit: DEFAULT_POLLING_LIMIT, interval: DEFAULT_POLLING_INTERVAL}],
|
||||
caching,
|
||||
credentials,
|
||||
dryRun,
|
||||
footer,
|
||||
nickname,
|
||||
@@ -379,9 +382,10 @@ export class Manager {
|
||||
logger: this.logger,
|
||||
subreddit: this.subreddit,
|
||||
caching,
|
||||
credentials,
|
||||
client: this.client,
|
||||
};
|
||||
this.resources = this.cacheManager.set(this.subreddit.display_name, resourceConfig);
|
||||
this.resources = await this.cacheManager.set(this.subreddit.display_name, resourceConfig);
|
||||
this.resources.setLogger(this.logger);
|
||||
|
||||
this.logger.info('Subreddit-specific options updated');
|
||||
@@ -390,6 +394,10 @@ export class Manager {
|
||||
const commentChecks: Array<CommentCheck> = [];
|
||||
const subChecks: Array<SubmissionCheck> = [];
|
||||
const structuredChecks = configBuilder.parseToStructured(validJson);
|
||||
|
||||
// TODO check that bot has permissions for subreddit for all specified actions
|
||||
// can find permissions in this.subreddit.mod_permissions
|
||||
|
||||
for (const jCheck of structuredChecks) {
|
||||
const checkConfig = {
|
||||
...jCheck,
|
||||
@@ -415,7 +423,7 @@ export class Manager {
|
||||
this.logger.info(checkSummary);
|
||||
}
|
||||
this.validConfigLoaded = true;
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
this.validConfigLoaded = false;
|
||||
throw err;
|
||||
}
|
||||
@@ -456,7 +464,7 @@ export class Manager {
|
||||
|
||||
this.lastWikiRevision = revisionDate;
|
||||
sourceData = await wiki.content_md;
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
const msg = `Could not read wiki configuration. Please ensure the page https://reddit.com${this.subreddit.url}wiki/${this.wikiLocation} exists and is readable -- error: ${err.message}`;
|
||||
this.logger.error(msg);
|
||||
throw new ConfigParseError(msg);
|
||||
@@ -476,7 +484,7 @@ export class Manager {
|
||||
throw new ConfigParseError('Could not parse wiki page contents as JSON or YAML')
|
||||
}
|
||||
|
||||
this.parseConfigurationFromObject(configObj);
|
||||
await this.parseConfigurationFromObject(configObj);
|
||||
this.logger.info('Checks updated');
|
||||
|
||||
if(!suppressNotification) {
|
||||
@@ -484,7 +492,7 @@ export class Manager {
|
||||
}
|
||||
|
||||
return true;
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
this.validConfigLoaded = false;
|
||||
throw err;
|
||||
}
|
||||
@@ -493,8 +501,6 @@ export class Manager {
|
||||
async runChecks(checkType: ('Comment' | 'Submission'), activity: (Submission | Comment), options?: runCheckOptions): Promise<void> {
|
||||
const checks = checkType === 'Comment' ? this.commentChecks : this.submissionChecks;
|
||||
let item = activity;
|
||||
this.eventsCheckedTotal++;
|
||||
this.eventsCheckedSinceStartTotal++;
|
||||
const itemId = await item.id;
|
||||
let allRuleResults: RuleResult[] = [];
|
||||
const itemIdentifier = `${checkType === 'Submission' ? 'SUB' : 'COM'} ${itemId}`;
|
||||
@@ -504,46 +510,73 @@ export class Manager {
|
||||
const [peek, _] = await itemContentPeek(item);
|
||||
ePeek = peek;
|
||||
this.logger.info(`<EVENT> ${peek}`);
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
this.logger.error(`Error occurred while generating item peek for ${checkType} Activity ${itemId}`, err);
|
||||
}
|
||||
|
||||
const {
|
||||
checkNames = [],
|
||||
delayUntil,
|
||||
dryRun,
|
||||
} = options || {};
|
||||
|
||||
if (delayUntil !== undefined) {
|
||||
const created = dayjs.unix(item.created_utc);
|
||||
const diff = dayjs().diff(created, 's');
|
||||
if (diff < delayUntil) {
|
||||
this.logger.verbose(`Delaying processing until Activity is ${delayUntil} seconds old (${delayUntil - diff}s)`);
|
||||
await sleep(delayUntil - diff);
|
||||
// @ts-ignore
|
||||
item = await activity.refresh();
|
||||
}
|
||||
}
|
||||
|
||||
const startingApiLimit = this.client.ratelimitRemaining;
|
||||
|
||||
if (item instanceof Submission) {
|
||||
if (await item.removed_by_category === 'deleted') {
|
||||
this.logger.warn('Submission was deleted, cannot process.');
|
||||
return;
|
||||
}
|
||||
} else if (item.author.name === '[deleted]') {
|
||||
this.logger.warn('Comment was deleted, cannot process.');
|
||||
return;
|
||||
}
|
||||
|
||||
let checksRun = 0;
|
||||
let actionsRun = 0;
|
||||
let totalRulesRun = 0;
|
||||
let runActions: Action[] = [];
|
||||
let runActions: ActionResult[] = [];
|
||||
let actionedEvent: ActionedEvent = {
|
||||
subreddit: this.subreddit.display_name_prefixed,
|
||||
activity: {
|
||||
peek: ePeek,
|
||||
link: item.permalink
|
||||
},
|
||||
author: item.author.name,
|
||||
timestamp: Date.now(),
|
||||
check: '',
|
||||
ruleSummary: '',
|
||||
ruleResults: [],
|
||||
actionResults: [],
|
||||
}
|
||||
let triggered = false;
|
||||
let triggeredCheckName;
|
||||
const checksRunNames = [];
|
||||
const cachedCheckNames = [];
|
||||
const startingApiLimit = this.client.ratelimitRemaining;
|
||||
|
||||
const {
|
||||
checkNames = [],
|
||||
delayUntil,
|
||||
dryRun,
|
||||
refresh = false,
|
||||
} = options || {};
|
||||
|
||||
let wasRefreshed = false;
|
||||
|
||||
try {
|
||||
let triggered = false;
|
||||
|
||||
if (delayUntil !== undefined) {
|
||||
const created = dayjs.unix(item.created_utc);
|
||||
const diff = dayjs().diff(created, 's');
|
||||
if (diff < delayUntil) {
|
||||
this.logger.verbose(`Delaying processing until Activity is ${delayUntil} seconds old (${delayUntil - diff}s)`);
|
||||
await sleep(delayUntil - diff);
|
||||
// @ts-ignore
|
||||
item = await activity.refresh();
|
||||
wasRefreshed = true;
|
||||
}
|
||||
}
|
||||
// refresh signal from firehose if activity was ingested multiple times before processing or re-queued while processing
|
||||
// want to make sure we have the most recent data
|
||||
if(!wasRefreshed && refresh === true) {
|
||||
this.logger.verbose('Refreshed data (probably due to signal from firehose)');
|
||||
// @ts-ignore
|
||||
item = await activity.refresh();
|
||||
}
|
||||
|
||||
if (item instanceof Submission) {
|
||||
if (await item.removed_by_category === 'deleted') {
|
||||
this.logger.warn('Submission was deleted, cannot process.');
|
||||
return;
|
||||
}
|
||||
} else if (item.author.name === '[deleted]') {
|
||||
this.logger.warn('Comment was deleted, cannot process.');
|
||||
return;
|
||||
}
|
||||
|
||||
for (const check of checks) {
|
||||
if (checkNames.length > 0 && !checkNames.map(x => x.toLowerCase()).some(x => x === check.name.toLowerCase())) {
|
||||
this.logger.warn(`Check ${check.name} not in array of requested checks to run, skipping...`);
|
||||
@@ -553,30 +586,47 @@ export class Manager {
|
||||
this.logger.info(`Check ${check.name} not run because it is not enabled, skipping...`);
|
||||
continue;
|
||||
}
|
||||
checksRunNames.push(check.name);
|
||||
checksRun++;
|
||||
triggered = false;
|
||||
let isFromCache = false;
|
||||
let currentResults: RuleResult[] = [];
|
||||
try {
|
||||
const [checkTriggered, checkResults] = await check.runRules(item, allRuleResults);
|
||||
await check.setCacheResult(item, checkTriggered);
|
||||
const [checkTriggered, checkResults, fromCache = false] = await check.runRules(item, allRuleResults);
|
||||
isFromCache = fromCache;
|
||||
if(!fromCache) {
|
||||
await check.setCacheResult(item, {result: checkTriggered, ruleResults: checkResults});
|
||||
} else {
|
||||
cachedCheckNames.push(check.name);
|
||||
}
|
||||
currentResults = checkResults;
|
||||
totalRulesRun += checkResults.length;
|
||||
allRuleResults = allRuleResults.concat(determineNewResults(allRuleResults, checkResults));
|
||||
triggered = checkTriggered;
|
||||
} catch (e) {
|
||||
if(triggered && fromCache && !check.cacheUserResult.runActions) {
|
||||
this.logger.info('Check was triggered but cache result options specified NOT to run actions...counting as check NOT triggered');
|
||||
triggered = false;
|
||||
}
|
||||
} catch (e: any) {
|
||||
if (e.logged !== true) {
|
||||
this.logger.warn(`Running rules for Check ${check.name} failed due to uncaught exception`, e);
|
||||
}
|
||||
}
|
||||
|
||||
if (triggered) {
|
||||
this.checksTriggered.set(check.name, (this.checksTriggered.get(check.name) || 0) + 1);
|
||||
this.checksTriggeredSinceStart.set(check.name, (this.checksTriggeredSinceStart.get(check.name) || 0) + 1);
|
||||
triggeredCheckName = check.name;
|
||||
actionedEvent.check = check.name;
|
||||
actionedEvent.ruleResults = currentResults;
|
||||
if(isFromCache) {
|
||||
actionedEvent.ruleSummary = `Check result was found in cache: ${triggeredIndicator(true)}`;
|
||||
} else {
|
||||
actionedEvent.ruleSummary = resultsSummary(currentResults, check.condition);
|
||||
}
|
||||
runActions = await check.runActions(item, currentResults.filter(x => x.triggered), dryRun);
|
||||
actionsRun = runActions.length;
|
||||
|
||||
if(check.notifyOnTrigger) {
|
||||
const ar = runActions.map(x => x.getActionUniqueName()).join(', ');
|
||||
const ar = runActions.map(x => x.name).join(', ');
|
||||
this.notificationManager.handle('eventActioned', 'Check Triggered', `Check "${check.name}" was triggered on Event: \n\n ${ePeek} \n\n with the following actions run: ${ar}`);
|
||||
}
|
||||
break;
|
||||
@@ -587,35 +637,35 @@ export class Manager {
|
||||
this.logger.info('No checks triggered');
|
||||
}
|
||||
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
if (!(err instanceof LoggedError) && err.logged !== true) {
|
||||
this.logger.error('An unhandled error occurred while running checks', err);
|
||||
}
|
||||
this.emit('error', err);
|
||||
} finally {
|
||||
try {
|
||||
const cachedTotal = totalRulesRun - allRuleResults.length;
|
||||
const triggeredRulesTotal = allRuleResults.filter(x => x.triggered).length;
|
||||
|
||||
this.checksRunTotal += checksRun;
|
||||
this.checksRunSinceStartTotal += checksRun;
|
||||
this.rulesRunTotal += totalRulesRun;
|
||||
this.rulesRunSinceStartTotal += totalRulesRun;
|
||||
this.rulesCachedTotal += cachedTotal;
|
||||
this.rulesCachedSinceStartTotal += cachedTotal;
|
||||
this.rulesTriggeredTotal += triggeredRulesTotal;
|
||||
this.rulesTriggeredSinceStartTotal += triggeredRulesTotal;
|
||||
|
||||
for (const a of runActions) {
|
||||
const name = a.getActionUniqueName();
|
||||
this.actionsRun.set(name, (this.actionsRun.get(name) || 0) + 1);
|
||||
this.actionsRunSinceStart.set(name, (this.actionsRunSinceStart.get(name) || 0) + 1)
|
||||
actionedEvent.actionResults = runActions;
|
||||
if(triggered) {
|
||||
await this.resources.addActionedEvent(actionedEvent);
|
||||
}
|
||||
|
||||
this.logger.verbose(`Run Stats: Checks ${checksRun} | Rules => Total: ${totalRulesRun} Unique: ${allRuleResults.length} Cached: ${totalRulesRun - allRuleResults.length} Rolling Avg: ~${formatNumber(this.rulesUniqueRollingAvg)}/s | Actions ${actionsRun}`);
|
||||
this.logger.verbose(`Reddit API Stats: Initial ${startingApiLimit} | Current ${this.client.ratelimitRemaining} | Used ~${startingApiLimit - this.client.ratelimitRemaining} | Events ~${formatNumber(this.eventsRollingAvg)}/s`);
|
||||
this.currentLabels = [];
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
this.logger.error('Error occurred while cleaning up Activity check and generating stats', err);
|
||||
} finally {
|
||||
this.resources.updateHistoricalStats({
|
||||
eventsCheckedTotal: 1,
|
||||
eventsActionedTotal: triggered ? 1 : 0,
|
||||
checksTriggered: triggeredCheckName !== undefined ? [triggeredCheckName] : [],
|
||||
checksRun: checksRunNames,
|
||||
checksFromCache: cachedCheckNames,
|
||||
actionsRun: runActions.map(x => x.name),
|
||||
rulesRun: allRuleResults.map(x => x.name),
|
||||
rulesTriggered: allRuleResults.filter(x => x.triggered).map(x => x.name),
|
||||
rulesCachedTotal: totalRulesRun - allRuleResults.length,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -624,7 +674,7 @@ export class Manager {
|
||||
// give current handle() time to stop
|
||||
//await sleep(1000);
|
||||
|
||||
const retryHandler = createRetryHandler({maxRequestRetry: 5, maxOtherRetry: 1}, this.logger);
|
||||
const retryHandler = createRetryHandler({maxRequestRetry: 3, maxOtherRetry: 1}, this.logger);
|
||||
|
||||
const subName = this.subreddit.display_name;
|
||||
|
||||
@@ -633,7 +683,8 @@ export class Manager {
|
||||
pollOn,
|
||||
limit,
|
||||
interval,
|
||||
delayUntil
|
||||
delayUntil,
|
||||
clearProcessed,
|
||||
} = pollOpt;
|
||||
let stream: SPoll<Snoowrap.Submission | Snoowrap.Comment>;
|
||||
let modStreamType: string | undefined;
|
||||
@@ -649,6 +700,7 @@ export class Manager {
|
||||
subreddit: this.subreddit.display_name,
|
||||
limit: limit,
|
||||
pollTime: interval * 1000,
|
||||
clearProcessed,
|
||||
});
|
||||
}
|
||||
break;
|
||||
@@ -662,6 +714,7 @@ export class Manager {
|
||||
subreddit: this.subreddit.display_name,
|
||||
limit: limit,
|
||||
pollTime: interval * 1000,
|
||||
clearProcessed
|
||||
});
|
||||
}
|
||||
break;
|
||||
@@ -670,6 +723,7 @@ export class Manager {
|
||||
subreddit: this.subreddit.display_name,
|
||||
limit: limit,
|
||||
pollTime: interval * 1000,
|
||||
clearProcessed
|
||||
});
|
||||
break;
|
||||
case 'newComm':
|
||||
@@ -677,6 +731,7 @@ export class Manager {
|
||||
subreddit: this.subreddit.display_name,
|
||||
limit: limit,
|
||||
pollTime: interval * 1000,
|
||||
clearProcessed
|
||||
});
|
||||
break;
|
||||
}
|
||||
@@ -710,25 +765,30 @@ export class Manager {
|
||||
checkType = 'Comment';
|
||||
}
|
||||
if (checkType !== undefined) {
|
||||
this.queue.push({checkType, activity: item, options: {delayUntil}})
|
||||
this.firehose.push({checkType, activity: item, options: {delayUntil}})
|
||||
}
|
||||
};
|
||||
|
||||
stream.on('item', onItem);
|
||||
|
||||
if (modStreamType !== undefined) {
|
||||
this.modStreamCallbacks.set(pollOn, onItem);
|
||||
} else {
|
||||
stream.on('item', onItem);
|
||||
// @ts-ignore
|
||||
stream.on('error', async (err: any) => {
|
||||
|
||||
this.emit('error', err);
|
||||
|
||||
if(isRateLimitError(err)) {
|
||||
this.logger.error('Encountered rate limit while polling! Bot is all out of requests :( Stopping subreddit queue and polling.');
|
||||
await this.stop();
|
||||
}
|
||||
this.logger.error('Polling error occurred', err);
|
||||
const shouldRetry = await retryHandler(err);
|
||||
if (shouldRetry) {
|
||||
stream.startInterval();
|
||||
} else {
|
||||
this.logger.warn('Pausing event polling due to too many errors');
|
||||
await this.pauseEvents();
|
||||
this.logger.warn('Stopping subreddit processing/polling due to too many errors');
|
||||
await this.stop();
|
||||
}
|
||||
});
|
||||
this.streams.push(stream);
|
||||
@@ -743,14 +803,19 @@ export class Manager {
|
||||
} else if (!this.validConfigLoaded) {
|
||||
this.logger.warn('Cannot start activity processing queue while manager has an invalid configuration');
|
||||
} else {
|
||||
if(this.queueState.state === STOPPED) {
|
||||
// extra precaution to make sure queue meta is cleared before starting queue
|
||||
this.queuedItemsMeta = [];
|
||||
}
|
||||
this.queue.resume();
|
||||
this.firehose.resume();
|
||||
this.logger.info(`Activity processing queue started RUNNING with ${this.queue.length()} queued activities`);
|
||||
this.queueState = {
|
||||
state: RUNNING,
|
||||
causedBy
|
||||
}
|
||||
if(!suppressNotification) {
|
||||
this.notificationManager.handle('runStateChanged', 'Queue Started', reason, causedBy)
|
||||
this.notificationManager.handle('runStateChanged', 'Queue Started', reason, causedBy);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -807,12 +872,23 @@ export class Manager {
|
||||
} else {
|
||||
const pauseWaitStart = dayjs();
|
||||
this.logger.info(`Activity processing queue is stopping...waiting for ${this.queue.running()} activities to finish processing`);
|
||||
const fullStopTime = dayjs().add(5, 'seconds');
|
||||
let gracefulStop = true;
|
||||
while (this.queue.running() > 0) {
|
||||
gracefulStop = false;
|
||||
if(dayjs().isAfter(fullStopTime)) {
|
||||
break;
|
||||
}
|
||||
await sleep(1500);
|
||||
this.logger.verbose(`Activity processing queue is stopping...waiting for ${this.queue.running()} activities to finish processing`);
|
||||
}
|
||||
if(!gracefulStop) {
|
||||
this.logger.warn('Waited longer than 5 seconds to stop activities. Something isn\'t right so forcing stop :/ ');
|
||||
}
|
||||
this.logger.info(`Activity processing queue stopped by ${causedBy} and ${this.queue.length()} queued activities cleared (waited ${dayjs().diff(pauseWaitStart, 's')} seconds while activity processing finished)`);
|
||||
this.firehose.kill();
|
||||
this.queue.kill();
|
||||
this.queuedItemsMeta = [];
|
||||
}
|
||||
|
||||
this.queueState = {
|
||||
@@ -891,18 +967,12 @@ export class Manager {
|
||||
s.end();
|
||||
}
|
||||
this.streams = [];
|
||||
for (const [k, v] of this.modStreamCallbacks) {
|
||||
const stream = this.cacheManager.modStreams.get(k) as Poll<Snoowrap.Submission | Snoowrap.Comment>;
|
||||
stream.removeListener('item', v);
|
||||
}
|
||||
// for (const [k, v] of this.modStreamCallbacks) {
|
||||
// const stream = this.cacheManager.modStreams.get(k) as Poll<Snoowrap.Submission | Snoowrap.Comment>;
|
||||
// stream.removeListener('item', v);
|
||||
// }
|
||||
this.modStreamCallbacks = new Map();
|
||||
this.startedAt = undefined;
|
||||
this.eventsCheckedSinceStartTotal = 0;
|
||||
this.checksRunSinceStartTotal = 0;
|
||||
this.rulesRunSinceStartTotal = 0;
|
||||
this.rulesCachedSinceStartTotal = 0;
|
||||
this.rulesTriggeredSinceStartTotal = 0;
|
||||
this.checksTriggeredSinceStart = new Map();
|
||||
this.actionsRunSinceStart = new Map();
|
||||
this.logger.info(`Events STOPPED by ${causedBy}`);
|
||||
this.eventsState = {
|
||||
state: STOPPED,
|
||||
|
||||
@@ -2,51 +2,110 @@ import {Poll, SnooStormOptions} from "snoostorm"
|
||||
import Snoowrap from "snoowrap";
|
||||
import {EventEmitter} from "events";
|
||||
import {PollConfiguration} from "snoostorm/out/util/Poll";
|
||||
import {DEFAULT_POLLING_INTERVAL} from "../Common/interfaces";
|
||||
import {ClearProcessedOptions, DEFAULT_POLLING_INTERVAL} from "../Common/interfaces";
|
||||
import dayjs, {Dayjs} from "dayjs";
|
||||
import { Duration } from "dayjs/plugin/duration";
|
||||
import {parseDuration, setRandomInterval, sleep} from "../util";
|
||||
|
||||
type Awaitable<T> = Promise<T> | T;
|
||||
|
||||
interface RCBPollingOptions extends SnooStormOptions {
|
||||
subreddit: string,
|
||||
clearProcessed?: ClearProcessedOptions
|
||||
}
|
||||
|
||||
interface RCBPollConfiguration<T> extends PollConfiguration<T> {
|
||||
clearProcessed?: ClearProcessedOptions
|
||||
}
|
||||
|
||||
export class SPoll<T extends object> extends Poll<T> {
|
||||
identifier: keyof T;
|
||||
getter: () => Awaitable<T[]>;
|
||||
frequency;
|
||||
running: boolean = false;
|
||||
clearProcessedDuration?: Duration;
|
||||
clearProcessedSize?: number;
|
||||
clearProcessedAfter?: Dayjs;
|
||||
retainProcessed: number = 0;
|
||||
randInterval?: { clear: () => void };
|
||||
|
||||
constructor(options: PollConfiguration<T>) {
|
||||
constructor(options: RCBPollConfiguration<T>) {
|
||||
super(options);
|
||||
this.identifier = options.identifier;
|
||||
this.getter = options.get;
|
||||
this.frequency = options.frequency;
|
||||
const {
|
||||
after,
|
||||
size,
|
||||
retain = 0,
|
||||
} = options.clearProcessed || {};
|
||||
if(after !== undefined) {
|
||||
this.clearProcessedDuration = parseDuration(after);
|
||||
}
|
||||
this.clearProcessedSize = size;
|
||||
this.retainProcessed = retain;
|
||||
if (this.clearProcessedDuration !== undefined) {
|
||||
this.clearProcessedAfter = dayjs().add(this.clearProcessedDuration.asSeconds(), 's');
|
||||
}
|
||||
clearInterval(this.interval);
|
||||
}
|
||||
|
||||
startInterval = () => {
|
||||
this.running = true;
|
||||
this.interval = setInterval(async () => {
|
||||
try {
|
||||
const batch = await this.getter();
|
||||
const newItems: T[] = [];
|
||||
for (const item of batch) {
|
||||
const id = item[this.identifier];
|
||||
if (this.processed.has(id)) continue;
|
||||
this.randInterval = setRandomInterval((function (self) {
|
||||
return async () => {
|
||||
try {
|
||||
// DEBUGGING
|
||||
//
|
||||
// Removing processed clearing to see if it fixes weird, duplicate/delayed comment processing behavior
|
||||
//
|
||||
// clear the tracked, processed activity ids after a set period or number of activities have been processed
|
||||
// because when RCB is long-running and has streams from high-volume subreddits this list never gets smaller...
|
||||
|
||||
// Emit for new items and add it to the list
|
||||
newItems.push(item);
|
||||
this.processed.add(id);
|
||||
this.emit("item", item);
|
||||
// so clear if after time period
|
||||
// if ((self.clearProcessedAfter !== undefined && dayjs().isSameOrAfter(self.clearProcessedAfter))
|
||||
// // or clear if processed list is larger than defined max allowable size (default setting, 2 * polling option limit)
|
||||
// || (self.clearProcessedSize !== undefined && self.processed.size >= self.clearProcessedSize)) {
|
||||
// if (self.retainProcessed === 0) {
|
||||
// self.processed = new Set();
|
||||
// } else {
|
||||
// // retain some processed so we have continuity between processed list resets -- this is default behavior and retains polling option limit # of activities
|
||||
// // we can slice from the set here because ID order is guaranteed for Set object so list is oldest -> newest
|
||||
// // -- retain last LIMIT number of activities (or all if retain # is larger than list due to user config error)
|
||||
// self.processed = new Set(Array.from(self.processed).slice(Math.max(0, self.processed.size - self.retainProcessed)));
|
||||
// }
|
||||
// // reset time interval if there is one
|
||||
// if (self.clearProcessedAfter !== undefined && self.clearProcessedDuration !== undefined) {
|
||||
// self.clearProcessedAfter = dayjs().add(self.clearProcessedDuration.asSeconds(), 's');
|
||||
// }
|
||||
// }
|
||||
const batch = await self.getter();
|
||||
const newItems: T[] = [];
|
||||
for (const item of batch) {
|
||||
const id = item[self.identifier];
|
||||
if (self.processed.has(id)) continue;
|
||||
|
||||
// Emit for new items and add it to the list
|
||||
newItems.push(item);
|
||||
self.processed.add(id);
|
||||
self.emit("item", item);
|
||||
}
|
||||
|
||||
// Emit the new listing of all new items
|
||||
self.emit("listing", newItems);
|
||||
} catch (err: any) {
|
||||
self.emit('error', err);
|
||||
self.end();
|
||||
}
|
||||
|
||||
// Emit the new listing of all new items
|
||||
this.emit("listing", newItems);
|
||||
} catch (err) {
|
||||
this.emit('error', err);
|
||||
this.end();
|
||||
}
|
||||
}, this.frequency);
|
||||
})(this), this.frequency - 1, this.frequency + 1);
|
||||
}
|
||||
|
||||
end = () => {
|
||||
this.running = false;
|
||||
if(this.randInterval !== undefined) {
|
||||
this.randInterval.clear();
|
||||
}
|
||||
super.end();
|
||||
}
|
||||
}
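The commented-out clearing logic above amounts to: once the processed-ID set passes an age or size threshold, rebuild it keeping only the newest entries so polling retains some continuity. A minimal, illustrative sketch of just that trim step (the helper name and shape are hypothetical, not part of this diff):
// Illustrative sketch only -- mirrors the disabled clearing logic described in the comments above
const pruneProcessed = <ID>(processed: Set<ID>, maxSize: number, retain: number): Set<ID> => {
    if (processed.size < maxSize) {
        return processed;
    }
    if (retain === 0) {
        return new Set<ID>();
    }
    // Set iteration order is insertion order, so entries run oldest -> newest;
    // keep only the last `retain` ids (or everything, if retain exceeds the current size)
    return new Set<ID>(Array.from(processed).slice(Math.max(0, processed.size - retain)));
};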
|
||||
@@ -54,11 +113,12 @@ export class SPoll<T extends object> extends Poll<T> {
|
||||
export class UnmoderatedStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
|
||||
constructor(
|
||||
client: Snoowrap,
|
||||
options: SnooStormOptions & { subreddit: string }) {
|
||||
options: RCBPollingOptions) {
|
||||
super({
|
||||
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
|
||||
get: async () => client.getSubreddit(options.subreddit).getUnmoderated(options),
|
||||
identifier: "id",
|
||||
clearProcessed: options.clearProcessed
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -66,11 +126,12 @@ export class UnmoderatedStream extends SPoll<Snoowrap.Submission | Snoowrap.Comm
|
||||
export class ModQueueStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
|
||||
constructor(
|
||||
client: Snoowrap,
|
||||
options: SnooStormOptions & { subreddit: string }) {
|
||||
options: RCBPollingOptions) {
|
||||
super({
|
||||
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
|
||||
get: async () => client.getSubreddit(options.subreddit).getModqueue(options),
|
||||
identifier: "id",
|
||||
clearProcessed: options.clearProcessed
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -78,11 +139,12 @@ export class ModQueueStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment
|
||||
export class SubmissionStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
|
||||
constructor(
|
||||
client: Snoowrap,
|
||||
options: SnooStormOptions & { subreddit: string }) {
|
||||
options: RCBPollingOptions) {
|
||||
super({
|
||||
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
|
||||
get: async () => client.getNew(options.subreddit, options),
|
||||
identifier: "id",
|
||||
clearProcessed: options.clearProcessed
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -90,11 +152,12 @@ export class SubmissionStream extends SPoll<Snoowrap.Submission | Snoowrap.Comme
|
||||
export class CommentStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
|
||||
constructor(
|
||||
client: Snoowrap,
|
||||
options: SnooStormOptions & { subreddit: string }) {
|
||||
options: RCBPollingOptions) {
|
||||
super({
|
||||
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
|
||||
get: async () => client.getNewComments(options.subreddit, options),
|
||||
identifier: "id",
|
||||
clearProcessed: options.clearProcessed
|
||||
});
|
||||
}
|
||||
}
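For context, a minimal sketch of wiring one of these streams up with the new clearProcessed option (the subreddit, timings, and thresholds below are made-up values; assumes an authenticated Snoowrap client):
// Illustrative only -- option names come from RCBPollingOptions/ClearProcessedOptions above
const stream = new CommentStream(client, {
    subreddit: 'mealtimevideos',
    limit: 50,
    pollTime: 10000, // ms
    clearProcessed: {after: '1 hour', size: 100, retain: 50},
});
stream.on('item', (item) => console.log(`new comment ${item.id}`));
stream.startInterval();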
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,12 +1,19 @@
|
||||
import dayjs, {Dayjs} from "dayjs";
|
||||
import {Comment, RedditUser, WikiPage} from "snoowrap";
|
||||
import {COMMENT_URL_ID, deflateUserNotes, inflateUserNotes, parseLinkIdentifier, SUBMISSION_URL_ID} from "../util";
|
||||
import {
|
||||
COMMENT_URL_ID,
|
||||
deflateUserNotes, getActivityAuthorName,
|
||||
inflateUserNotes,
|
||||
parseLinkIdentifier,
|
||||
SUBMISSION_URL_ID
|
||||
} from "../util";
|
||||
import Subreddit from "snoowrap/dist/objects/Subreddit";
|
||||
import {Logger} from "winston";
|
||||
import LoggedError from "../Utils/LoggedError";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import {RichContent} from "../Common/interfaces";
|
||||
import {Cache} from 'cache-manager';
|
||||
import {isScopeError} from "../Utils/Errors";
|
||||
|
||||
interface RawUserNotesPayload {
|
||||
ver: number,
|
||||
@@ -48,7 +55,7 @@ export interface RawNote {
|
||||
export type UserNotesConstants = Pick<any, "users" | "warnings">;
|
||||
|
||||
export class UserNotes {
|
||||
notesTTL: number;
|
||||
notesTTL: number | false;
|
||||
subreddit: Subreddit;
|
||||
wiki: WikiPage;
|
||||
moderators?: RedditUser[];
|
||||
@@ -63,8 +70,8 @@ export class UserNotes {
|
||||
debounceCB: any;
|
||||
batchCount: number = 0;
|
||||
|
||||
constructor(ttl: number, subreddit: Subreddit, logger: Logger, cache: Cache, cacheCB: Function) {
|
||||
this.notesTTL = ttl;
|
||||
constructor(ttl: number | boolean, subreddit: Subreddit, logger: Logger, cache: Cache, cacheCB: Function) {
|
||||
this.notesTTL = ttl === true ? 0 : ttl;
|
||||
this.subreddit = subreddit;
|
||||
this.logger = logger;
|
||||
this.wiki = subreddit.getWikiPage('usernotes');
|
||||
@@ -74,10 +81,11 @@ export class UserNotes {
|
||||
}
|
||||
|
||||
async getUserNotes(user: RedditUser): Promise<UserNote[]> {
|
||||
const userName = getActivityAuthorName(user);
|
||||
let notes: UserNote[] | undefined = [];
|
||||
|
||||
if (this.users !== undefined) {
|
||||
notes = this.users.get(user.name);
|
||||
notes = this.users.get(userName);
|
||||
if (notes !== undefined) {
|
||||
this.logger.debug('Returned cached notes');
|
||||
return notes;
|
||||
@@ -85,7 +93,7 @@ export class UserNotes {
|
||||
}
|
||||
|
||||
const payload = await this.retrieveData();
|
||||
const rawNotes = payload.blob[user.name];
|
||||
const rawNotes = payload.blob[userName];
|
||||
if (rawNotes !== undefined) {
|
||||
if (this.moderators === undefined) {
|
||||
this.moderators = await this.subreddit.getModerators();
|
||||
@@ -94,7 +102,7 @@ export class UserNotes {
|
||||
// sort in ascending order by time
|
||||
notes.sort((a, b) => a.time.isBefore(b.time) ? -1 : 1);
|
||||
if (this.notesTTL > 0 && this.cache !== undefined) {
|
||||
this.users.set(user.name, notes);
|
||||
this.users.set(userName, notes);
|
||||
}
|
||||
return notes;
|
||||
} else {
|
||||
@@ -105,6 +113,7 @@ export class UserNotes {
|
||||
async addUserNote(item: (Submission|Comment), type: string | number, text: string = ''): Promise<UserNote>
|
||||
{
|
||||
const payload = await this.retrieveData();
|
||||
const userName = getActivityAuthorName(item.author);
|
||||
|
||||
// idgaf
|
||||
// @ts-ignore
|
||||
@@ -120,16 +129,16 @@ export class UserNotes {
|
||||
}
|
||||
const newNote = new UserNote(dayjs(), text, mod, type, `https://reddit.com${item.permalink}`);
|
||||
|
||||
if(payload.blob[item.author.name] === undefined) {
|
||||
payload.blob[item.author.name] = {ns: []};
|
||||
if(payload.blob[userName] === undefined) {
|
||||
payload.blob[userName] = {ns: []};
|
||||
}
|
||||
payload.blob[item.author.name].ns.push(newNote.toRaw(payload.constants));
|
||||
payload.blob[userName].ns.push(newNote.toRaw(payload.constants));
|
||||
|
||||
await this.saveData(payload);
|
||||
if(this.notesTTL > 0) {
|
||||
const currNotes = this.users.get(item.author.name) || [];
|
||||
const currNotes = this.users.get(userName) || [];
|
||||
currNotes.push(newNote);
|
||||
this.users.set(item.author.name, currNotes);
|
||||
this.users.set(userName, currNotes);
|
||||
}
|
||||
return newNote;
|
||||
}
|
||||
@@ -144,7 +153,7 @@ export class UserNotes {
|
||||
let cacheMiss;
|
||||
if (this.notesTTL > 0) {
|
||||
const cachedPayload = await this.cache.get(this.identifier);
|
||||
if (cachedPayload !== undefined) {
|
||||
if (cachedPayload !== undefined && cachedPayload !== null) {
|
||||
this.cacheCB(false);
|
||||
return cachedPayload as unknown as RawUserNotesPayload;
|
||||
}
|
||||
@@ -153,14 +162,15 @@ export class UserNotes {
|
||||
}
|
||||
|
||||
try {
|
||||
if(cacheMiss && this.debounceCB !== undefined) {
|
||||
// timeout is still delayed. its our wiki data and we want it now! cm cacheworth 877 cache now
|
||||
this.logger.debug(`Detected missed cache on usernotes retrieval while batch (${this.batchCount}) save is in progress, executing save immediately before retrieving new notes...`);
|
||||
clearTimeout(this.saveDebounce);
|
||||
await this.debounceCB();
|
||||
this.debounceCB = undefined;
|
||||
this.saveDebounce = undefined;
|
||||
}
|
||||
// DISABLED for now because I think its causing issues
|
||||
// if(cacheMiss && this.debounceCB !== undefined) {
|
||||
// // timeout is still delayed. its our wiki data and we want it now! cm cacheworth 877 cache now
|
||||
// this.logger.debug(`Detected missed cache on usernotes retrieval while batch (${this.batchCount}) save is in progress, executing save immediately before retrieving new notes...`);
|
||||
// clearTimeout(this.saveDebounce);
|
||||
// await this.debounceCB();
|
||||
// this.debounceCB = undefined;
|
||||
// this.saveDebounce = undefined;
|
||||
// }
|
||||
// @ts-ignore
|
||||
this.wiki = await this.subreddit.getWikiPage('usernotes').fetch();
|
||||
const wikiContent = this.wiki.content_md;
|
||||
@@ -169,13 +179,13 @@ export class UserNotes {
|
||||
|
||||
userNotes.blob = inflateUserNotes(userNotes.blob);
|
||||
|
||||
if (this.notesTTL > 0) {
|
||||
if (this.notesTTL !== false) {
|
||||
await this.cache.set(`${this.subreddit.display_name}-usernotes`, userNotes, {ttl: this.notesTTL});
|
||||
this.users = new Map();
|
||||
}
|
||||
|
||||
return userNotes as RawUserNotesPayload;
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
const msg = `Could not read usernotes. Make sure at least one moderator has used toolbox and usernotes before.`;
|
||||
this.logger.error(msg, err);
|
||||
throw new LoggedError(msg);
|
||||
@@ -187,30 +197,36 @@ export class UserNotes {
|
||||
const blob = deflateUserNotes(payload.blob);
|
||||
const wikiPayload = {text: JSON.stringify({...payload, blob}), reason: 'ContextBot edited usernotes'};
|
||||
try {
|
||||
if (this.notesTTL > 0) {
|
||||
if (this.notesTTL !== false) {
|
||||
// DISABLED for now because if it fails throws an uncaught rejection
|
||||
// and need to figure out how to handle this other than just logging (want to interrupt action flow too?)
|
||||
//
|
||||
// debounce usernote save by 5 seconds -- effectively batch usernote saves
|
||||
//
|
||||
// so that if we are processing a ton of checks that write user notes we aren't calling to save the wiki page on every call
|
||||
// since we also have everything in cache (most likely...)
|
||||
//
|
||||
// TODO might want to increase timeout to 10 seconds
|
||||
if(this.saveDebounce !== undefined) {
|
||||
clearTimeout(this.saveDebounce);
|
||||
}
|
||||
this.debounceCB = (async function () {
|
||||
const p = wikiPayload;
|
||||
// @ts-ignore
|
||||
const self = this as UserNotes;
|
||||
// @ts-ignore
|
||||
self.wiki = await self.subreddit.getWikiPage('usernotes').edit(p);
|
||||
self.logger.debug(`Batch saved ${self.batchCount} usernotes`);
|
||||
self.debounceCB = undefined;
|
||||
self.saveDebounce = undefined;
|
||||
self.batchCount = 0;
|
||||
}).bind(this);
|
||||
this.saveDebounce = setTimeout(this.debounceCB,5000);
|
||||
this.batchCount++;
|
||||
this.logger.debug(`Saving Usernotes has been debounced for 5 seconds (${this.batchCount} batched)`)
|
||||
// if(this.saveDebounce !== undefined) {
|
||||
// clearTimeout(this.saveDebounce);
|
||||
// }
|
||||
// this.debounceCB = (async function () {
|
||||
// const p = wikiPayload;
|
||||
// // @ts-ignore
|
||||
// const self = this as UserNotes;
|
||||
// // @ts-ignore
|
||||
// self.wiki = await self.subreddit.getWikiPage('usernotes').edit(p);
|
||||
// self.logger.debug(`Batch saved ${self.batchCount} usernotes`);
|
||||
// self.debounceCB = undefined;
|
||||
// self.saveDebounce = undefined;
|
||||
// self.batchCount = 0;
|
||||
// }).bind(this);
|
||||
// this.saveDebounce = setTimeout(this.debounceCB,5000);
|
||||
// this.batchCount++;
|
||||
// this.logger.debug(`Saving Usernotes has been debounced for 5 seconds (${this.batchCount} batched)`)
|
||||
|
||||
// @ts-ignore
|
||||
await this.subreddit.getWikiPage('usernotes').edit(wikiPayload);
|
||||
await this.cache.set(this.identifier, payload, {ttl: this.notesTTL});
|
||||
this.users = new Map();
|
||||
} else {
|
||||
@@ -219,8 +235,14 @@ export class UserNotes {
|
||||
}
|
||||
|
||||
return payload as RawUserNotesPayload;
|
||||
} catch (err) {
|
||||
const msg = `Could not edit usernotes. Make sure at least one moderator has used toolbox and usernotes before and that this account has editing permissions`;
|
||||
} catch (err: any) {
|
||||
let msg = 'Could not edit usernotes.';
|
||||
// Make sure at least one moderator has used toolbox and usernotes before and that this account has editing permissions`;
|
||||
if(isScopeError(err)) {
|
||||
msg = `${msg} The bot account did not have sufficient OAUTH scope to perform this action. You must re-authenticate the bot and ensure it has 'wikiedit' permissions.`
|
||||
} else {
|
||||
msg = `${msg} Make sure at least one moderator has used toolbox, created a usernote, and that this account has editing permissions for the wiki page.`;
|
||||
}
|
||||
this.logger.error(msg, err);
|
||||
throw new LoggedError(msg);
|
||||
}
|
||||
|
||||
src/Utils/Errors.ts (new file, 18 lines)
@@ -0,0 +1,18 @@
import {StatusCodeError} from "../Common/interfaces";


export const isRateLimitError = (err: any) => {
    return typeof err === 'object' && err.name === 'RateLimitError';
}

export const isScopeError = (err: any): boolean => {
    if(typeof err === 'object' && err.name === 'StatusCodeError' && err.response !== undefined) {
        const authHeader = err.response.headers['www-authenticate'];
        return authHeader !== undefined && authHeader.includes('insufficient_scope');
    }
    return false;
}

export const isStatusError = (err: any): err is StatusCodeError => {
    return typeof err === 'object' && err.name === 'StatusCodeError' && err.response !== undefined;
}
|
||||
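A brief, illustrative use of these guards when handling a failed snoowrap call (the surrounding subreddit, wikiPayload, and logger objects are assumed):
// Illustrative usage only -- not part of this diff
try {
    // @ts-ignore
    await subreddit.getWikiPage('usernotes').edit(wikiPayload);
} catch (err: any) {
    if (isScopeError(err)) {
        logger.warn(`Bot is missing the 'wikiedit' OAUTH scope`);
    } else if (isStatusError(err)) {
        logger.warn(`Reddit responded with status ${err.statusCode}`);
    } else {
        throw err;
    }
}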
@@ -1,4 +1,6 @@
|
||||
import Snoowrap from "snoowrap";
|
||||
import Snoowrap, {Listing} from "snoowrap";
|
||||
import {Subreddit} from "snoowrap/dist/objects";
|
||||
import {parseSubredditName} from "../util";
|
||||
|
||||
// const proxyFactory = (endpoint: string) => {
|
||||
// return class ProxiedSnoowrap extends Snoowrap {
|
||||
@@ -12,7 +14,40 @@ import Snoowrap from "snoowrap";
|
||||
// }
|
||||
// }
|
||||
|
||||
export class RequestTrackingSnoowrap extends Snoowrap {
|
||||
export class ExtendedSnoowrap extends Snoowrap {
|
||||
/**
|
||||
* https://www.reddit.com/r/redditdev/comments/jfltfx/comment/g9le48w/?utm_source=reddit&utm_medium=web2x&context=3
|
||||
* */
|
||||
async getManySubreddits(subs: (Subreddit | string)[]): Promise<Listing<Subreddit>> {
|
||||
// parse all names
|
||||
const names = subs.map(x => {
|
||||
if(typeof x !== 'string') {
|
||||
return x.display_name;
|
||||
}
|
||||
try {
|
||||
return parseSubredditName(x);
|
||||
} catch (err: any) {
|
||||
return x;
|
||||
}
|
||||
});
|
||||
|
||||
return await this.oauthRequest({uri: '/api/info', method: 'get', qs: { sr_name: names.join(',')}}) as Listing<Subreddit>;
|
||||
}
|
||||
|
||||
async assignUserFlairByTemplateId(options: { flairTemplateId: string, username: string, subredditName: string }): Promise<any> {
|
||||
return await this.oauthRequest({
|
||||
uri: `/r/${options.subredditName}/api/selectflair`,
|
||||
method: 'post',
|
||||
form: {
|
||||
api_type: 'json',
|
||||
name: options.username,
|
||||
flair_template_id: options.flairTemplateId,
|
||||
}
|
||||
});
|
||||
}
|
||||
}
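An illustrative call of the batched lookup added above (subreddit names are placeholders; assumes client is an ExtendedSnoowrap instance):
// Illustrative only -- one /api/info request instead of one request per subreddit
const subreddits = await client.getManySubreddits(['pics', 'videos', 'AskReddit']);
for (const sub of subreddits) {
    console.log(sub.display_name);
}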
|
||||
|
||||
export class RequestTrackingSnoowrap extends ExtendedSnoowrap {
|
||||
requestCount: number = 0;
|
||||
|
||||
oauthRequest(...args: any) {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import Snoowrap, {RedditUser} from "snoowrap";
|
||||
import Snoowrap, {Listing, RedditUser} from "snoowrap";
|
||||
import Submission from "snoowrap/dist/objects/Submission";
|
||||
import Comment from "snoowrap/dist/objects/Comment";
|
||||
import {Duration, DurationUnitsObjectType} from "dayjs/plugin/duration";
|
||||
@@ -13,11 +13,17 @@ import {
|
||||
TypedActivityStates
|
||||
} from "../Common/interfaces";
|
||||
import {
|
||||
compareDurationValue, comparisonTextOp,
|
||||
compareDurationValue,
|
||||
comparisonTextOp, escapeRegex, getActivityAuthorName,
|
||||
isActivityWindowCriteria,
|
||||
normalizeName, parseDuration,
|
||||
parseDurationComparison, parseGenericValueComparison, parseGenericValueOrPercentComparison, parseSubredditName,
|
||||
truncateStringToLength
|
||||
normalizeName,
|
||||
parseDuration,
|
||||
parseDurationComparison,
|
||||
parseGenericValueComparison,
|
||||
parseGenericValueOrPercentComparison,
|
||||
parseRuleResultsToMarkdownSummary, parseStringToRegex,
|
||||
parseSubredditName,
|
||||
truncateStringToLength, windowToActivityWindowCriteria
|
||||
} from "../util";
|
||||
import UserNotes from "../Subreddit/UserNotes";
|
||||
import {Logger} from "winston";
|
||||
@@ -25,6 +31,7 @@ import InvalidRegexError from "./InvalidRegexError";
|
||||
import SimpleError from "./SimpleError";
|
||||
import {AuthorCriteria} from "../Author/Author";
|
||||
import {URL} from "url";
|
||||
import {isStatusError} from "./Errors";
|
||||
|
||||
export const BOT_LINK = 'https://www.reddit.com/r/ContextModBot/comments/otz396/introduction_to_contextmodbot';
|
||||
|
||||
@@ -37,14 +44,16 @@ export interface AuthorActivitiesOptions {
|
||||
chunkSize?: number,
|
||||
// TODO maybe move this into window
|
||||
keepRemoved?: boolean,
|
||||
[key: string]: any,
|
||||
}
|
||||
|
||||
export async function getAuthorActivities(user: RedditUser, options: AuthorTypedActivitiesOptions): Promise<Array<Submission | Comment>> {
|
||||
export async function getActivities(listingFunc: (limit: number) => Promise<Listing<Submission | Comment>>, options: AuthorActivitiesOptions): Promise<Array<Submission | Comment>> {
|
||||
|
||||
const {
|
||||
chunkSize: cs = 100,
|
||||
window: optWindow,
|
||||
keepRemoved = true,
|
||||
...restFetchOptions
|
||||
} = options;
|
||||
|
||||
let satisfiedCount: number | undefined,
|
||||
@@ -58,33 +67,56 @@ export async function getAuthorActivities(user: RedditUser, options: AuthorTyped
|
||||
let includes: string[] = [];
|
||||
let excludes: string[] = [];
|
||||
|
||||
if (isActivityWindowCriteria(optWindow)) {
|
||||
const {
|
||||
satisfyOn = 'any',
|
||||
count,
|
||||
duration,
|
||||
subreddits: {
|
||||
include = [],
|
||||
exclude = [],
|
||||
} = {},
|
||||
} = optWindow;
|
||||
const strongWindow = windowToActivityWindowCriteria(optWindow);
|
||||
|
||||
includes = include.map(x => parseSubredditName(x).toLowerCase());
|
||||
excludes = exclude.map(x => parseSubredditName(x).toLowerCase());
|
||||
const {
|
||||
satisfyOn = 'any',
|
||||
count,
|
||||
duration: oDuration,
|
||||
subreddits: {
|
||||
include = [],
|
||||
exclude = [],
|
||||
} = {},
|
||||
} = strongWindow;
|
||||
|
||||
if (includes.length > 0 && excludes.length > 0) {
|
||||
// TODO add logger so this can be logged...
|
||||
// this.logger.warn('include and exclude both specified, exclude will be ignored');
|
||||
}
|
||||
satisfiedCount = count;
|
||||
durVal = duration;
|
||||
satisfy = satisfyOn
|
||||
} else if (typeof optWindow === 'number') {
|
||||
satisfiedCount = optWindow;
|
||||
} else {
|
||||
durVal = optWindow as DurationVal;
|
||||
satisfy = satisfyOn;
|
||||
satisfiedCount = count;
|
||||
includes = include;
|
||||
excludes = exclude;
|
||||
durVal = oDuration;
|
||||
|
||||
if (includes.length > 0 && excludes.length > 0) {
|
||||
// TODO add logger so this can be logged...
|
||||
// this.logger.warn('include and exclude both specified, exclude will be ignored');
|
||||
}
|
||||
|
||||
// if (isActivityWindowCriteria(optWindow)) {
|
||||
// const {
|
||||
// satisfyOn = 'any',
|
||||
// count,
|
||||
// duration,
|
||||
// subreddits: {
|
||||
// include = [],
|
||||
// exclude = [],
|
||||
// } = {},
|
||||
// } = optWindow;
|
||||
//
|
||||
// includes = include.map(x => parseSubredditName(x).toLowerCase());
|
||||
// excludes = exclude.map(x => parseSubredditName(x).toLowerCase());
|
||||
//
|
||||
// if (includes.length > 0 && excludes.length > 0) {
|
||||
// // TODO add logger so this can be logged...
|
||||
// // this.logger.warn('include and exclude both specified, exclude will be ignored');
|
||||
// }
|
||||
// satisfiedCount = count;
|
||||
// durVal = duration;
|
||||
// satisfy = satisfyOn
|
||||
// } else if (typeof optWindow === 'number') {
|
||||
// satisfiedCount = optWindow;
|
||||
// } else {
|
||||
// durVal = optWindow as DurationVal;
|
||||
// }
|
||||
|
||||
// if count is less than max limit (100) go ahead and just get that many. may result in faster response time for low numbers
|
||||
if (satisfiedCount !== undefined) {
|
||||
chunkSize = Math.min(chunkSize, satisfiedCount);
|
||||
@@ -118,19 +150,8 @@ export async function getAuthorActivities(user: RedditUser, options: AuthorTyped
|
||||
}
|
||||
|
||||
let items: Array<Submission | Comment> = [];
|
||||
//let count = 1;
|
||||
let listing;
|
||||
switch (options.type) {
|
||||
case 'comment':
|
||||
listing = await user.getComments({limit: chunkSize});
|
||||
break;
|
||||
case 'submission':
|
||||
listing = await user.getSubmissions({limit: chunkSize});
|
||||
break;
|
||||
default:
|
||||
listing = await user.getOverview({limit: chunkSize});
|
||||
break;
|
||||
}
|
||||
|
||||
let listing = await listingFunc(chunkSize);
|
||||
let hitEnd = false;
|
||||
let offset = chunkSize;
|
||||
while (!hitEnd) {
|
||||
@@ -205,12 +226,35 @@ export async function getAuthorActivities(user: RedditUser, options: AuthorTyped
|
||||
|
||||
if (!hitEnd) {
|
||||
offset += chunkSize;
|
||||
listing = await listing.fetchMore({amount: chunkSize});
|
||||
listing = await listing.fetchMore({amount: chunkSize, ...restFetchOptions});
|
||||
}
|
||||
}
|
||||
return Promise.resolve(items);
|
||||
}
|
||||
|
||||
export async function getAuthorActivities(user: RedditUser, options: AuthorTypedActivitiesOptions): Promise<Array<Submission | Comment>> {
|
||||
|
||||
const listFunc = (chunkSize: number): Promise<Listing<Submission | Comment>> => {
|
||||
switch (options.type) {
|
||||
case 'comment':
|
||||
return user.getComments({limit: chunkSize});
|
||||
case 'submission':
|
||||
return user.getSubmissions({limit: chunkSize});
|
||||
default:
|
||||
return user.getOverview({limit: chunkSize});
|
||||
}
|
||||
};
|
||||
try {
|
||||
return await getActivities(listFunc, options);
|
||||
} catch (err: any) {
|
||||
if(isStatusError(err) && err.statusCode === 404) {
|
||||
throw new SimpleError('Reddit returned a 404 for user history. Likely this user is shadowbanned.');
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const getAuthorComments = async (user: RedditUser, options: AuthorActivitiesOptions): Promise<Comment[]> => {
|
||||
return await getAuthorActivities(user, {...options, type: 'comment'}) as unknown as Promise<Comment[]>;
|
||||
}
|
||||
@@ -305,185 +349,239 @@ export const renderContent = async (template: string, data: (Submission | Commen
|
||||
};
|
||||
}, {});
|
||||
|
||||
const view = {item: templateData, rules: normalizedRuleResults};
|
||||
const view = {item: templateData, ruleSummary: parseRuleResultsToMarkdownSummary(ruleResults), rules: normalizedRuleResults};
|
||||
const rendered = Mustache.render(template, view) as string;
|
||||
return he.decode(rendered);
|
||||
}
|
||||
|
||||
export const testAuthorCriteria = async (item: (Comment | Submission), authorOpts: AuthorCriteria, include = true, userNotes: UserNotes) => {
|
||||
// @ts-ignore
|
||||
const author: RedditUser = await item.author;
|
||||
for (const k of Object.keys(authorOpts)) {
|
||||
// @ts-ignore
|
||||
if (authorOpts[k] !== undefined) {
|
||||
switch (k) {
|
||||
case 'name':
|
||||
const authPass = () => {
|
||||
// @ts-ignore
|
||||
for (const n of authorOpts[k]) {
|
||||
if (n.toLowerCase() === author.name.toLowerCase()) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
const authResult = authPass();
|
||||
if ((include && !authResult) || (!include && authResult)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'flairCssClass':
|
||||
const css = await item.author_flair_css_class;
|
||||
const cssPass = () => {
|
||||
// @ts-ignore
|
||||
for (const c of authorOpts[k]) {
|
||||
if (c === css) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
const cssResult = cssPass();
|
||||
if ((include && !cssResult) || (!include && cssResult)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'flairText':
|
||||
const text = await item.author_flair_text;
|
||||
const textPass = () => {
|
||||
// @ts-ignore
|
||||
for (const c of authorOpts[k]) {
|
||||
if (c === text) {
|
||||
return
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
const textResult = textPass();
|
||||
if ((include && !textResult) || (!include && textResult)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'isMod':
|
||||
const mods: RedditUser[] = await item.subreddit.getModerators();
|
||||
const isModerator = mods.some(x => x.name === item.author.name);
|
||||
const modMatch = authorOpts.isMod === isModerator;
|
||||
if ((include && !modMatch) || (!include && modMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'age':
|
||||
const ageTest = compareDurationValue(parseDurationComparison(await authorOpts.age as string), dayjs.unix(await item.author.created));
|
||||
if ((include && !ageTest) || (!include && ageTest)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'linkKarma':
|
||||
const lkCompare = parseGenericValueOrPercentComparison(await authorOpts.linkKarma as string);
|
||||
let lkMatch;
|
||||
if (lkCompare.isPercent) {
|
||||
// @ts-ignore
|
||||
const tk = author.total_karma as number;
|
||||
lkMatch = comparisonTextOp(author.link_karma / tk, lkCompare.operator, lkCompare.value / 100);
|
||||
} else {
|
||||
lkMatch = comparisonTextOp(author.link_karma, lkCompare.operator, lkCompare.value);
|
||||
}
|
||||
if ((include && !lkMatch) || (!include && lkMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'commentKarma':
|
||||
const ckCompare = parseGenericValueOrPercentComparison(await authorOpts.commentKarma as string);
|
||||
let ckMatch;
|
||||
if (ckCompare.isPercent) {
|
||||
// @ts-ignore
|
||||
const ck = author.total_karma as number;
|
||||
ckMatch = comparisonTextOp(author.comment_karma / ck, ckCompare.operator, ckCompare.value / 100);
|
||||
} else {
|
||||
ckMatch = comparisonTextOp(author.comment_karma, ckCompare.operator, ckCompare.value);
|
||||
}
|
||||
if ((include && !ckMatch) || (!include && ckMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'totalKarma':
|
||||
const tkCompare = parseGenericValueComparison(await authorOpts.totalKarma as string);
|
||||
if (tkCompare.isPercent) {
|
||||
throw new SimpleError(`'totalKarma' value on AuthorCriteria cannot be a percentage`);
|
||||
}
|
||||
// @ts-ignore
|
||||
const totalKarma = author.total_karma as number;
|
||||
const tkMatch = comparisonTextOp(totalKarma, tkCompare.operator, tkCompare.value);
|
||||
if ((include && !tkMatch) || (!include && tkMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'verified':
|
||||
const vMatch = await author.has_verified_mail === authorOpts.verified as boolean;
|
||||
if ((include && !vMatch) || (!include && vMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'userNotes':
|
||||
const notes = await userNotes.getUserNotes(item.author);
|
||||
const notePass = () => {
|
||||
for (const noteCriteria of authorOpts[k] as UserNoteCriteria[]) {
|
||||
const {count = '>= 1', search = 'current', type} = noteCriteria;
|
||||
const {
|
||||
value,
|
||||
operator,
|
||||
isPercent,
|
||||
extra = ''
|
||||
} = parseGenericValueOrPercentComparison(count);
|
||||
const order = extra.includes('asc') ? 'ascending' : 'descending';
|
||||
switch (search) {
|
||||
case 'current':
|
||||
if (notes.length > 0 && notes[notes.length - 1].noteType === type) {
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
case 'consecutive':
|
||||
let orderedNotes = notes;
|
||||
if (order === 'descending') {
|
||||
orderedNotes = [...notes];
|
||||
orderedNotes.reverse();
|
||||
}
|
||||
let currCount = 0;
|
||||
for (const note of orderedNotes) {
|
||||
if (note.noteType === type) {
|
||||
currCount++;
|
||||
} else {
|
||||
currCount = 0;
|
||||
}
|
||||
if (isPercent) {
|
||||
throw new SimpleError(`When comparing UserNotes with 'consecutive' search 'count' cannot be a percentage. Given: ${count}`);
|
||||
}
|
||||
if (comparisonTextOp(currCount, operator, value)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
break;
|
||||
case 'total':
|
||||
if (isPercent) {
|
||||
if (comparisonTextOp(notes.filter(x => x.noteType === type).length / notes.length, operator, value / 100)) {
|
||||
return true;
|
||||
}
|
||||
} else if (comparisonTextOp(notes.filter(x => x.noteType === type).length, operator, value)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
const noteResult = notePass();
|
||||
if ((include && !noteResult) || (!include && noteResult)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
const {shadowBanned, ...rest} = authorOpts;
|
||||
|
||||
if(shadowBanned !== undefined) {
|
||||
try {
|
||||
// @ts-ignore
|
||||
await item.author.fetch();
|
||||
// user is not shadowbanned
|
||||
// if criteria specifies they SHOULD be shadowbanned then return false now
|
||||
if(shadowBanned) {
|
||||
return false;
|
||||
}
|
||||
} catch (err: any) {
|
||||
if(isStatusError(err) && err.statusCode === 404) {
|
||||
// user is shadowbanned
|
||||
// if criteria specifies they should not be shadowbanned then return false now
|
||||
if(!shadowBanned) {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
|
||||
try {
|
||||
const authorName = getActivityAuthorName(item.author);
|
||||
|
||||
for (const k of Object.keys(rest)) {
|
||||
// @ts-ignore
|
||||
if (authorOpts[k] !== undefined) {
|
||||
switch (k) {
|
||||
case 'name':
|
||||
const authPass = () => {
|
||||
// @ts-ignore
|
||||
for (const n of authorOpts[k]) {
|
||||
if (n.toLowerCase() === authorName.toLowerCase()) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
const authResult = authPass();
|
||||
if ((include && !authResult) || (!include && authResult)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'flairCssClass':
|
||||
const css = await item.author_flair_css_class;
|
||||
const cssPass = () => {
|
||||
// @ts-ignore
|
||||
for (const c of authorOpts[k]) {
|
||||
if (c === css) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
const cssResult = cssPass();
|
||||
if ((include && !cssResult) || (!include && cssResult)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'flairText':
|
||||
const text = await item.author_flair_text;
|
||||
const textPass = () => {
|
||||
// @ts-ignore
|
||||
for (const c of authorOpts[k]) {
|
||||
if (c === text) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
const textResult = textPass();
|
||||
if ((include && !textResult) || (!include && textResult)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'isMod':
|
||||
const mods: RedditUser[] = await item.subreddit.getModerators();
|
||||
const isModerator = mods.some(x => x.name === authorName);
|
||||
const modMatch = authorOpts.isMod === isModerator;
|
||||
if ((include && !modMatch) || (!include && modMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'age':
|
||||
const ageTest = compareDurationValue(parseDurationComparison(await authorOpts.age as string), dayjs.unix(await item.author.created));
|
||||
if ((include && !ageTest) || (!include && ageTest)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'linkKarma':
|
||||
const lkCompare = parseGenericValueOrPercentComparison(await authorOpts.linkKarma as string);
|
||||
let lkMatch;
|
||||
if (lkCompare.isPercent) {
|
||||
// @ts-ignore
|
||||
const tk = await item.author.total_karma as number;
|
||||
lkMatch = comparisonTextOp(item.author.link_karma / tk, lkCompare.operator, lkCompare.value / 100);
|
||||
} else {
|
||||
lkMatch = comparisonTextOp(item.author.link_karma, lkCompare.operator, lkCompare.value);
|
||||
}
|
||||
if ((include && !lkMatch) || (!include && lkMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'commentKarma':
|
||||
const ckCompare = parseGenericValueOrPercentComparison(await authorOpts.commentKarma as string);
|
||||
let ckMatch;
|
||||
if (ckCompare.isPercent) {
|
||||
// @ts-ignore
|
||||
const ck = await item.author.total_karma as number;
|
||||
ckMatch = comparisonTextOp(item.author.comment_karma / ck, ckCompare.operator, ckCompare.value / 100);
|
||||
} else {
|
||||
ckMatch = comparisonTextOp(item.author.comment_karma, ckCompare.operator, ckCompare.value);
|
||||
}
|
||||
if ((include && !ckMatch) || (!include && ckMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'totalKarma':
|
||||
const tkCompare = parseGenericValueComparison(await authorOpts.totalKarma as string);
|
||||
if (tkCompare.isPercent) {
|
||||
throw new SimpleError(`'totalKarma' value on AuthorCriteria cannot be a percentage`);
|
||||
}
|
||||
// @ts-ignore
|
||||
const totalKarma = await item.author.total_karma as number;
|
||||
const tkMatch = comparisonTextOp(totalKarma, tkCompare.operator, tkCompare.value);
|
||||
if ((include && !tkMatch) || (!include && tkMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'verified':
|
||||
const vMatch = await item.author.has_verified_mail === authorOpts.verified as boolean;
|
||||
if ((include && !vMatch) || (!include && vMatch)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'description':
|
||||
// @ts-ignore
|
||||
const desc = await item.author.subreddit?.display_name.public_description;
|
||||
const dVals = authorOpts[k] as string[];
|
||||
let passed = false;
|
||||
for(const val of dVals) {
|
||||
let reg = parseStringToRegex(val, 'i');
|
||||
if(reg === undefined) {
|
||||
reg = parseStringToRegex(`/.*${escapeRegex(val.trim())}.*/`, 'i');
|
||||
if(reg === undefined) {
|
||||
throw new SimpleError(`Could not convert 'description' value to a valid regex: ${authorOpts[k] as string}`);
|
||||
}
|
||||
}
|
||||
if(reg.test(desc)) {
|
||||
passed = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if(!passed) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case 'userNotes':
|
||||
const notes = await userNotes.getUserNotes(item.author);
|
||||
const notePass = () => {
|
||||
for (const noteCriteria of authorOpts[k] as UserNoteCriteria[]) {
|
||||
const {count = '>= 1', search = 'current', type} = noteCriteria;
|
||||
const {
|
||||
value,
|
||||
operator,
|
||||
isPercent,
|
||||
extra = ''
|
||||
} = parseGenericValueOrPercentComparison(count);
|
||||
const order = extra.includes('asc') ? 'ascending' : 'descending';
|
||||
switch (search) {
|
||||
case 'current':
|
||||
if (notes.length > 0 && notes[notes.length - 1].noteType === type) {
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
case 'consecutive':
|
||||
let orderedNotes = notes;
|
||||
if (order === 'descending') {
|
||||
orderedNotes = [...notes];
|
||||
orderedNotes.reverse();
|
||||
}
|
||||
let currCount = 0;
|
||||
for (const note of orderedNotes) {
|
||||
if (note.noteType === type) {
|
||||
currCount++;
|
||||
} else {
|
||||
currCount = 0;
|
||||
}
|
||||
if (isPercent) {
|
||||
throw new SimpleError(`When comparing UserNotes with 'consecutive' search 'count' cannot be a percentage. Given: ${count}`);
|
||||
}
|
||||
if (comparisonTextOp(currCount, operator, value)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
break;
|
||||
case 'total':
|
||||
if (isPercent) {
|
||||
if (comparisonTextOp(notes.filter(x => x.noteType === type).length / notes.length, operator, value / 100)) {
|
||||
return true;
|
||||
}
|
||||
} else if (comparisonTextOp(notes.filter(x => x.noteType === type).length, operator, value)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
const noteResult = notePass();
|
||||
if ((include && !noteResult) || (!include && noteResult)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
} catch (err: any) {
|
||||
if(isStatusError(err) && err.statusCode === 404) {
|
||||
throw new SimpleError('Reddit returned a 404 while trying to retrieve User profile. It is likely this user is shadowbanned.');
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export interface ItemContent {
|
||||
@@ -503,8 +601,9 @@ export const itemContentPeek = async (item: (Comment | Submission), peekLength =
|
||||
submissionTitle = item.title;
|
||||
peek = `${truncatePeek(item.title)} by ${author} https://reddit.com${item.permalink}`;
|
||||
|
||||
} else if (item instanceof Comment) {
|
||||
content = truncatePeek(item.body);
|
||||
} else {
|
||||
// replace newlines with spaces to make peek more compact
|
||||
content = truncatePeek(item.body.replaceAll('\n', ' '));
|
||||
peek = `${truncatePeek(content)} by ${author} in https://reddit.com${item.permalink}`;
|
||||
}
|
||||
|
||||
@@ -606,18 +705,29 @@ export const getAttributionIdentifier = (sub: Submission, useParentMediaDomain =
|
||||
if (displayDomain === '') {
|
||||
displayDomain = domain;
|
||||
}
|
||||
if(domainIdents.length === 0 && domain !== '') {
|
||||
domainIdents.push(domain);
|
||||
}
|
||||
|
||||
return {display: displayDomain, domain, aliases: domainIdents, provider, mediaType};
|
||||
}
|
||||
|
||||
export const activityIsRemoved = (item: Submission | Comment): boolean => {
|
||||
if (item instanceof Submission) {
|
||||
// when automod filters a post it gets this category
|
||||
return item.banned_at_utc !== null && item.removed_by_category !== 'automod_filtered';
|
||||
if(item.can_mod_post) {
|
||||
if (item instanceof Submission) {
|
||||
// when automod filters a post it gets this category
|
||||
return item.banned_at_utc !== null && item.removed_by_category !== 'automod_filtered';
|
||||
}
|
||||
// when automod filters a comment item.removed === false
|
||||
// so if we want to process filtered comments we need to check for this
|
||||
return item.banned_at_utc !== null && item.removed;
|
||||
} else {
|
||||
if (item instanceof Submission) {
|
||||
return item.removed_by_category === 'moderator' || item.removed_by_category === 'deleted';
|
||||
}
|
||||
// in subreddits the bot does not mod it is not possible to tell the difference between a comment that was removed by the user and one that was removed by a mod
|
||||
return item.body === '[removed]';
|
||||
}
|
||||
// when automod filters a comment item.removed === false
|
||||
// so if we want to process filtered comments we need to check for this
|
||||
return item.banned_at_utc !== null && item.removed;
|
||||
}
|
||||
|
||||
export const activityIsFiltered = (item: Submission | Comment): boolean => {
|
||||
|
||||
src/Utils/StringMatching/CosineSimilarity.ts (new file, 70 lines)
@@ -0,0 +1,70 @@
// reproduced from https://github.com/sumn2u/string-comparison/blob/master/jscosine.js
// https://sumn2u.medium.com/string-similarity-comparision-in-js-with-examples-4bae35f13968

interface StrMap {
    [key: string]: number
}

interface BoolMap {
    [key: string]: boolean
}


function termFreqMap(str: string) {
    var words = str.split(' ');
    var termFreq: StrMap = {};
    words.forEach(function(w) {
        termFreq[w] = (termFreq[w] || 0) + 1;
    });
    return termFreq;
}

function addKeysToDict(map: StrMap, dict: BoolMap) {
    for (var key in map) {
        dict[key] = true;
    }
}

function termFreqMapToVector(map: StrMap, dict: StrMap): number[] {
    var termFreqVector = [];
    for (var term in dict) {
        termFreqVector.push(map[term] || 0);
    }
    return termFreqVector;
}

function vecDotProduct(vecA: number[], vecB: number[]) {
    var product = 0;
    for (var i = 0; i < vecA.length; i++) {
        product += vecA[i] * vecB[i];
    }
    return product;
}

function vecMagnitude(vec: number[]) {
    var sum = 0;
    for (var i = 0; i < vec.length; i++) {
        sum += vec[i] * vec[i];
    }
    return Math.sqrt(sum);
}

function cosineSimilarity(vecA: number[], vecB: number[]) {
    return vecDotProduct(vecA, vecB) / (vecMagnitude(vecA) * vecMagnitude(vecB));
}

const calculateCosineSimilarity = function textCosineSimilarity(strA: string, strB: string) {
    var termFreqA = termFreqMap(strA);
    var termFreqB = termFreqMap(strB);

    var dict = {};
    addKeysToDict(termFreqA, dict);
    addKeysToDict(termFreqB, dict);

    var termFreqVecA = termFreqMapToVector(termFreqA, dict);
    var termFreqVecB = termFreqMapToVector(termFreqB, dict);

    return cosineSimilarity(termFreqVecA, termFreqVecB);
}

export default calculateCosineSimilarity;
|
||||
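A quick, illustrative check of the exported helper; tokenization is whitespace-based, so word order doesn't matter, only term-frequency overlap:
// Illustrative only
const score = calculateCosineSimilarity('free discord nitro click here', 'click here for free discord nitro');
// score is roughly 0.91 -- high overlap despite different word order (the extra 'for' keeps it below 1)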
src/Utils/StringMatching/levenSimilarity.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
import leven from "leven";

const levenSimilarity = (valA: string, valB: string) => {
    let longer: string;
    let shorter: string;
    if (valA.length > valB.length) {
        longer = valA;
        shorter = valB;
    } else {
        longer = valB;
        shorter = valA;
    }

    const distance = leven(longer, shorter);
    const diff = (distance / longer.length) * 100;
    return [distance, 100 - diff];
}

export default levenSimilarity;
|
||||
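Illustrative usage; the helper returns the raw Levenshtein distance along with a percentage similarity relative to the longer string:
// Illustrative only
const [distance, similarity] = levenSimilarity('reposted title', 'Reposted Title!');
// distance = 3 (two case changes plus the trailing '!'), similarity = 80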
src/Utils/ThirdParty/YoutubeClient.ts (new file, vendored, 59 lines)
@@ -0,0 +1,59 @@
import {URLSearchParams} from "url";
import fetch from "node-fetch";
import {parseUsableLinkIdentifier} from "../../util";
import dayjs from "dayjs";
import {youtube, youtube_v3 } from '@googleapis/youtube';
import Schema$CommentThread = youtube_v3.Schema$CommentThread;
import {RepostItem} from "../../Common/interfaces";

const parseYtIdentifier = parseUsableLinkIdentifier();

export class YoutubeClient {
    apiKey: string;
    client: youtube_v3.Youtube

    constructor(key: string) {
        this.apiKey = key;
        this.client = youtube({version: 'v3', auth: key});
    }

    getVideoTopComments = async (url: string, maxResults: number = 50): Promise<Schema$CommentThread[]> => {

        const videoId = parseYtIdentifier(url);

        const res = await this.client.commentThreads.list({
            part: ['snippet'],
            videoId,
            maxResults: maxResults,
            textFormat: 'plainText',
            order: 'relevance',
        });

        const items = res.data.items as Schema$CommentThread[];
        items.sort((a, b) => (a.snippet?.topLevelComment?.snippet?.likeCount as number) - (b.snippet?.topLevelComment?.snippet?.likeCount as number)).reverse();

        return items;
    }
}


export const commentsAsRepostItems = (comments: Schema$CommentThread[]): RepostItem[] => {
    return comments.map((x) => {
        const textDisplay = x.snippet?.topLevelComment?.snippet?.textDisplay;
        const publishedAt = x.snippet?.topLevelComment?.snippet?.publishedAt;
        const id = x.snippet?.topLevelComment?.id;
        const videoId = x.snippet?.topLevelComment?.snippet?.videoId;

        return {
            value: textDisplay as string,
            createdOn: dayjs(publishedAt as string).unix(),
            source: 'Youtube',
            id: x.snippet?.topLevelComment?.id as string,
            sourceUrl: `https://youtube.com/watch?v=${videoId}&lc=${id}`,
            score: x.snippet?.topLevelComment?.snippet?.likeCount as number,
            acquisitionType: 'external',
            itemType: 'comment'
        };
    })
}
|
||||
|
||||
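Illustrative wiring of the two exports above (the API key and video URL are placeholders):
// Illustrative only -- fetch top comments for a video and normalize them for repost comparisons
const yt = new YoutubeClient('YOUTUBE_API_KEY');
const threads = await yt.getVideoTopComments('https://www.youtube.com/watch?v=dQw4w9WgXcQ', 20);
const items = commentsAsRepostItems(threads);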
@@ -15,7 +15,6 @@ export const getLogger = (options: any, name = 'app'): Logger => {
|
||||
|
||||
const consoleTransport = new transports.Console({
|
||||
handleExceptions: true,
|
||||
// @ts-expect-error
|
||||
handleRejections: true,
|
||||
});
|
||||
|
||||
@@ -31,7 +30,6 @@ export const getLogger = (options: any, name = 'app'): Logger => {
|
||||
name: 'duplex',
|
||||
dump: false,
|
||||
handleExceptions: true,
|
||||
// @ts-expect-error
|
||||
handleRejections: true,
|
||||
}),
|
||||
...additionalTransports,
|
||||
|
||||
@@ -8,12 +8,13 @@ import passport from 'passport';
|
||||
import {Strategy as CustomStrategy} from 'passport-custom';
|
||||
import {OperatorConfig, BotConnection, LogInfo} from "../../Common/interfaces";
|
||||
import {
|
||||
buildCachePrefix,
|
||||
createCacheManager, filterLogBySubreddit,
|
||||
formatLogLineToHtml,
|
||||
intersect, isLogLineMinLevel,
|
||||
LogEntry, parseFromJsonOrYamlToObject, parseInstanceLogInfoName, parseInstanceLogName,
|
||||
parseSubredditLogName, permissions,
|
||||
randomId, sleep
|
||||
randomId, sleep, triggeredIndicator
|
||||
} from "../../util";
|
||||
import {Cache} from "cache-manager";
|
||||
import session, {Session, SessionData} from "express-session";
|
||||
@@ -42,6 +43,8 @@ import {booleanMiddle} from "../Common/middleware";
|
||||
import {BotInstance, CMInstance} from "../interfaces";
|
||||
import { URL } from "url";
|
||||
import {MESSAGE} from "triple-beam";
|
||||
import Autolinker from "autolinker";
|
||||
import path from "path";
|
||||
|
||||
const emitter = new EventEmitter();
|
||||
|
||||
@@ -119,9 +122,16 @@ const webClient = async (options: OperatorConfig) => {
|
||||
},
|
||||
web: {
|
||||
port,
|
||||
caching,
|
||||
caching: {
|
||||
prefix
|
||||
},
|
||||
invites: {
|
||||
maxAge: invitesMaxAge,
|
||||
},
|
||||
session: {
|
||||
provider,
|
||||
secret,
|
||||
maxAge: sessionMaxAge,
|
||||
},
|
||||
maxLogs,
|
||||
clients,
|
||||
@@ -134,8 +144,6 @@ const webClient = async (options: OperatorConfig) => {
|
||||
},
|
||||
} = options;
|
||||
|
||||
const connectedUsers: ConnectUserObj = {};
|
||||
|
||||
const webOps = operators.map(x => x.toLowerCase());
|
||||
|
||||
const logger = getLogger({defaultLabel: 'Web', ...options.logging}, 'Web');
|
||||
@@ -159,11 +167,15 @@ const webClient = async (options: OperatorConfig) => {
|
||||
throw new SimpleError(`Specified port for web interface (${port}) is in use or not available. Cannot start web server.`);
|
||||
}
|
||||
|
||||
if (provider.store === 'none') {
|
||||
logger.warn(`Cannot use 'none' for session store or else no one can use the interface...falling back to 'memory'`);
|
||||
provider.store = 'memory';
|
||||
if (caching.store === 'none') {
|
||||
logger.warn(`Cannot use 'none' for web caching or else no one can use the interface...falling back to 'memory'`);
|
||||
caching.store = 'memory';
|
||||
}
|
||||
//const webCache = createCacheManager(provider) as Cache;
|
||||
//const webCachePrefix = buildCachePrefix([prefix, 'web']);
|
||||
const webCache = createCacheManager({...caching, prefix: buildCachePrefix([prefix, 'web'])}) as Cache;
|
||||
|
||||
//const previousSessions = await webCache.get
|
||||
const connectedUsers: ConnectUserObj = {};
|
||||
|
||||
//<editor-fold desc=Session and Auth>
|
||||
/*
|
||||
@@ -217,9 +229,9 @@ const webClient = async (options: OperatorConfig) => {
|
||||
|
||||
const sessionObj = session({
|
||||
cookie: {
|
||||
maxAge: provider.ttl,
|
||||
maxAge: sessionMaxAge * 1000,
|
||||
},
|
||||
store: new CacheManagerStore(createCacheManager(provider) as Cache),
|
||||
store: new CacheManagerStore(webCache, {prefix: 'sess:'}),
|
||||
resave: false,
|
||||
saveUninitialized: false,
|
||||
secret,
|
||||
@@ -279,7 +291,7 @@ const webClient = async (options: OperatorConfig) => {
|
||||
return res.render('error', {error: errContent});
|
||||
}
|
||||
// @ts-ignore
|
||||
const invite = invites.get(req.session.inviteId) as inviteData;
|
||||
const invite = await webCache.get(`invite:${req.session.inviteId}`) as InviteData;
|
||||
const client = await Snoowrap.fromAuthCode({
|
||||
userAgent: `web:contextBot:web`,
|
||||
clientId: invite.clientId,
|
||||
@@ -290,7 +302,7 @@ const webClient = async (options: OperatorConfig) => {
|
||||
// @ts-ignore
|
||||
const user = await client.getMe();
|
||||
// @ts-ignore
|
||||
invites.delete(req.session.inviteId);
|
||||
await webCache.del(`invite:${req.session.inviteId}`);
|
||||
let data: any = {
|
||||
accessToken: client.accessToken,
|
||||
refreshToken: client.refreshToken,
|
||||
@@ -346,7 +358,7 @@ const webClient = async (options: OperatorConfig) => {
|
||||
});
|
||||
|
||||
let token = randomId();
|
||||
interface inviteData {
|
||||
interface InviteData {
|
||||
permissions: string[],
|
||||
subreddit?: string,
|
||||
instance?: string,
|
||||
@@ -355,7 +367,6 @@ const webClient = async (options: OperatorConfig) => {
|
||||
redirectUri: string
|
||||
creator: string
|
||||
}
|
||||
const invites: Map<string, inviteData> = new Map();
|
||||
|
||||
const helperAuthed = async (req: express.Request, res: express.Response, next: Function) => {
|
||||
|
||||
@@ -386,14 +397,14 @@ const webClient = async (options: OperatorConfig) => {
|
||||
});
|
||||
});
|
||||
|
||||
app.getAsync('/auth/invite', (req, res) => {
|
||||
app.getAsync('/auth/invite', async (req, res) => {
|
||||
const {invite: inviteId} = req.query;
|
||||
|
||||
if(inviteId === undefined) {
|
||||
return res.render('error', {error: '`invite` param is missing from URL'});
|
||||
}
|
||||
const invite = invites.get(inviteId as string);
|
||||
if(invite === undefined) {
|
||||
const invite = await webCache.get(`invite:${inviteId}`) as InviteData | undefined | null;
|
||||
if(invite === undefined || invite === null) {
|
||||
return res.render('error', {error: 'Invite with the given id does not exist'});
|
||||
}
|
||||
|
||||
@@ -429,7 +440,7 @@ const webClient = async (options: OperatorConfig) => {
|
||||
}
|
||||
|
||||
const inviteId = code || randomId();
|
||||
invites.set(inviteId, {
|
||||
await webCache.set(`invite:${inviteId}`, {
|
||||
permissions,
|
||||
clientId: (ci || clientId).trim(),
|
||||
clientSecret: (ce || clientSecret).trim(),
|
||||
@@ -437,7 +448,7 @@ const webClient = async (options: OperatorConfig) => {
|
||||
instance,
|
||||
subreddit,
|
||||
creator: (req.user as Express.User).name,
|
||||
});
|
||||
}, {ttl: invitesMaxAge * 1000});
|
||||
return res.send(inviteId);
|
||||
});
|
||||
|
||||
@@ -446,8 +457,8 @@ const webClient = async (options: OperatorConfig) => {
|
||||
if(inviteId === undefined) {
|
||||
return res.render('error', {error: '`invite` param is missing from URL'});
|
||||
}
|
||||
const invite = invites.get(inviteId as string);
|
||||
if(invite === undefined) {
|
||||
const invite = await webCache.get(`invite:${inviteId}`) as InviteData | undefined | null;
|
||||
if(invite === undefined || invite === null) {
|
||||
return res.render('error', {error: 'Invite with the given id does not exist'});
|
||||
}
|
||||
|
||||
@@ -562,7 +573,7 @@ const webClient = async (options: OperatorConfig) => {
|
||||
try {
|
||||
server = await app.listen(port);
|
||||
io = new SocketServer(server);
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
logger.error('Error occurred while initializing web or socket.io server', err);
|
||||
err.logged = true;
|
||||
throw err;
|
||||
@@ -643,9 +654,9 @@ const webClient = async (options: OperatorConfig) => {
|
||||
req.session.level = 'verbose';
|
||||
req.session.sort = 'descending';
|
||||
req.session.save();
|
||||
// @ts-ignore
|
||||
connectedUsers[req.session.id] = {};
|
||||
}
|
||||
// @ts-ignore
|
||||
connectedUsers[req.session.id] = {};
|
||||
next();
|
||||
}
|
||||
|
||||
@@ -751,7 +762,7 @@ const webClient = async (options: OperatorConfig) => {
|
||||
},
|
||||
}).json() as any;
|
||||
|
||||
} catch(err) {
|
||||
} catch(err: any) {
|
||||
logger.error(`Error occurred while retrieving bot information. Will update heartbeat -- ${err.message}`, {instance: instance.friendly});
|
||||
refreshClient(clients.find(x => normalizeUrl(x.host) === instance.normalUrl) as BotConnection);
|
||||
return res.render('offline', {
|
||||
@@ -802,8 +813,10 @@ const webClient = async (options: OperatorConfig) => {
|
||||
});
|
||||
|
||||
app.getAsync('/config', async (req: express.Request, res: express.Response) => {
|
||||
const {format = 'json'} = req.query as any;
|
||||
res.render('config', {
|
||||
title: `Configuration Editor`
|
||||
title: `Configuration Editor`,
|
||||
format,
|
||||
});
|
||||
});
|
||||
|
||||
@@ -822,6 +835,73 @@ const webClient = async (options: OperatorConfig) => {
|
||||
return res.send(resp);
|
||||
});
|
||||
|
||||
app.getAsync('/events', [ensureAuthenticatedApi, defaultSession, instanceWithPermissions, botWithPermissions, createUserToken], async (req: express.Request, res: express.Response) => {
|
||||
const {subreddit} = req.query as any;
|
||||
const resp = await got.get(`${(req.instance as CMInstance).normalUrl}/events`, {
|
||||
headers: {
|
||||
'Authorization': `Bearer ${req.token}`,
|
||||
},
|
||||
searchParams: {
|
||||
subreddit,
|
||||
bot: req.bot?.botName
|
||||
}
|
||||
}).json() as [any];
|
||||
|
||||
return res.render('events', {
|
||||
data: resp.map((x) => {
|
||||
const {timestamp, activity: {peek, link}, ruleResults = [], actionResults = [], ...rest} = x;
|
||||
const time = dayjs(timestamp).local().format('YY-MM-DD HH:mm:ss z');
|
||||
const formattedPeek = Autolinker.link(peek, {
|
||||
email: false,
|
||||
phone: false,
|
||||
mention: false,
|
||||
hashtag: false,
|
||||
stripPrefix: false,
|
||||
sanitizeHtml: true,
|
||||
});
|
||||
const formattedRuleResults = ruleResults.map((y: any) => {
|
||||
const {triggered, result, ...restY} = y;
|
||||
let t = triggeredIndicator(false);
|
||||
if(triggered === null) {
|
||||
t = 'Skipped';
|
||||
} else if(triggered === true) {
|
||||
t = triggeredIndicator(true);
|
||||
}
|
||||
return {
|
||||
...restY,
|
||||
triggered: t,
|
||||
result: result || '-'
|
||||
};
|
||||
});
|
||||
const formattedActionResults = actionResults.map((y: any) => {
|
||||
const {run, runReason, success, result, dryRun, ...restA} = y;
|
||||
let res = '';
|
||||
if(!run) {
|
||||
res = `Not Run - ${runReason === undefined ? '(No Reason)' : runReason}`;
|
||||
} else {
|
||||
res = `${triggeredIndicator(success)}${result !== undefined ? ` - ${result}` : ''}`;
|
||||
}
|
||||
return {
|
||||
...restA,
|
||||
dryRun: dryRun ? ' (DRYRUN)' : '',
|
||||
result: res
|
||||
};
|
||||
});
|
||||
return {
|
||||
...rest,
|
||||
timestamp: time,
|
||||
activity: {
|
||||
link,
|
||||
peek: formattedPeek,
|
||||
},
|
||||
ruleResults: formattedRuleResults,
|
||||
actionResults: formattedActionResults
|
||||
}
|
||||
}),
|
||||
title: `${subreddit !== undefined ? `${subreddit} ` : ''}Actioned Events`
|
||||
});
|
||||
});

app.getAsync('/logs/settings/update',[ensureAuthenticated], async (req: express.Request, res: express.Response) => {
const e = req.query;
for (const [setting, val] of Object.entries(req.query)) {
@@ -912,7 +992,7 @@ const webClient = async (options: OperatorConfig) => {
}
}).json() as object;
io.to(session.id).emit('opStats', resp);
} catch (err) {
} catch (err: any) {
logger.error(`Could not retrieve stats ${err.message}`, {instance: bot.friendly});
clearInterval(interval);
}
@@ -950,7 +1030,7 @@ const webClient = async (options: OperatorConfig) => {
}
}).json() as object;
return {success: true, ...resp};
} catch (err) {
} catch (err: any) {
return {success: false, error: err.message};
}
}
@@ -1019,7 +1099,7 @@ const webClient = async (options: OperatorConfig) => {
// botStat.indicator = 'red';
// }
logger.verbose(`Heartbeat detected`, {instance: botStat.friendly});
} catch (err) {
} catch (err: any) {
botStat.error = err.message;
logger.error(`Heartbeat response from ${botStat.friendly} was not ok: ${err.message}`, {instance: botStat.friendly});
} finally {

@@ -1,13 +1,10 @@
import {BotStats, BotStatusResponse, SubredditDataResponse} from "./interfaces";
import {ManagerStats, RunningState} from "../../Subreddit/Manager";
import {Invokee, RunState} from "../../Common/interfaces";
import {cacheStats} from "../../util";
import {RunningState} from "../../Subreddit/Manager";
import {Invokee, ManagerStats, RunState} from "../../Common/interfaces";
import {cacheStats, createHistoricalStatsDisplay} from "../../util";
import {createHistoricalDefaults, historicalDefaults} from "../../Common/defaults";

const managerStats: ManagerStats = {
actionsRun: 0,
actionsRunSinceStart: 0,
actionsRunSinceStartTotal: 0,
actionsRunTotal: 0,
cache: {
currentKeyCount: 0,
isShared: false,
@@ -18,22 +15,12 @@ const managerStats: ManagerStats = {
totalRequests: 0,
types: cacheStats()
},
checksRunSinceStartTotal: 0,
checksRunTotal: 0,
checksTriggered: 0,
checksTriggeredSinceStart: 0,
checksTriggeredSinceStartTotal: 0,
checksTriggeredTotal: 0,
historical: {
lastReload: createHistoricalStatsDisplay(createHistoricalDefaults()),
allTime: createHistoricalStatsDisplay(createHistoricalDefaults()),
},
eventsAvg: 0,
eventsCheckedSinceStartTotal: 0,
eventsCheckedTotal: 0,
rulesAvg: 0,
rulesCachedSinceStartTotal: 0,
rulesCachedTotal: 0,
rulesRunSinceStartTotal: 0,
rulesRunTotal: 0,
rulesTriggeredSinceStartTotal: 0,
rulesTriggeredTotal: 0
};
const botStats: BotStats = {
apiAvg: '-',

@@ -1,4 +1,5 @@
import {ManagerStats, RunningState} from "../../Subreddit/Manager";
import {RunningState} from "../../Subreddit/Manager";
import {ManagerStats} from "../../Common/interfaces";

export interface BotStats {
startedAtHuman: string,

@@ -31,3 +31,26 @@ export const botRoute = (required = true) => async (req: Request, res: Response,
}
return next();
}

export const subredditRoute = (required = true) => async (req: Request, res: Response, next: Function) => {

const bot = req.serverBot;

const {subreddit} = req.query as any;
if(subreddit === undefined && required === false) {
next();
} else {
const {name: userName, realManagers = [], isOperator} = req.user as Express.User;
if (!isOperator && !realManagers.includes(subreddit)) {
return res.status(400).send('Cannot access route for subreddit you do not manage or is not run by the bot')
}
const manager = bot.subManagers.find(x => x.displayLabel === subreddit);
if (manager === undefined) {
return res.status(400).send('Cannot access route for subreddit you do not manage or is not run by the bot')
}

req.manager = manager;

next();
}
}
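
(Note: subredditRoute reads req.serverBot and assigns req.manager, which only type-checks if the Express Request type has been augmented somewhere in the project. The sketch below is an illustrative guess at that declaration, built only from names visible in this diff; the import path and exact field types are assumptions.)

// Minimal sketch of the declaration merging assumed by the middleware above; illustrative only.
import {Manager} from "../../Subreddit/Manager"; // hypothetical path
declare global {
    namespace Express {
        interface User {
            name: string;
            realManagers?: string[];
            isOperator?: boolean;
        }
        interface Request {
            serverBot: { subManagers: Manager[] }; // attached by botRoute()
            manager?: Manager;                     // attached by subredditRoute() above
        }
    }
}
export {};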

@@ -67,7 +67,7 @@ const action = async (req: express.Request, res: express.Response) => {
if (type === 'unmoderated') {
const activities = await manager.subreddit.getUnmoderated({limit: 100});
for (const a of activities.reverse()) {
await manager.queue.push({
await manager.firehose.push({
checkType: a instanceof Submission ? 'Submission' : 'Comment',
activity: a,
});
@@ -75,7 +75,7 @@ const action = async (req: express.Request, res: express.Response) => {
} else {
const activities = await manager.subreddit.getModqueue({limit: 100});
for (const a of activities.reverse()) {
await manager.queue.push({
await manager.firehose.push({
checkType: a instanceof Submission ? 'Submission' : 'Comment',
activity: a,
});
@@ -83,7 +83,7 @@ const action = async (req: express.Request, res: express.Response) => {
}
break;
}
} catch (err) {
} catch (err: any) {
if (!(err instanceof LoggedError)) {
mLogger.error(err, {subreddit: manager.displayLabel});
}

@@ -32,7 +32,7 @@ const addBot = () => {
req.botApp.logger.error(err);
}
});
} catch (err) {
} catch (err: any) {
if (newBot.error === undefined) {
newBot.error = err.message;
result.error = err.message;

@@ -1,31 +1,51 @@
import {Request, Response} from 'express';
import {authUserCheck, botRoute} from "../../../middleware";
import {authUserCheck, botRoute, subredditRoute} from "../../../middleware";
import Submission from "snoowrap/dist/objects/Submission";
import winston from 'winston';
import {COMMENT_URL_ID, parseLinkIdentifier, SUBMISSION_URL_ID} from "../../../../../util";
import {booleanMiddle} from "../../../../Common/middleware";
import {Manager} from "../../../../../Subreddit/Manager";
import {ActionedEvent} from "../../../../../Common/interfaces";

const commentReg = parseLinkIdentifier([COMMENT_URL_ID]);
const submissionReg = parseLinkIdentifier([SUBMISSION_URL_ID]);

const config = async (req: Request, res: Response) => {
const bot = req.serverBot;

const {subreddit} = req.query as any;
const {name: userName, realManagers = [], isOperator} = req.user as Express.User;
if (!isOperator && !realManagers.includes(subreddit)) {
return res.status(400).send('Cannot retrieve config for subreddit you do not manage or is not run by the bot')
}
const manager = bot.subManagers.find(x => x.displayLabel === subreddit);
if (manager === undefined) {
return res.status(400).send('Cannot retrieve config for subreddit you do not manage or is not run by the bot')
}
const manager = req.manager as Manager;

// @ts-ignore
const wiki = await manager.subreddit.getWikiPage(manager.wikiLocation).fetch();
return res.send(wiki.content_md);
};
export const configRoute = [authUserCheck(), botRoute(), config];
export const configRoute = [authUserCheck(), botRoute(), subredditRoute(), config];

const actionedEvents = async (req: Request, res: Response) => {

let managers: Manager[] = [];
const manager = req.manager as Manager | undefined;
if(manager !== undefined) {
managers.push(manager);
} else {
for(const manager of req.serverBot.subManagers) {
if((req.user?.realManagers as string[]).includes(manager.displayLabel)) {
managers.push(manager);
}
}
}

let events: ActionedEvent[] = [];
for(const m of managers) {
if(m.resources !== undefined) {
events = events.concat(await m.resources.getActionedEvents());
}
}

events.sort((a, b) => b.timestamp - a.timestamp);

return res.json(events);
};
export const actionedEventsRoute = [authUserCheck(), botRoute(), subredditRoute(false), actionedEvents];

const action = async (req: Request, res: Response) => {
const bot = req.serverBot;

@@ -46,7 +46,7 @@ const logs = (subLogMap: Map<string, LogEntry[]>) => {
console.log('Request closed detected with "close" listener');
res.destroy();
return;
} catch (e) {
} catch (e: any) {
if (e.code !== 'ECONNRESET') {
logger.error(e);
}

@@ -160,13 +160,19 @@ const status = () => {
submissions: acc.checks.submissions + curr.checks.submissions,
comments: acc.checks.comments + curr.checks.comments,
},
eventsCheckedTotal: acc.eventsCheckedTotal + curr.stats.eventsCheckedTotal,
checksRunTotal: acc.checksRunTotal + curr.stats.checksRunTotal,
checksTriggeredTotal: acc.checksTriggeredTotal + curr.stats.checksTriggeredTotal,
rulesRunTotal: acc.rulesRunTotal + curr.stats.rulesRunTotal,
rulesCachedTotal: acc.rulesCachedTotal + curr.stats.rulesCachedTotal,
rulesTriggeredTotal: acc.rulesTriggeredTotal + curr.stats.rulesTriggeredTotal,
actionsRunTotal: acc.actionsRunTotal + curr.stats.actionsRunTotal,
historical: {
allTime: {
eventsCheckedTotal: acc.historical.allTime.eventsCheckedTotal + curr.stats.historical.allTime.eventsCheckedTotal,
eventsActionedTotal: acc.historical.allTime.eventsActionedTotal + curr.stats.historical.allTime.eventsActionedTotal,
checksRunTotal: acc.historical.allTime.checksRunTotal + curr.stats.historical.allTime.checksRunTotal,
checksFromCacheTotal: acc.historical.allTime.checksFromCacheTotal + curr.stats.historical.allTime.checksFromCacheTotal,
checksTriggeredTotal: acc.historical.allTime.checksTriggeredTotal + curr.stats.historical.allTime.checksTriggeredTotal,
rulesRunTotal: acc.historical.allTime.rulesRunTotal + curr.stats.historical.allTime.rulesRunTotal,
rulesCachedTotal: acc.historical.allTime.rulesCachedTotal + curr.stats.historical.allTime.rulesCachedTotal,
rulesTriggeredTotal: acc.historical.allTime.rulesTriggeredTotal + curr.stats.historical.allTime.rulesTriggeredTotal,
actionsRunTotal: acc.historical.allTime.actionsRunTotal + curr.stats.historical.allTime.actionsRunTotal,
}
},
maxWorkers: acc.maxWorkers + curr.maxWorkers,
subMaxWorkers: acc.subMaxWorkers + curr.subMaxWorkers,
globalMaxWorkers: acc.globalMaxWorkers + curr.globalMaxWorkers,
@@ -178,13 +184,19 @@ const status = () => {
submissions: 0,
comments: 0,
},
eventsCheckedTotal: 0,
checksRunTotal: 0,
checksTriggeredTotal: 0,
rulesRunTotal: 0,
rulesCachedTotal: 0,
rulesTriggeredTotal: 0,
actionsRunTotal: 0,
historical: {
allTime: {
eventsCheckedTotal: 0,
eventsActionedTotal: 0,
checksRunTotal: 0,
checksFromCacheTotal: 0,
checksTriggeredTotal: 0,
rulesRunTotal: 0,
rulesCachedTotal: 0,
rulesTriggeredTotal: 0,
actionsRunTotal: 0,
}
},
maxWorkers: 0,
subMaxWorkers: 0,
globalMaxWorkers: 0,
@@ -221,7 +233,8 @@ const status = () => {
}, cumRaw);
const cacheReq = subManagerData.reduce((acc, curr) => acc + curr.stats.cache.totalRequests, 0);
const cacheMiss = subManagerData.reduce((acc, curr) => acc + curr.stats.cache.totalMiss, 0);
const aManagerWithDefaultResources = bot.subManagers.find(x => x.resources !== undefined && x.resources.cacheSettingsHash === 'default');
const sharedSub = subManagerData.find(x => x.stats.cache.isShared);
const sharedCount = sharedSub !== undefined ? sharedSub.stats.cache.currentKeyCount : 0;
let allManagerData: any = {
name: 'All',
status: bot.running ? 'RUNNING' : 'NOT RUNNING',
@@ -243,7 +256,7 @@ const status = () => {
stats: {
...rest,
cache: {
currentKeyCount: aManagerWithDefaultResources !== undefined ? await aManagerWithDefaultResources.resources.getCacheKeyCount() : 'N/A',
currentKeyCount: sharedCount + subManagerData.reduce((acc, curr) => curr.stats.cache.isShared ? acc : acc + curr.stats.cache.currentKeyCount,0),
isShared: false,
totalRequests: cacheReq,
totalMiss: cacheMiss,

@@ -22,7 +22,7 @@ import SimpleError from "../../Utils/SimpleError";
import {heartbeat} from "./routes/authenticated/applicationRoutes";
import logs from "./routes/authenticated/user/logs";
import status from './routes/authenticated/user/status';
import {actionRoute, configRoute} from "./routes/authenticated/user";
import {actionedEventsRoute, actionRoute, configRoute} from "./routes/authenticated/user";
import action from "./routes/authenticated/user/action";
import {authUserCheck, botRoute} from "./middleware";
import {opStats} from "../Common/util";
@@ -114,7 +114,7 @@ const rcbServer = async function (options: OperatorConfig) {
try {
httpServer = await server.listen(port);
io = new SocketServer(httpServer);
} catch (err) {
} catch (err: any) {
logger.error('Error occurred while initializing web or socket.io server', err);
err.logged = true;
throw err;
@@ -189,6 +189,8 @@ const rcbServer = async function (options: OperatorConfig) {

server.getAsync('/config', ...configRoute);

server.getAsync('/events', ...actionedEventsRoute);

server.getAsync('/action', ...action);

server.getAsync('/check', ...actionRoute);
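
(The spread arguments above are just the handler arrays exported from routes/authenticated/user, e.g. actionedEventsRoute. Assuming getAsync registers handlers the same way Express's get does, each registration is shorthand for listing the chain inline, as in this illustrative equivalence:)

// Equivalent inline form of: server.getAsync('/events', ...actionedEventsRoute);
server.getAsync('/events', authUserCheck(), botRoute(), subredditRoute(false), actionedEvents);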

@@ -204,7 +206,7 @@ const rcbServer = async function (options: OperatorConfig) {
if(newApp.error === undefined) {
try {
await newApp.initBots(causedBy);
} catch (err) {
} catch (err: any) {
if(newApp.error === undefined) {
newApp.error = err.message;
}

@@ -4,7 +4,7 @@ a {

.loading {
height: 35px;
fill: black;
fill: white;
display: none;
}

@@ -12,10 +12,6 @@ a {
display: inline;
}

.dark .loading {
fill: white;
}

.sub {
display: none;
}
@@ -91,3 +87,58 @@ a {
pointer-events: initial;
text-decoration: initial;
}

.blue {
color: rgb(97, 175, 239)
}
.red {
color: rgb(255 123 133);
}
.green {
color: rgb(170 255 109);
}
.purple {
color: rgb(224 124 253);
}
.yellow {
color: rgb(253 198 94);
}

.breadcrumb {
cursor: pointer;
}

#breadcrumbs::before,
.breadcrumb:not(:last-child)::after {
content: '›';
margin: 0 0.2rem;
}

.breadcrumb.array::before {
content: '[]';
}

.breadcrumb.object::before {
content: '{}';
}

#problems {
background-color: #474646;
padding: 10px;
user-select: text;
}

.problem {
align-items: center;
cursor: pointer;
display: flex;
padding: 0.25rem;
}

.problem-text {
margin-left: 0.5rem;
}

.problem .codicon-warning {
color: orange;
}

10
src/Web/assets/public/questionsymbol.svg
Normal file
@@ -0,0 +1,10 @@
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<defs>
<symbol id="q" viewBox="0 0 24 24">
<path stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
</symbol>
</defs>
</svg>
After Width: | Height: | Size: 486 B
1
src/Web/assets/public/tailwind.min.css
vendored
Normal file
File diff suppressed because one or more lines are too long
2
src/Web/assets/public/yaml/1565.entry.js
Normal file
File diff suppressed because one or more lines are too long
2
src/Web/assets/public/yaml/1569.entry.js
Normal file
@@ -0,0 +1,2 @@
"use strict";(self.webpackChunkdemo=self.webpackChunkdemo||[]).push([[1569],{1569:(e,t,n)=>{n.r(t),n.d(t,{conf:()=>s,language:()=>o});var s={comments:{lineComment:"#"}},o={defaultToken:"keyword",ignoreCase:!0,tokenPostfix:".azcli",str:/[^#\s]/,tokenizer:{root:[{include:"@comment"},[/\s-+@str*\s*/,{cases:{"@eos":{token:"key.identifier",next:"@popall"},"@default":{token:"key.identifier",next:"@type"}}}],[/^-+@str*\s*/,{cases:{"@eos":{token:"key.identifier",next:"@popall"},"@default":{token:"key.identifier",next:"@type"}}}]],type:[{include:"@comment"},[/-+@str*\s*/,{cases:{"@eos":{token:"key.identifier",next:"@popall"},"@default":"key.identifier"}}],[/@str+\s*/,{cases:{"@eos":{token:"string",next:"@popall"},"@default":"string"}}]],comment:[[/#.*$/,{cases:{"@eos":{token:"comment",next:"@popall"}}}]]}}}}]);
//# sourceMappingURL=1569.entry.js.map
2
src/Web/assets/public/yaml/1585.entry.js
Normal file
@@ -0,0 +1,2 @@
"use strict";(self.webpackChunkdemo=self.webpackChunkdemo||[]).push([[1585],{1585:(e,s,o)=>{o.r(s),o.d(s,{conf:()=>n,language:()=>l});var n={brackets:[["{","}"],["[","]"],["(",")"]],autoClosingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'},{open:"'",close:"'"}],surroundingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'},{open:"'",close:"'"}]},l={defaultToken:"",tokenPostfix:".dockerfile",variable:/\${?[\w]+}?/,tokenizer:{root:[{include:"@whitespace"},{include:"@comment"},[/(ONBUILD)(\s+)/,["keyword",""]],[/(ENV)(\s+)([\w]+)/,["keyword","",{token:"variable",next:"@arguments"}]],[/(FROM|MAINTAINER|RUN|EXPOSE|ENV|ADD|ARG|VOLUME|LABEL|USER|WORKDIR|COPY|CMD|STOPSIGNAL|SHELL|HEALTHCHECK|ENTRYPOINT)/,{token:"keyword",next:"@arguments"}]],arguments:[{include:"@whitespace"},{include:"@strings"},[/(@variable)/,{cases:{"@eos":{token:"variable",next:"@popall"},"@default":"variable"}}],[/\\/,{cases:{"@eos":"","@default":""}}],[/./,{cases:{"@eos":{token:"",next:"@popall"},"@default":""}}]],whitespace:[[/\s+/,{cases:{"@eos":{token:"",next:"@popall"},"@default":""}}]],comment:[[/(^#.*$)/,"comment","@popall"]],strings:[[/\\'$/,"","@popall"],[/\\'/,""],[/'$/,"string","@popall"],[/'/,"string","@stringBody"],[/"$/,"string","@popall"],[/"/,"string","@dblStringBody"]],stringBody:[[/[^\\\$']/,{cases:{"@eos":{token:"string",next:"@popall"},"@default":"string"}}],[/\\./,"string.escape"],[/'$/,"string","@popall"],[/'/,"string","@pop"],[/(@variable)/,"variable"],[/\\$/,"string"],[/$/,"string","@popall"]],dblStringBody:[[/[^\\\$"]/,{cases:{"@eos":{token:"string",next:"@popall"},"@default":"string"}}],[/\\./,"string.escape"],[/"$/,"string","@popall"],[/"/,"string","@pop"],[/(@variable)/,"variable"],[/\\$/,"string"],[/$/,"string","@popall"]]}}}}]);
//# sourceMappingURL=1585.entry.js.map
2
src/Web/assets/public/yaml/1593.entry.js
Normal file
@@ -0,0 +1,2 @@
"use strict";(self.webpackChunkdemo=self.webpackChunkdemo||[]).push([[1593],{1593:(e,s,o)=>{o.r(s),o.d(s,{conf:()=>t,language:()=>E});var t={comments:{blockComment:["(*","*)"]},brackets:[["{","}"],["[","]"],["(",")"]],autoClosingPairs:[{open:"[",close:"]"},{open:"{",close:"}"},{open:"(",close:")"},{open:"(*",close:"*)"},{open:"<*",close:"*>"},{open:"'",close:"'",notIn:["string","comment"]},{open:'"',close:'"',notIn:["string","comment"]}]},E={defaultToken:"",tokenPostfix:".m3",brackets:[{token:"delimiter.curly",open:"{",close:"}"},{token:"delimiter.parenthesis",open:"(",close:")"},{token:"delimiter.square",open:"[",close:"]"}],keywords:["AND","ANY","ARRAY","AS","BEGIN","BITS","BRANDED","BY","CASE","CONST","DIV","DO","ELSE","ELSIF","END","EVAL","EXCEPT","EXCEPTION","EXIT","EXPORTS","FINALLY","FOR","FROM","GENERIC","IF","IMPORT","IN","INTERFACE","LOCK","LOOP","METHODS","MOD","MODULE","NOT","OBJECT","OF","OR","OVERRIDES","PROCEDURE","RAISE","RAISES","READONLY","RECORD","REF","REPEAT","RETURN","REVEAL","SET","THEN","TO","TRY","TYPE","TYPECASE","UNSAFE","UNTIL","UNTRACED","VALUE","VAR","WHILE","WITH"],reservedConstNames:["ABS","ADR","ADRSIZE","BITSIZE","BYTESIZE","CEILING","DEC","DISPOSE","FALSE","FIRST","FLOAT","FLOOR","INC","ISTYPE","LAST","LOOPHOLE","MAX","MIN","NARROW","NEW","NIL","NUMBER","ORD","ROUND","SUBARRAY","TRUE","TRUNC","TYPECODE","VAL"],reservedTypeNames:["ADDRESS","ANY","BOOLEAN","CARDINAL","CHAR","EXTENDED","INTEGER","LONGCARD","LONGINT","LONGREAL","MUTEX","NULL","REAL","REFANY","ROOT","TEXT"],operators:["+","-","*","/","&","^","."],relations:["=","#","<","<=",">",">=","<:",":"],delimiters:["|","..","=>",",",";",":="],symbols:/[>=<#.,:;+\-*/&^]+/,escapes:/\\(?:[\\fnrt"']|[0-7]{3})/,tokenizer:{root:[[/_\w*/,"invalid"],[/[a-zA-Z][a-zA-Z0-9_]*/,{cases:{"@keywords":{token:"keyword.$0"},"@reservedConstNames":{token:"constant.reserved.$0"},"@reservedTypeNames":{token:"type.reserved.$0"},"@default":"identifier"}}],{include:"@whitespace"},[/[{}()\[\]]/,"@brackets"],[/[0-9]+\.[0-9]+(?:[DdEeXx][\+\-]?[0-9]+)?/,"number.float"],[/[0-9]+(?:\_[0-9a-fA-F]+)?L?/,"number"],[/@symbols/,{cases:{"@operators":"operators","@relations":"operators","@delimiters":"delimiter","@default":"invalid"}}],[/'[^\\']'/,"string.char"],[/(')(@escapes)(')/,["string.char","string.escape","string.char"]],[/'/,"invalid"],[/"([^"\\]|\\.)*$/,"invalid"],[/"/,"string.text","@text"]],text:[[/[^\\"]+/,"string.text"],[/@escapes/,"string.escape"],[/\\./,"invalid"],[/"/,"string.text","@pop"]],comment:[[/\(\*/,"comment","@push"],[/\*\)/,"comment","@pop"],[/./,"comment"]],pragma:[[/<\*/,"keyword.pragma","@push"],[/\*>/,"keyword.pragma","@pop"],[/./,"keyword.pragma"]],whitespace:[[/[ \t\r\n]+/,"white"],[/\(\*/,"comment","@comment"],[/<\*/,"keyword.pragma","@pragma"]]}}}}]);
//# sourceMappingURL=1593.entry.js.map
2
src/Web/assets/public/yaml/1594.entry.js
Normal file
@@ -0,0 +1,2 @@
"use strict";(self.webpackChunkdemo=self.webpackChunkdemo||[]).push([[1594],{1594:(e,o,n)=>{n.r(o),n.d(o,{conf:()=>t,language:()=>r});var t={comments:{lineComment:"'"},brackets:[["(",")"],["[","]"],["If","EndIf"],["While","EndWhile"],["For","EndFor"],["Sub","EndSub"]],autoClosingPairs:[{open:'"',close:'"',notIn:["string","comment"]},{open:"(",close:")",notIn:["string","comment"]},{open:"[",close:"]",notIn:["string","comment"]}]},r={defaultToken:"",tokenPostfix:".sb",ignoreCase:!0,brackets:[{token:"delimiter.array",open:"[",close:"]"},{token:"delimiter.parenthesis",open:"(",close:")"},{token:"keyword.tag-if",open:"If",close:"EndIf"},{token:"keyword.tag-while",open:"While",close:"EndWhile"},{token:"keyword.tag-for",open:"For",close:"EndFor"},{token:"keyword.tag-sub",open:"Sub",close:"EndSub"}],keywords:["Else","ElseIf","EndFor","EndIf","EndSub","EndWhile","For","Goto","If","Step","Sub","Then","To","While"],tagwords:["If","Sub","While","For"],operators:[">","<","<>","<=",">=","And","Or","+","-","*","/","="],identifier:/[a-zA-Z_][\w]*/,symbols:/[=><:+\-*\/%\.,]+/,escapes:/\\(?:[abfnrtv\\"']|x[0-9A-Fa-f]{1,4}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})/,tokenizer:{root:[{include:"@whitespace"},[/(@identifier)(?=[.])/,"type"],[/@identifier/,{cases:{"@keywords":{token:"keyword.$0"},"@operators":"operator","@default":"variable.name"}}],[/([.])(@identifier)/,{cases:{$2:["delimiter","type.member"],"@default":""}}],[/\d*\.\d+/,"number.float"],[/\d+/,"number"],[/[()\[\]]/,"@brackets"],[/@symbols/,{cases:{"@operators":"operator","@default":"delimiter"}}],[/"([^"\\]|\\.)*$/,"string.invalid"],[/"/,"string","@string"]],whitespace:[[/[ \t\r\n]+/,""],[/(\').*$/,"comment"]],string:[[/[^\\"]+/,"string"],[/@escapes/,"string.escape"],[/\\./,"string.escape.invalid"],[/"C?/,"string","@pop"]]}}}}]);
//# sourceMappingURL=1594.entry.js.map
2
src/Web/assets/public/yaml/162.entry.js
Normal file
@@ -0,0 +1,2 @@
"use strict";(self.webpackChunkdemo=self.webpackChunkdemo||[]).push([[162],{162:(e,s,t)=>{t.r(s),t.d(s,{conf:()=>n,language:()=>r});var n={comments:{lineComment:"#"},brackets:[["{","}"],["[","]"],["(",")"]],autoClosingPairs:[{open:"'",close:"'",notIn:["string"]},{open:'"',close:'"',notIn:["string"]},{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"}]},r={defaultToken:"",tokenPostfix:".rq",brackets:[{token:"delimiter.curly",open:"{",close:"}"},{token:"delimiter.parenthesis",open:"(",close:")"},{token:"delimiter.square",open:"[",close:"]"},{token:"delimiter.angle",open:"<",close:">"}],keywords:["add","as","asc","ask","base","by","clear","construct","copy","create","data","delete","desc","describe","distinct","drop","false","filter","from","graph","group","having","in","insert","limit","load","minus","move","named","not","offset","optional","order","prefix","reduced","select","service","silent","to","true","undef","union","using","values","where","with"],builtinFunctions:["a","abs","avg","bind","bnode","bound","ceil","coalesce","concat","contains","count","datatype","day","encode_for_uri","exists","floor","group_concat","hours","if","iri","isblank","isiri","isliteral","isnumeric","isuri","lang","langmatches","lcase","max","md5","min","minutes","month","now","rand","regex","replace","round","sameterm","sample","seconds","sha1","sha256","sha384","sha512","str","strafter","strbefore","strdt","strends","strlang","strlen","strstarts","struuid","substr","sum","timezone","tz","ucase","uri","uuid","year"],ignoreCase:!0,tokenizer:{root:[[/<[^\s\u00a0>]*>?/,"tag"],{include:"@strings"},[/#.*/,"comment"],[/[{}()\[\]]/,"@brackets"],[/[;,.]/,"delimiter"],[/[_\w\d]+:(\.(?=[\w_\-\\%])|[:\w_-]|\\[-\\_~.!$&'()*+,;=/?#@%]|%[a-f\d][a-f\d])*/,"tag"],[/:(\.(?=[\w_\-\\%])|[:\w_-]|\\[-\\_~.!$&'()*+,;=/?#@%]|%[a-f\d][a-f\d])+/,"tag"],[/[$?]?[_\w\d]+/,{cases:{"@keywords":{token:"keyword"},"@builtinFunctions":{token:"predefined.sql"},"@default":"identifier"}}],[/\^\^/,"operator.sql"],[/\^[*+\-<>=&|^\/!?]*/,"operator.sql"],[/[*+\-<>=&|\/!?]/,"operator.sql"],[/@[a-z\d\-]*/,"metatag.html"],[/\s+/,"white"]],strings:[[/'([^'\\]|\\.)*$/,"string.invalid"],[/'$/,"string.sql","@pop"],[/'/,"string.sql","@stringBody"],[/"([^"\\]|\\.)*$/,"string.invalid"],[/"$/,"string.sql","@pop"],[/"/,"string.sql","@dblStringBody"]],stringBody:[[/[^\\']+/,"string.sql"],[/\\./,"string.escape"],[/'/,"string.sql","@pop"]],dblStringBody:[[/[^\\"]+/,"string.sql"],[/\\./,"string.escape"],[/"/,"string.sql","@pop"]]}}}}]);
//# sourceMappingURL=162.entry.js.map
2
src/Web/assets/public/yaml/1623.entry.js
Normal file
File diff suppressed because one or more lines are too long
2
src/Web/assets/public/yaml/1649.entry.js
Normal file
@@ -0,0 +1,2 @@
"use strict";(self.webpackChunkdemo=self.webpackChunkdemo||[]).push([[1649],{1649:(e,t,n)=>{n.r(t),n.d(t,{conf:()=>o,language:()=>s});var o={brackets:[["{","}"],["[","]"],["(",")"]],autoClosingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'},{open:"'",close:"'"}],surroundingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'},{open:"'",close:"'"}]},s={tokenPostfix:".tcl",specialFunctions:["set","unset","rename","variable","proc","coroutine","foreach","incr","append","lappend","linsert","lreplace"],mainFunctions:["if","then","elseif","else","case","switch","while","for","break","continue","return","package","namespace","catch","exit","eval","expr","uplevel","upvar"],builtinFunctions:["file","info","concat","join","lindex","list","llength","lrange","lsearch","lsort","split","array","parray","binary","format","regexp","regsub","scan","string","subst","dict","cd","clock","exec","glob","pid","pwd","close","eof","fblocked","fconfigure","fcopy","fileevent","flush","gets","open","puts","read","seek","socket","tell","interp","after","auto_execok","auto_load","auto_mkindex","auto_reset","bgerror","error","global","history","load","source","time","trace","unknown","unset","update","vwait","winfo","wm","bind","event","pack","place","grid","font","bell","clipboard","destroy","focus","grab","lower","option","raise","selection","send","tk","tkwait","tk_bisque","tk_focusNext","tk_focusPrev","tk_focusFollowsMouse","tk_popup","tk_setPalette"],symbols:/[=><!~?:&|+\-*\/\^%]+/,brackets:[{open:"(",close:")",token:"delimiter.parenthesis"},{open:"{",close:"}",token:"delimiter.curly"},{open:"[",close:"]",token:"delimiter.square"}],escapes:/\\(?:[abfnrtv\\"'\[\]\{\};\$]|x[0-9A-Fa-f]{1,4}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})/,variables:/(?:\$+(?:(?:\:\:?)?[a-zA-Z_]\w*)+)/,tokenizer:{root:[[/[a-zA-Z_]\w*/,{cases:{"@specialFunctions":{token:"keyword.flow",next:"@specialFunc"},"@mainFunctions":"keyword","@builtinFunctions":"variable","@default":"operator.scss"}}],[/\s+\-+(?!\d|\.)\w*|{\*}/,"metatag"],{include:"@whitespace"},[/[{}()\[\]]/,"@brackets"],[/@symbols/,"operator"],[/\$+(?:\:\:)?\{/,{token:"identifier",next:"@nestedVariable"}],[/@variables/,"type.identifier"],[/\.(?!\d|\.)[\w\-]*/,"operator.sql"],[/\d+(\.\d+)?/,"number"],[/\d+/,"number"],[/;/,"delimiter"],[/"/,{token:"string.quote",bracket:"@open",next:"@dstring"}],[/'/,{token:"string.quote",bracket:"@open",next:"@sstring"}]],dstring:[[/\[/,{token:"@brackets",next:"@nestedCall"}],[/\$+(?:\:\:)?\{/,{token:"identifier",next:"@nestedVariable"}],[/@variables/,"type.identifier"],[/[^\\$\[\]"]+/,"string"],[/@escapes/,"string.escape"],[/"/,{token:"string.quote",bracket:"@close",next:"@pop"}]],sstring:[[/\[/,{token:"@brackets",next:"@nestedCall"}],[/\$+(?:\:\:)?\{/,{token:"identifier",next:"@nestedVariable"}],[/@variables/,"type.identifier"],[/[^\\$\[\]']+/,"string"],[/@escapes/,"string.escape"],[/'/,{token:"string.quote",bracket:"@close",next:"@pop"}]],whitespace:[[/[ \t\r\n]+/,"white"],[/#.*\\$/,{token:"comment",next:"@newlineComment"}],[/#.*(?!\\)$/,"comment"]],newlineComment:[[/.*\\$/,"comment"],[/.*(?!\\)$/,{token:"comment",next:"@pop"}]],nestedVariable:[[/[^\{\}\$]+/,"type.identifier"],[/\}/,{token:"identifier",next:"@pop"}]],nestedCall:[[/\[/,{token:"@brackets",next:"@nestedCall"}],[/\]/,{token:"@brackets",next:"@pop"}],{include:"root"}],specialFunc:[[/"/,{token:"string",next:"@dstring"}],[/'/,{token:"string",next:"@sstring"}],[/\S+/,{token:"type",next:"@pop"}]]}}}}]);
//# sourceMappingURL=1649.entry.js.map
2
src/Web/assets/public/yaml/1941.entry.js
Normal file
File diff suppressed because one or more lines are too long
2
src/Web/assets/public/yaml/2014.entry.js
Normal file
@@ -0,0 +1,2 @@
"use strict";(self.webpackChunkdemo=self.webpackChunkdemo||[]).push([[2014],{1218:(e,n,t)=>{t.r(n),t.d(n,{conf:()=>r,language:()=>o});var r={comments:{lineComment:"#"},brackets:[["{","}"],["[","]"],["(",")"]],autoClosingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'},{open:"'",close:"'"}],surroundingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'},{open:"'",close:"'"}],folding:{offSide:!0}},o={tokenPostfix:".yaml",brackets:[{token:"delimiter.bracket",open:"{",close:"}"},{token:"delimiter.square",open:"[",close:"]"}],keywords:["true","True","TRUE","false","False","FALSE","null","Null","Null","~"],numberInteger:/(?:0|[+-]?[0-9]+)/,numberFloat:/(?:0|[+-]?[0-9]+)(?:\.[0-9]+)?(?:e[-+][1-9][0-9]*)?/,numberOctal:/0o[0-7]+/,numberHex:/0x[0-9a-fA-F]+/,numberInfinity:/[+-]?\.(?:inf|Inf|INF)/,numberNaN:/\.(?:nan|Nan|NAN)/,numberDate:/\d{4}-\d\d-\d\d([Tt ]\d\d:\d\d:\d\d(\.\d+)?(( ?[+-]\d\d?(:\d\d)?)|Z)?)?/,escapes:/\\(?:[btnfr\\"']|[0-7][0-7]?|[0-3][0-7]{2})/,tokenizer:{root:[{include:"@whitespace"},{include:"@comment"},[/%[^ ]+.*$/,"meta.directive"],[/---/,"operators.directivesEnd"],[/\.{3}/,"operators.documentEnd"],[/[-?:](?= )/,"operators"],{include:"@anchor"},{include:"@tagHandle"},{include:"@flowCollections"},{include:"@blockStyle"},[/@numberInteger(?![ \t]*\S+)/,"number"],[/@numberFloat(?![ \t]*\S+)/,"number.float"],[/@numberOctal(?![ \t]*\S+)/,"number.octal"],[/@numberHex(?![ \t]*\S+)/,"number.hex"],[/@numberInfinity(?![ \t]*\S+)/,"number.infinity"],[/@numberNaN(?![ \t]*\S+)/,"number.nan"],[/@numberDate(?![ \t]*\S+)/,"number.date"],[/(".*?"|'.*?'|.*?)([ \t]*)(:)( |$)/,["type","white","operators","white"]],{include:"@flowScalars"},[/[^#]+/,{cases:{"@keywords":"keyword","@default":"string"}}]],object:[{include:"@whitespace"},{include:"@comment"},[/\}/,"@brackets","@pop"],[/,/,"delimiter.comma"],[/:(?= )/,"operators"],[/(?:".*?"|'.*?'|[^,\{\[]+?)(?=: )/,"type"],{include:"@flowCollections"},{include:"@flowScalars"},{include:"@tagHandle"},{include:"@anchor"},{include:"@flowNumber"},[/[^\},]+/,{cases:{"@keywords":"keyword","@default":"string"}}]],array:[{include:"@whitespace"},{include:"@comment"},[/\]/,"@brackets","@pop"],[/,/,"delimiter.comma"],{include:"@flowCollections"},{include:"@flowScalars"},{include:"@tagHandle"},{include:"@anchor"},{include:"@flowNumber"},[/[^\],]+/,{cases:{"@keywords":"keyword","@default":"string"}}]],multiString:[[/^( +).+$/,"string","@multiStringContinued.$1"]],multiStringContinued:[[/^( *).+$/,{cases:{"$1==$S2":"string","@default":{token:"@rematch",next:"@popall"}}}]],whitespace:[[/[ \t\r\n]+/,"white"]],comment:[[/#.*$/,"comment"]],flowCollections:[[/\[/,"@brackets","@array"],[/\{/,"@brackets","@object"]],flowScalars:[[/"([^"\\]|\\.)*$/,"string.invalid"],[/'([^'\\]|\\.)*$/,"string.invalid"],[/'[^']*'/,"string"],[/"/,"string","@doubleQuotedString"]],doubleQuotedString:[[/[^\\"]+/,"string"],[/@escapes/,"string.escape"],[/\\./,"string.escape.invalid"],[/"/,"string","@pop"]],blockStyle:[[/[>|][0-9]*[+-]?$/,"operators","@multiString"]],flowNumber:[[/@numberInteger(?=[ \t]*[,\]\}])/,"number"],[/@numberFloat(?=[ \t]*[,\]\}])/,"number.float"],[/@numberOctal(?=[ \t]*[,\]\}])/,"number.octal"],[/@numberHex(?=[ \t]*[,\]\}])/,"number.hex"],[/@numberInfinity(?=[ \t]*[,\]\}])/,"number.infinity"],[/@numberNaN(?=[ \t]*[,\]\}])/,"number.nan"],[/@numberDate(?=[ \t]*[,\]\}])/,"number.date"]],tagHandle:[[/\![^ ]*/,"tag"]],anchor:[[/[&*][^ ]+/,"namespace"]]}}}}]);
//# sourceMappingURL=2014.entry.js.map
2
src/Web/assets/public/yaml/2271.entry.js
Normal file
File diff suppressed because one or more lines are too long
2
src/Web/assets/public/yaml/2287.entry.js
Normal file
File diff suppressed because one or more lines are too long
2
src/Web/assets/public/yaml/2388.entry.js
Normal file
File diff suppressed because one or more lines are too long
2
src/Web/assets/public/yaml/2501.entry.js
Normal file
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff