Compare commits

...

112 Commits

Author SHA1 Message Date
FoxxMD
dac6541e28 Merge branch 'edge' 2021-11-01 16:12:43 -04:00
FoxxMD
2504a34a34 refactor(docker): Remove old steps from node-canvas dep
Not using node-canvas anymore so dockerfile can be simplified #48
2021-11-01 15:55:12 -04:00
FoxxMD
e19639ad0d fix(ui): Forgot to add tailwind css to git
Oops!
2021-11-01 15:06:50 -04:00
FoxxMD
b8084e02b5 Merge branch 'colorImprovements' into edge 2021-11-01 14:58:01 -04:00
FoxxMD
97906281e6 Merge branch 'edge' 2021-11-01 14:55:10 -04:00
FoxxMD
2cea119657 feat(ui): Add tooltip descriptions for depleted/limit reset stats
Closes #36
2021-11-01 14:52:35 -04:00
FoxxMD
6f16d289dd feat(ui): Add lang attribute for html
Closes #35
2021-11-01 14:22:20 -04:00
FoxxMD
a96575c6b3 refactor(ui): Use local tailwind css asset
Closes #34
2021-11-01 14:19:47 -04:00
FoxxMD
0a82e83352 refactor(ui)!: Implement colorblind color changes for rest of UI and remove dark/light mode
* Apply changes made in ac409dce to the rest of the application
* Remove dark/light mode -- now always dark mode (easier to maintain this way)
* Remove dependency on tailwind dark css
2021-11-01 14:14:49 -04:00
FoxxMD
d5e1cdec61 fix(criteria): Improve criteria filtering for removed/deleted activities and logging
* Use different logging messages when criteria is not available due to mod permissions (property not available to non-mods)
* Change logging level for missing/unavailable criteria to reduce logging noise. On unavailable use debug, on missing use warn
* Improve activity removed/deleted detection based on whether activity is moddable by current user
2021-11-01 13:25:54 -04:00
FoxxMD
ef40c25b09 feat(attribution): Add additional subreddit and activity filtering functionality to criteria
* Refactor subreddit filtering with include/exclude to use subreddit state
* Add submissionState and commentState filters
2021-11-01 11:26:55 -04:00
FoxxMD
6370a2976a Merge branch 'edge' into colorImprovements 2021-11-01 10:23:40 -04:00
FoxxMD
d8180299ea fix(author): Fix missing true return statement for author flair check 2021-11-01 10:23:22 -04:00
FoxxMD
ac409dce3d POC color improvements for color blindness
Needs another pass and consolidate/clean up for pages other than status
2021-10-20 21:12:31 -04:00
FoxxMD
56c007c20d feat(author): Implement author profile description regex/string testing
May test "description" in authorIs as a regular expression or string literal -- or as an array of the aforementioned values
2021-10-20 19:52:46 -04:00
FoxxMD
487f13f704 Merge branch 'edge' 2021-10-12 11:56:51 -04:00
FoxxMD
00b9d87cdc Remove unused types package 2021-10-12 11:56:29 -04:00
FoxxMD
2c797e0b9b docs(image): Add documentation for image comparison
#26
2021-10-12 11:10:14 -04:00
FoxxMD
4a2b27bfbf fix(recent): Revert debug change for concurrency on activity comparisons 2021-10-12 11:08:53 -04:00
FoxxMD
463a4dc0eb feat(image): Implement perceptual hashing image comparison
* Generate perceptual hashes using blockhash-js of images that can be cache/stored
* Take advantage of reddit thumbnail code ImageData to hash lower-res to begin with (but represent full url)
* Refactor imageDetection config so hash and pixel approaches have different configs
* Cache phash results to reduce reddit traffic and speed up performance

Addresses cpu/memory issues with pixel comparison. Allow pixel for finer comparisons if needed using tiered thresholds. Closes #26
2021-10-11 15:28:48 -04:00
FoxxMD
4b3bea661d Merge branch 'imageComparisonOptimization' into edge
# Conflicts:
#	src/Rule/RecentActivityRule.ts
2021-10-08 13:50:49 -04:00
FoxxMD
976f310f51 refactor(image): Remove resemblejs dependency
* Refactor image comparison to use pixelmatch only so resemblejs can be removed (too much memory usage)
* Heavier usage of sharp to get images into same dimensions prior to pixelmatch
* Refactor image conversion into ImageData to clean up utils/recent activity rule
2021-10-08 13:49:46 -04:00
FoxxMD
4d8d3dc266 fix(docker): Add libvips dependency 2021-10-08 13:47:35 -04:00
FoxxMD
ce9e678c4c fix(image): clone sharp instance so converting between outputs doesn't cause issues 2021-10-07 18:06:05 -04:00
FoxxMD
8cf30b6b7d refactor: Introduce staggered startup for bots/polling to decrease load on host/reddit and improve image comparison performance
* Implement staggered startup for bots (reddit accounts, top-level)
* Implement staggered startup for managers (subreddits) and subreddit polling
* Introduce random -1/+1 second to polling interval for every stream to ensure none are synced so there is no instantaneous spike in cpu/traffic/memory on host/reddit
* Add user-configurable stagger interval for shared mod polling
* Implement second image comparison approach with pixelmatch for reduced memory usage when image dimensions are exactly the same
* Use sharp to resize images to 400 width max when using resemblejs to reduce memory usage
2021-10-07 17:13:27 -04:00
FoxxMD
2b6d08f8a5 Dummy commit
To get github actions to run
2021-10-06 18:03:26 -04:00
FoxxMD
f8fc63991f feat(image): Leverage reddit image previews for download/comparison
* Refactor image acquisition/parsing to use on-demand fetching and track different resolutions
* Try to use smaller previews (under 1000px), when possible, for comparing images and downloading
* Do image comparisons in parallel
2021-10-06 17:33:56 -04:00
FoxxMD
d96a1f677c refactor(image): Profile image comparison analysis time to debug statement 2021-10-06 14:14:36 -04:00
FoxxMD
b14689791c refactor(image): Document resemble compare options for performance
* Disabling color comparison is slower than doing nothing
* Restricting comparison to a max of 800x800 pixels is slower than doing nothing
2021-10-06 14:13:50 -04:00
FoxxMD
b70c877e44 refactor(polling): Improve processed list configuration/usage
* Change 'after' type to string duration for friendlier configuration
* Decrease list size trigger === limit instead of 2x (not necessary to have a list that big for polling new)
* Increase initial shared mod polling to max limit (100)
2021-10-06 12:54:23 -04:00
FoxxMD
041655376a feat(filter): Implement age test for comment/submission
Closes #27
2021-10-06 12:54:23 -04:00
FoxxMD
e1eab7696b feat(polling): Implement memory bloat mitigation for long-running SnooStorm polling
The list of processed activities SnooStorm uses to ensure only new activities are emitted when polling is never cleared. MayorMonty/Snoostorm#35

To mitigate the memory bloat this creates when RCB runs for a long time on high-volume subreddits implement user-configurable (with defaults) behavior for clearing the processed activity list. Default values ensure clearing the list does not interfere with checking for new activities.
2021-10-05 14:45:10 -04:00
FoxxMD
65d1d36d53 feat(cache): Implement batching for subreddit info
When using full criteria for subreddit state we can save a ton of api calls by getting info for all uncached subreddits at the same time rather than individually
2021-09-29 16:20:02 -04:00
FoxxMD
120d776fc2 feat(ui): Display subreddit and subreddit crit statistics in cache call breakdown popup 2021-09-29 10:47:33 -04:00
FoxxMD
425e16295b refactor(cache): Better cache usage for some known state conditions
* Don't store subreddit state cache results for now since nothing computationally expensive or requires api requests
* Return early on item state check if there is nothing to check so we don't store an empty result in cache
2021-09-29 10:47:05 -04:00
FoxxMD
dd7e9d72cc fix(subreddit): Fix subreddit state testing edge case and undefined criteria
* allow both over_18 and over18 criteria in case user accidentally used name from sub/comm state
* correctly determine if subreddit property exists when testing
* fix cache hit subreddit name logging
2021-09-29 10:21:16 -04:00
FoxxMD
55535ddd62 fix(regex): Fix regex generation for simple subreddit strings
* Trim value before parsing
* If not a valid regex string then when generating regex from simple string add qualifiers for beginning/end of string so any matches must be exact
2021-09-29 09:22:40 -04:00
FoxxMD
631e21452c Merge branch 'edge' 2021-09-28 16:36:13 -04:00
FoxxMD
be6fa4dd50 fix(cache): Fix accidental re-use of maps 2021-09-24 16:13:58 -04:00
FoxxMD
0d7a82836f refactor(cache): Move bot usage stats into cache
* Moving into cache means stats will persist after restart (yay!)
* Refactored stats structure to be simpler
2021-09-24 15:24:19 -04:00
FoxxMD
d9a59b6824 feat(recent): Print log statement when image processing is causing rule to take a long time 2021-09-23 13:23:58 -04:00
FoxxMD
ddbf8c3189 fix(recent): Actually use filtered activities when using submission as reference 2021-09-23 12:58:43 -04:00
FoxxMD
8393c471b2 fix(image): Dynamically import resemblejs for better compatibility on systems not supporting node-canvas
* By dynamically importing the module any user not using image comparison will not be affected by a lack of node-canvas dependency
* try-catch on import and provide a helpful error message about node-canvas dep
2021-09-23 10:38:34 -04:00
FoxxMD
fe66a2e8f7 fix(docker): Update build to build node-canvas from source 2021-09-23 10:08:26 -04:00
FoxxMD
4b0284102d fix: Improve image comparison threshold and results for typescript 2021-09-22 22:15:00 -04:00
FoxxMD
95529f14a8 feat(recent): Implement pixel-level image comparison when using a reference (image) submission 2021-09-22 16:52:56 -04:00
FoxxMD
26af2c4e4d fix(recent): don't include submission being checked when filtering by reference 2021-09-22 10:29:06 -04:00
FoxxMD
044c293f34 fix(attribution): Update aggregateOn defaults to align with expected behavior
Majority of mods that have used this rule assume it does not aggregate on reddit domains by default (only external links), which is reasonable.
So update the default to follow this assumption.
2021-09-22 10:11:25 -04:00
FoxxMD
a082c9e593 doc(attribution): Remove unused useSubmissionAsReference property 2021-09-22 09:36:44 -04:00
FoxxMD
4f3685a1f5 Merge branch 'edge' 2021-09-21 15:18:38 -04:00
FoxxMD
e242c36c09 fix(tooling): Fix tag pattern for git cliff 2021-09-21 15:18:26 -04:00
FoxxMD
d2d945db2c Merge branch 'edge' 2021-09-21 15:08:28 -04:00
FoxxMD
c5018183e0 fix(attribution): Improve parsing of domain type to fix bug with galleries
* Add `redditMedia` as distinct domain type from `self` for more granular aggregation
* Use `redditMedia` to fix bug where video and galleries were being counted as `media`
2021-09-20 16:34:29 -04:00
FoxxMD
c5358f196d feat(author): Handle shadowbanned users
* Allow checking if user is shadowbanned via authorIs (AuthorCriteria)
* try-catch on history get or author criteria to try to detect shadowbanned user for a more descriptive error
2021-09-20 13:49:35 -04:00
FoxxMD
1d9f8245f9 feat(tooling): scope-based sorting with BC note for git cliff generation 2021-09-20 11:51:25 -04:00
FoxxMD
20b37f3a40 Initial git cliff config 2021-09-20 11:03:37 -04:00
FoxxMD
910f7f79ef Merge branch 'edge' 2021-09-20 10:54:32 -04:00
FoxxMD
641892cd3e fix: Fix activity push to manager
Should only be using firehose
2021-09-20 09:37:32 -04:00
FoxxMD
1dfb9779e7 feat(attribution): Allow specifying aggregateOn filter when using domain blacklist
May not make sense all the time but a properly configured config could take advantage of this
2021-09-17 15:14:36 -04:00
FoxxMD
40111c54a2 feat(message): Add a markdown formatted 'ruleSummary' property to content template data 2021-09-17 14:38:39 -04:00
FoxxMD
b4745e3b45 feat(message): Implement arbitrary message recipient to enable modmail
* Can send message to any entity (user/subreddit) using 'to' property, or leave unspecified to send to author of activity
* Parse entity type (user or subreddit) from to value and ensure its in a valid format we can understand with regex
2021-09-17 13:36:28 -04:00
FoxxMD
838da497ce feat: Add more detail to actioned events and logging for action results 2021-09-17 12:46:00 -04:00
FoxxMD
01755eada5 feat: De-dup activities from different polling sources
Previously CM would process the same activity multiple times if it was ingested from two different polling sources (modqueue and unmoderated/newSub). Introduce queue control flow to ensure activity is de-duped or refreshed before processing if this scenario occurs.

* Use a queue (firehose) to bottleneck all activities from different sources before pushing to worker queues
* Keep track of items currently ingested but not completely processed and use firehose to de-dupe queued items (flag to refresh) or re-queue if currently processing (and flag to refresh)
2021-09-17 11:50:49 -04:00
FoxxMD
1ff59ad6e8 feat: Add report count comparison to comment/submission state 2021-09-17 10:21:46 -04:00
FoxxMD
d8fd8e6140 feat: Add score (karma) comparison to comment/submission state 2021-09-17 10:13:21 -04:00
FoxxMD
255ffdb417 fix(recent): Deduplicate present subreddits 2021-09-16 16:48:00 -04:00
FoxxMD
f0199366a0 feat(history)!: Implement subreddit state and subreddit name parsing
* Implement total threshold to compare filtered activities against window activities

BREAKING CHANGE: include/exclude now filters POST activity window and all comparisons are done on those filtered activities against window activities
2021-09-16 15:36:06 -04:00
FoxxMD
20c724cab5 fix: Fix bug where non-media domains were not counted for attribution rule 2021-09-16 15:33:59 -04:00
FoxxMD
a670975f14 feat(repeat activity): Implement subreddit state and regex parsing 2021-09-16 14:12:16 -04:00
FoxxMD
ee13feaf57 feat(recent activity): Implement subreddit state and regex parsing for recent activity
* SubredditState can be used to check some subreddit attributes alongside, or in place of, a subreddit name
* Regex parsing for subreddit name string in recent activity
2021-09-16 13:34:19 -04:00
FoxxMD
23a24b4448 feat(regex)!: Simplify regex parsing from config
Reduce regex complexity in config by parsing a normal regex straight from config string value (including flags)

BREAKING CHANGE: regex must now be enclosed in forward slashes, flags must be on regex value, and regexFlags property has been removed
2021-09-16 10:53:33 -04:00
FoxxMD
a11b667d5e Merge branch 'edge' 2021-09-13 16:16:55 -04:00
FoxxMD
269b1620b9 fix(regex): fix when to add match sample to summary
Only include summary if there were non-zero matches
2021-09-07 15:04:30 -04:00
FoxxMD
6dee734440 fix(recent activity): fix subreddit summary in result
* Only include if any subreddits with activity found
* Use correct subreddit array
2021-09-07 15:03:56 -04:00
FoxxMD
3aea422eff fix(cache): check manager has cache (valid config) before trying to get actioned events 2021-09-07 14:56:03 -04:00
FoxxMD
e707e5a9a8 fix(manager): revert commented notification line from debugging 2021-09-07 14:07:00 -04:00
FoxxMD
2a24eea3a5 fix(rule): fix regex rule matching default behavior and improve log message
* Default to global flag if none specified so that all matches per activity are found
* Improve result message section ordering and display a sample of up to 4 found matches
2021-09-07 14:06:30 -04:00
FoxxMD
8ad8297c0e feat(rule): improve recent activity result (log message) by listing only subreddits with found activity 2021-09-07 14:04:26 -04:00
FoxxMD
0b94a14ac1 feat(ui): improve actioned events interactions
* Refactor api to get all accessible events, sorted by time, when subreddit is not specified
* Add subreddit name to actioned event data to differentiate between events
* Show actioned events link in "All" subreddit view
* Remove user-select css style (left over from config template)
* Format timestamp to be more human friendly
* Remove success/triggered text and just use checkmarks (same as log)
2021-09-07 13:33:35 -04:00
FoxxMD
a04e0d2a9b fix(cache): Set actioned events not to expire in cache 2021-09-07 13:26:30 -04:00
FoxxMD
3a1348c370 feat(ui): move actioned events link to "Actions Run" statistic
More intuitive location
2021-09-07 12:59:51 -04:00
FoxxMD
507818037f feat(cache): refactor actioned events into cache for persistence and make number stored configurable
* refactor actioned events into bot-configured cache so they can be persisted between restarts
* add config params for actionedEventsMax and actionedEventsDefault to allow defining defaults at operator/bot/subreddit level
2021-09-07 12:55:19 -04:00
FoxxMD
2c1f6daf4f Implement load config from URL for editor 2021-09-01 10:15:46 -04:00
FoxxMD
fef79472fe re-add missing heartbeat and improve bot exception handling
* Missed heartbeat during client-server refactor somehow...oops. Re-add heartbeat behavior
* Refactor nanny functionality to use date check rather than loop -- behaves same as heartbeat now
* use http retry handling in nanny to handle reddit outages
* try-catch on nanny and heartbeat for better exception handling at bot-level
* await health loop so we can catch bot-level exceptions in app to prevent entire app from crashing
2021-08-31 11:02:03 -04:00
FoxxMD
885e3fa765 Merge branch 'edge' 2021-08-26 16:04:01 -04:00
FoxxMD
0b2c0e6451 Add karma threshold for recent activity rule 2021-08-26 12:04:17 -04:00
FoxxMD
15806b5f1f Add regex documentation 2021-08-26 11:26:59 -04:00
FoxxMD
bf42cdf356 Fix author criteria hash 2021-08-25 15:26:41 -04:00
FoxxMD
e21acd86db Fix maxAge optional 2021-08-25 12:57:02 -04:00
FoxxMD
5dca1c9602 Refactor caching ttl data type to be more intuitive and flexible
* (BC) instead of 0 being 'disabled' must now be 'false'
* (BC) 0 now means cache indefinitely
* 'true' is an alias for '0'
2021-08-25 12:12:41 -04:00
FoxxMD
5274584d92 Improve readability/functionality for caching functions
* Make keys more readable by using plaintext for unique values and only hashing objects
* Improve author criteria caching by excluding item identifier from hash since result should be same at subreddit-level
2021-08-25 11:49:28 -04:00
FoxxMD
1d386c53a5 Make usernote author usage blind to author type
If it has to handle cached activities this will help
2021-08-25 11:30:19 -04:00
FoxxMD
d6e351b195 Fix missing TTL settings 2021-08-25 11:21:55 -04:00
FoxxMD
ea32dc0b62 Fix shared cache namespacing and key count
* Add special SHARED namespace for subreddits using default cache -- remove ns when cache is dedicated
* Check for redis cache type and include prefix pattern when getting key count
2021-08-24 15:59:15 -04:00
FoxxMD
dca57bb19e Fix key count display
Correctly count shared and non-shared
2021-08-24 15:58:04 -04:00
FoxxMD
43919f7f9c Fix connected users since cache can now be persistent 2021-08-24 15:57:48 -04:00
FoxxMD
a176b51148 Fix storing cache user result 2021-08-24 15:08:21 -04:00
FoxxMD
75ac5297df Refactor caching prefix building to provide unique defaults 2021-08-24 14:36:13 -04:00
FoxxMD
0ef2b99bd6 Refactor web to use more caching
* Implement invite code/data to use caching
* refactor operator config so session and invite use same cache instance
2021-08-24 14:10:12 -04:00
FoxxMD
9596a476b5 Improve caching for redis
* Re-add operator-level caching config so a global default cache config can be defined
* Expand provider options with index property so additional, redis specific, params can be provided
* namespace (prefix) bot and subreddit-level (When not shared) redis connections
* refactor subreddit and author name usage since it differs when objects are deserialized
* as type guard for submission based on instance type OR object shape hint since deserialized activities are plain objects
2021-08-24 13:10:13 -04:00
FoxxMD
92f52cada5 Better implementation of user cache result
* When check is triggered also store rule results in cache -- makes actioned events more complete when actions run from cached result
* Add config option to toggle run actions on/off from cached check (Defaults to on)
2021-08-24 10:30:37 -04:00
FoxxMD
a482e852c5 Add empty state to actioned events view 2021-08-24 10:24:38 -04:00
FoxxMD
e9055e5205 Better display for actioned events
* Use checks triggered display instead of own events actions stats (since it already exists and is the same)
* More visual separation between action events using shadowed boxes
* Move timestamp and title into a header (more visual distinction) and use short hyperlink
* Move rule summary up into check field
2021-08-23 16:58:06 -04:00
FoxxMD
df2c40d9c1 Use local time format for actioned events timestamp 2021-08-23 16:56:25 -04:00
FoxxMD
fc4eeb47fa Replace newlines with spaces to make comment peeks more compact 2021-08-23 16:49:40 -04:00
FoxxMD
9fb3eaa611 Implement actioned event history
* Refactor action return value to return results/success/error
* Store action event data after triggered from manager
* Display last 25 actioned events in ui
2021-08-23 14:33:40 -04:00
FoxxMD
23394ab5c2 Use clearer wording for scope error 2021-08-23 12:16:32 -04:00
FoxxMD
5417b26417 Read operator config as json5/yaml 2021-08-23 12:10:39 -04:00
FoxxMD
b6d638d6c5 Implement easier way to determine if reddit api error is due to insufficient oauth scope 2021-08-23 12:00:49 -04:00
FoxxMD
af1dd09e2d Remove batch usernote actions for now 2021-08-23 11:51:01 -04:00
FoxxMD
c42e56c68f Fix window duration when no activities are returned from history rule subreddit filtering 2021-08-20 16:36:52 -04:00
FoxxMD
561a007850 Fix instance qs name on check url 2021-08-20 16:36:07 -04:00
84 changed files with 7904 additions and 1803 deletions

View File

@@ -1,8 +1,10 @@
FROM node:16-alpine3.12
FROM node:16-alpine3.14
ENV TZ=Etc/GMT
RUN apk update
# vips required to run sharp library for image comparison
RUN echo "http://dl-4.alpinelinux.org/alpine/v3.14/community" >> /etc/apk/repositories \
&& apk --update add vips
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone

View File

@@ -26,6 +26,7 @@ Some feature highlights:
* Author criteria (name, css flair/text, age, karma, moderator status, and [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes))
* Activity state (removed, locked, distinguished, etc.)
* Rules and Actions support named references (write once, reference anywhere)
* [**Image Comparisons**](/docs/imageComparison.md) via fingerprinting and/or pixel differences
* Global/subreddit-level **API caching**
* Support for [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) as criteria or Actions (writing notes)
* Docker container support

67
cliff.toml Normal file
View File

@@ -0,0 +1,67 @@
# configuration file for git-cliff (0.1.0)
[changelog]
# changelog header
header = """
# Changelog
All notable changes to this project will be documented in this file.\n
"""
# template for the changelog body
# https://tera.netlify.app/docs/#introduction
body = """
{% if version %}\
## [{{ version | replace(from="v", to="") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
{% else %}\
## [unreleased]
{% endif %}\
{% for group, commits in commits | group_by(attribute="group") %}
### {{ group | upper_first }}
{% for commit in commits
| filter(attribute="scope")
| sort(attribute="scope") %}
- *({{commit.scope}})* {{ commit.message | upper_first }}
{%- if commit.breaking %}
{% raw %} {% endraw %}- **BREAKING**: {{commit.breaking_description}}
{%- endif -%}
{%- endfor -%}
{%- for commit in commits %}
{%- if commit.scope -%}
{% else -%}
- *(No Category)* {{ commit.message | upper_first }}
{% if commit.breaking -%}
{% raw %} {% endraw %}- **BREAKING**: {{commit.breaking_description}}
{% endif -%}
{% endif -%}
{% endfor -%}
{% endfor %}
"""
# remove the leading and trailing whitespaces from the template
trim = true
# changelog footer
footer = """
<!-- generated by git-cliff -->
"""
[git]
# allow only conventional commits
# https://www.conventionalcommits.org
conventional_commits = true
# regex for parsing and grouping commits
commit_parsers = [
{ message = "^feat", group = "Features"},
{ message = "^fix", group = "Bug Fixes"},
{ message = "^doc", group = "Documentation"},
{ message = "^perf", group = "Performance"},
{ message = "^refactor", group = "Refactor"},
{ message = "^style", group = "Styling"},
{ message = "^test", group = "Testing"},
{ message = "^chore\\(release\\): prepare for", skip = true},
{ message = "^chore", group = "Miscellaneous Tasks"},
{ body = ".*security", group = "Security"},
]
# filter out the commits that are not matched by commit parsers
filter_commits = false
# glob pattern for matching git tags
tag_pattern = "[0-9]*"
# regex for skipping tags
skip_tags = "v0.1.0-beta.1"

View File

@@ -18,6 +18,7 @@
* [Activities `window`](#activities-window)
* [Comparisons](#thresholds-and-comparisons)
* [Activity Templating](/docs/actionTemplating.md)
* [Image Comparisons](#image-comparisons)
* [Best Practices](#best-practices)
* [Named Rules](#named-rules)
* [Rule Order](#rule-order)
@@ -100,7 +101,7 @@ Find detailed descriptions of all the Rules, with examples, below:
* [Repeat Activity](/docs/examples/repeatActivity)
* [History](/docs/examples/history)
* [Author](/docs/examples/author)
* Regex
* [Regex](/docs/examples/regex)
### Rule Set
@@ -268,6 +269,12 @@ The duration value compares a time range from **now** to `duration value` time i
Refer to [duration values in activity window documentation](/docs/activitiesWindow.md#duration-values) as well as the individual rule/criteria schema to see what this duration is comparing against.
### Image Comparisons
ContextMod implements two methods for comparing **image content**, perceptual hashing and pixel-to-pixel comparisons. Comparisons can be used to filter activities in some rules.
See [image comparison documentation](/docs/imageComparison.md) for a full reference.
## Best Practices
### Named Rules

View File

@@ -16,6 +16,7 @@ This directory contains example of valid, ready-to-go configurations for Context
* [Repeat Activity](/docs/examples/repeatActivity)
* [History](/docs/examples/history)
* [Author](/docs/examples/author)
* [Regex](/docs/examples/regex)
* [Toolbox User Notes](/docs/examples/userNotes)
* [Advanced Concepts](/docs/examples/advancedConcepts)
* [Rule Sets](/docs/examples/advancedConcepts/ruleSets.json5)

View File

@@ -0,0 +1,20 @@
The **Regex** rule matches on text content from a comment or submission in the same way automod uses regex. The rule, however, provides additional functionality automod does not:
* Can set the **number** of matches that trigger the rule (`matchThreshold`)
Which can then be used in conjunction with a [`window`](https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md) to match against activities from the history of the Author of the Activity being checked (including the Activity being checked):
* Can set the **number of Activities** that meet the `matchThreshold` to trigger the rule (`activityMatchThreshold`)
* Can set the **number of total matches** across all Activities to trigger the rule (`totalMatchThreshold`)
* Can set the **type of Activities** to check (`lookAt`)
* When an Activity is a Submission can **specify which parts of the Submission to match against** IE title, body, and/or url (`testOn`)
### Examples
* [Trigger if regex matches against the current activity](/docs/examples/regex/matchAnyCurrentActivity.json5)
* [Trigger if regex matches 5 times against the current activity](/docs/examples/regex/matchThresholdCurrentActivity.json5)
* [Trigger if regex matches against any part of a Submission](/docs/examples/regex/matchSubmissionParts.json5)
* [Trigger if regex matches any of Author's last 10 activities](/docs/examples/regex/matchHistoryActivity.json5)
* [Trigger if regex matches at least 3 of Author's last 10 activities](/docs/examples/regex/matchActivityThresholdHistory.json5)
* [Trigger if there are 5 regex matches in the Author's last 10 activities](/docs/examples/regex/matchTotalHistoryActivity.json5)
* [Trigger if there are 5 regex matches in the Author's last 10 comments](/docs/examples/regex/matchSubsetHistoryActivity.json5)

View File

@@ -0,0 +1,20 @@
// goes inside
// "rules": []
{
"name": "swear",
"kind": "regex",
"criteria": [
// triggers if more than 3 activities in the last 10 match the regex
{
"regex": "/fuck|shit|damn/",
// this differs from "totalMatchThreshold"
//
// activityMatchThreshold => # of activities from window must match regex
// totalMatchThreshold => # of matches across all activities from window must match regex
"activityMatchThreshold": "> 3",
// if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
// learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
"window": 10,
},
]
}

View File

@@ -0,0 +1,14 @@
// goes inside
// "rules": []
{
"name": "swear",
"kind": "regex",
"criteria": [
// triggers if current activity has more than 0 matches
{
"regex": "/fuck|shit|damn/",
// if "matchThreshold" is not specified it defaults to this -- default behavior is to trigger if there are any matches
// "matchThreshold": "> 0"
},
]
}

View File

@@ -0,0 +1,15 @@
// goes inside
// "rules": []
{
"name": "swear",
"kind": "regex",
"criteria": [
// triggers if any activity in the last 10 (including current activity) match the regex
{
"regex": "/fuck|shit|damn/",
// if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
// learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
"window": 10,
},
]
}

View File

@@ -0,0 +1,19 @@
// goes inside
// "rules": []
{
"name": "swear",
"kind": "regex",
"criteria": [
{
// triggers if the current activity has more than 0 matches
// if the activity is a submission then matches against title, body, and url
// if "testOn" is not provided then `title, body` are the defaults
"regex": "/fuck|shit|damn/",
"testOn": [
"title",
"body",
"url"
]
},
]
}

View File

@@ -0,0 +1,23 @@
// goes inside
// "rules": []
{
"name": "swear",
"kind": "regex",
"criteria": [
// triggers if there are more than 5 regex matches in the last 10 activities (comments only)
{
"regex": "/fuck|shit|damn/",
// this differs from "activityMatchThreshold"
//
// activityMatchThreshold => # of activities from window must match regex
// totalMatchThreshold => # of matches across all activities from window must match regex
"totalMatchThreshold": "> 5",
// if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
// learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
"window": 10,
// determines which activities from window to consider
//defaults to "all" (submissions and comments)
"lookAt": "comments",
},
]
}

View File

@@ -0,0 +1,13 @@
// goes inside
// "rules": []
{
"name": "swear",
"kind": "regex",
"criteria": [
{
"regex": "/fuck|shit|damn/",
// triggers if current activity has greater than 5 matches
"matchThreshold": "> 5"
},
]
}

View File

@@ -0,0 +1,21 @@
// goes inside
// "rules": []
{
"name": "swear",
"kind": "regex",
"criteria": [
// triggers if there are more than 5 regex matches in the last 10 activities (comments or submission)
{
// triggers if there are more than 5 *total matches* across the last 10 activities
"regex": "/fuck|shit|damn/",
// this differs from "activityMatchThreshold"
//
// activityMatchThreshold => # of activities from window must match regex
// totalMatchThreshold => # of matches across all activities from window must match regex
"totalMatchThreshold": "> 5",
// if `window` is specified it tells the rule to check the current activity as well as the activities returned from `window`
// learn more about `window` here https://github.com/FoxxMD/context-mod/blob/master/docs/activitiesWindow.md
"window": 10,
},
]
}

201
docs/imageComparison.md Normal file
View File

@@ -0,0 +1,201 @@
# Overview
ContextMod supports comparing image content, for the purpose of detecting duplicates, with two different but complementary systems. Image comparison behavior is available for the following rules:
* [Recent Activity](/docs/examples/recentActivity)
* Repeat Activity (In-progress)
To enable comparisons reference the example below (at the top-level of your rule) and configure as needed:
```json5
{
"name": "ruleWithImageDetection",
"kind": "recentActivity",
// Add block below...
//
"imageDetection": {
// enables image comparison
"enable": true,
// The difference, in percentage, between the reference submission and the submissions being checked
// must be less than this number to consider the images "the same"
"threshold": 5,
// optional
// set the behavior for determining if image comparison should occur on a URL:
//
// "extension" => try image detection if URL ends in a known image extension (jpeg, gif, png, bmp, etc.)
// "unknown" => try image detection if URL ends in known image extension OR there is no extension OR the extension is unknown (not video, html, doc, etc...)
// "all" => ALWAYS try image detection, regardless of URL extension
//
// if fetchBehavior is not defined then "extension" is the default
"fetchBehavior": "extension",
},
//
// And above ^^^
...
}
```
**Perceptual Hashing** (`hash`) and **Pixel Comparisons** (`pixel`) may be used at the same time. Refer to the documentation below to see how they interact.
**Note:** Regardless of `fetchBehavior`, if the response from the URL does not indicate it is an image then image detection will not occur. IE Response `Content-Type` must contain `image`
## Prerequisites
Both image comparison systems require [Sharp](https://sharp.pixelplumbing.com/) as a dependency. Most modern operating systems running Node.js >= 12.13.0 do not require installing additional dependencies in order to use Sharp.
If you are using the docker image for ContextMod (`foxxmd/context-mod`) Sharp is built-in.
If you are installing ContextMod using npm then **Sharp should be installed automatically as an optional dependency.**
**If you do not want to install it automatically** install ContextMod with the following command:
```
npm install --no-optional
```
If you are using ContextMod as part of a larger project you may want to require Sharp in your own package:
```
npm install sharp@0.29.1 --save
```
# Comparison Systems
## Perceptual Hashing
[Perceptual Hashing](https://en.wikipedia.org/wiki/Perceptual_hashing) creates a text fingerprint of an image by:
* Dividing up the image into a grid
* Using an algorithm to derive a value from the pixels in each grid
* Adding up all the values to create a unique string (the "fingerprint")
An example of how a perceptual hash can work [can be found here.](https://www.hackerfactor.com/blog/?/archives/432-Looks-Like-It.html)
ContextMod uses [blockhash-js](https://github.com/commonsmachinery/blockhash-js) which is a javascript implementation of the algorithm described in the paper [Block Mean Value Based Image Perceptual Hashing by Bian Yang, Fan Gu and Xiamu Niu.](https://ieeexplore.ieee.org/document/4041692)
**Advantages**
* Low memory requirements and not CPU intensive
* Does not require any image transformations
* Hash results can be stored to make future comparisons even faster and skip downloading images (cached by url)
* Resolution-independent
**Disadvantages**
* Hash is weak when image differences are based only on color
* Hash is weak when image contains lots of text
* Higher accuracy requires larger calculation (more bits required)
**When should I use it?**
* General duplicate detection
* Comparing many images
* Comparing the same images often
### How To Use
If `imageDetection.enable` is `true` then hashing is enabled by default and no further configuration is required.
To further configure hashing refer to this code block:
```json5
{
"name": "ruleWithImageDetectionAndConfiguredHashing",
"kind": "recentActivity",
"imageDetection": {
"enable": true,
// Add block below...
//
"hash": {
// enable or disable hash comparisons (enabled by default)
"enable": true,
// determines accuracy of hash and granularity of hash comparison (comparison to other hashes)
// the higher the bits the more accurate the comparison
//
// NOTE: Hashes of different sizes (bits) cannot be compared. If you are caching hashes make sure all rules where results may be shared use the same bit count to ensure hashes can be compared. Otherwise hashes will be recomputed.
"bits": 32, // default is 32 if not defined
//
// number of seconds to cache an image hash
"ttl": 60, // default is 60 if not defined
//
// "High Confidence" Threshold
// If the difference in comparison is equal to or less than this number the images are considered the same and pixel comparison WILL NOT occur
//
// Defaults to the parent-level `threshold` value if not present
//
// Use null if you want pixel comparison to ALWAYS occur (softThreshold must be present)
"hardThreshold": 5,
//
// "Low Confidence" Threshold -- only used if `pixel` is enabled
// If the difference in comparison is:
//
// 1) equal to or less than this value and
// 2) the value is greater than `hardThreshold`
//
// the images will be compared using the `pixel` method
"softThreshold": 0,
},
//
// And above ^^^
//"pixel": {...}
},
//...
```
## Pixel Comparison
This approach is as straightforward as it sounds. Both images are compared, pixel by pixel, to determine the difference between the two. ContextMod uses [pixelmatch](https://github.com/mapbox/pixelmatch) to do the comparison.
**Advantages**
* Extremely accurate, high-confidence on difference percentage
* Strong when comparing text-based images or color-only differences
**Disadvantages**
* High memory requirements (10-30MB per comparison) and CPU intensive
* Weak against similar images with different aspect ratios
* Requires image transformations (resize, crop) before comparison
* Can only store image-to-image results (no single image fingerprints)
**When should I use it?**
* Require very high accuracy in comparison results
* Comparing mostly text-based images or subtle color/detail differences
* As a secondary, high-confidence confirmation of comparison result after hashing
### How To Use
By default pixel comparisons **are not enabled.** They must be explicitly enabled in configuration.
Pixel comparisons will be performed in either of these scenarios:
* pixel is enabled, hashing is enabled and `hash.softThreshold` is defined
* When a comparison occurs that is less different than `softThreshold` but more different than `hardThreshold` (or `"hardThreshold": null`), then pixel comparison will occur as a high-confidence check
* Example
* hash comparison => 7% difference
* `"softThreshold": 10`
* `"hardThreshold": 4`
* `hash.enable` is `false` and `pixel.enable` is true
* hashing is skipped entirely and only pixel comparisons are performed
To configure pixel comparisons refer to this code block:
```json5
{
"name": "ruleWithImageDetectionAndPixelEnabled",
"kind": "recentActivity",
"imageDetection": {
//"hash": {...}
"pixel": {
// enable or disable pixel comparisons (disabled by default)
"enable": true,
// if the comparison difference percentage is equal to or less than this value the images are considered the same
//
// if not defined the value from imageDetection.threshold will be used
"threshold": 5
}
},
//...
```

1349
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -26,6 +26,7 @@
"license": "ISC",
"dependencies": {
"@awaitjs/express": "^0.8.0",
"@stdlib/regexp-regexp": "^0.0.6",
"ajv": "^7.2.4",
"async": "^3.2.0",
"autolinker": "^3.14.3",
@@ -49,9 +50,11 @@
"got": "^11.8.2",
"he": "^1.2.0",
"http-proxy": "^1.18.1",
"image-size": "^1.0.0",
"js-yaml": "^4.1.0",
"json5": "^2.2.0",
"jsonwebtoken": "^8.5.1",
"leven": "^3.1.0",
"lodash": "^4.17.21",
"lru-cache": "^6.0.0",
"monaco-editor": "^0.27.0",
@@ -60,11 +63,14 @@
"normalize-url": "^6.1.0",
"object-hash": "^2.2.0",
"p-event": "^4.2.0",
"p-map": "^4.0.0",
"passport": "^0.4.1",
"passport-custom": "^1.1.1",
"passport-jwt": "^4.0.0",
"pixelmatch": "^5.2.1",
"pretty-print-json": "^1.0.3",
"safe-stable-stringify": "^1.1.1",
"set-random-interval": "^1.1.0",
"snoostorm": "^1.5.2",
"snoowrap": "^1.23.0",
"socket.io": "^4.1.3",
@@ -100,10 +106,15 @@
"@types/object-hash": "^2.1.0",
"@types/passport": "^1.0.7",
"@types/passport-jwt": "^3.0.6",
"@types/pixelmatch": "^5.2.4",
"@types/sharp": "^0.29.2",
"@types/tcp-port-used": "^1.0.0",
"@types/triple-beam": "^1.3.2",
"ts-auto-guard": "*",
"ts-json-schema-generator": "^0.93.0",
"typescript-json-schema": "^0.50.1"
},
"optionalDependencies": {
"sharp": "^0.29.1"
}
}

View File

@@ -2,23 +2,33 @@ import {ActionJson, ActionConfig} from "./index";
import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../Rule";
import {ActionProcessResult} from "../Common/interfaces";
export class ApproveAction extends Action {
getKind() {
return 'Approve';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
//snoowrap typing issue, thinks comments can't be locked
// @ts-ignore
if (item.approved) {
this.logger.warn('Item is already approved');
return {
dryRun,
success: false,
result: 'Item is already approved'
}
}
if (!dryRun) {
// @ts-ignore
await item.approve();
}
return {
dryRun,
success: true,
}
}
}

View File

@@ -3,7 +3,7 @@ import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../Rule";
import {renderContent} from "../Utils/SnoowrapUtils";
import {Footer} from "../Common/interfaces";
import {ActionProcessResult, Footer} from "../Common/interfaces";
export class BanAction extends Action {
@@ -33,7 +33,7 @@ export class BanAction extends Action {
return 'Ban';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
const content = this.message === undefined ? undefined : await this.resources.getContent(this.message, item.subreddit);
const renderedBody = content === undefined ? undefined : await renderContent(content, item, ruleResults, this.resources.userNotes);
@@ -58,6 +58,11 @@ export class BanAction extends Action {
duration: this.duration
});
}
return {
dryRun,
success: true,
result: `Banned ${item.author.name} ${durText}${this.reason !== undefined ? ` (${this.reason})` : ''}`
};
}
}

View File

@@ -2,8 +2,9 @@ import Action, {ActionJson, ActionOptions} from "./index";
import {Comment} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {renderContent} from "../Utils/SnoowrapUtils";
import {Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
import {ActionProcessResult, Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
import {RuleResult} from "../Rule";
import {truncateStringToLength} from "../util";
export class CommentAction extends Action {
content: string;
@@ -32,7 +33,7 @@ export class CommentAction extends Action {
return 'Comment';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
const content = await this.resources.getContent(this.content, item.subreddit);
const body = await renderContent(content, item, ruleResults, this.resources.userNotes);
@@ -44,7 +45,11 @@ export class CommentAction extends Action {
if(item.archived) {
this.logger.warn('Cannot comment because Item is archived');
return;
return {
dryRun,
success: false,
result: 'Cannot comment because Item is archived'
};
}
let reply: Comment;
if(!dryRun) {
@@ -62,6 +67,19 @@ export class CommentAction extends Action {
// @ts-ignore
await reply.distinguish({sticky: this.sticky});
}
let modifiers = [];
if(this.distinguish) {
modifiers.push('Distinguished');
}
if(this.sticky) {
modifiers.push('Stickied');
}
const modifierStr = modifiers.length === 0 ? '' : `[${modifiers.join(' | ')}]`;
return {
dryRun,
success: true,
result: `${modifierStr}${this.lock ? ' - Locked Author\'s Activity - ' : ''}${truncateStringToLength(100)(body)}`
};
}
}

View File

@@ -2,24 +2,34 @@ import {ActionJson, ActionConfig} from "./index";
import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../Rule";
import {ActionProcessResult} from "../Common/interfaces";
export class LockAction extends Action {
getKind() {
return 'Lock';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
//snoowrap typing issue, thinks comments can't be locked
// @ts-ignore
if (item.locked) {
this.logger.warn('Item is already locked');
return {
dryRun,
success: false,
result: 'Item is already locked'
};
}
if (!dryRun) {
//snoowrap typing issue, thinks comments can't be locked
// @ts-ignore
await item.lock();
}
return {
dryRun,
success: true
}
}
}

View File

@@ -2,9 +2,17 @@ import Action, {ActionJson, ActionOptions} from "./index";
import {Comment, ComposeMessageParams} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {renderContent} from "../Utils/SnoowrapUtils";
import {Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
import {ActionProcessResult, Footer, RequiredRichContent, RichContent} from "../Common/interfaces";
import {RuleResult} from "../Rule";
import {boolToString} from "../util";
import {
asSubmission,
boolToString,
isSubmission,
parseRedditEntity,
REDDIT_ENTITY_REGEX_URL,
truncateStringToLength
} from "../util";
import SimpleError from "../Utils/SimpleError";
export class MessageAction extends Action {
content: string;
@@ -14,6 +22,7 @@ export class MessageAction extends Action {
footer?: false | string;
title?: string;
to?: string;
asSubreddit: boolean;
constructor(options: MessageActionOptions) {
@@ -23,7 +32,9 @@ export class MessageAction extends Action {
asSubreddit,
title,
footer,
to,
} = options;
this.to = to;
this.footer = footer;
this.content = content;
this.asSubreddit = asSubreddit;
@@ -34,7 +45,7 @@ export class MessageAction extends Action {
return 'Message';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
const content = await this.resources.getContent(this.content);
const body = await renderContent(content, item, ruleResults, this.resources.userNotes);
@@ -42,19 +53,38 @@ export class MessageAction extends Action {
const footer = await this.resources.generateFooter(item, this.footer);
const renderedContent = `${body}${footer}`;
// @ts-ignore
const author = await item.author.fetch() as RedditUser;
let recipient = item.author.name;
if(this.to !== undefined) {
// parse to value
try {
const entityData = parseRedditEntity(this.to);
if(entityData.type === 'user') {
recipient = entityData.name;
} else {
recipient = `/r/${entityData.name}`;
}
} catch (err) {
this.logger.error(`'to' field for message was not in a valid format. See ${REDDIT_ENTITY_REGEX_URL} for valid examples`);
this.logger.error(err);
err.logged = true;
throw err;
}
if(recipient.includes('/r/') && this.asSubreddit) {
throw new SimpleError(`Cannot send a message as a subreddit to another subreddit. Requested recipient: ${recipient}`);
}
}
const msgOpts: ComposeMessageParams = {
to: author,
to: recipient,
text: renderedContent,
// @ts-ignore
fromSubreddit: this.asSubreddit ? await item.subreddit.fetch() : undefined,
subject: this.title || `Concerning your ${item instanceof Submission ? 'Submission' : 'Comment'}`,
subject: this.title || `Concerning your ${isSubmission(item) ? 'Submission' : 'Comment'}`,
};
const msgPreview = `\r\n
TO: ${author.name}\r\n
TO: ${recipient}\r\n
Subject: ${msgOpts.subject}\r\n
Sent As Modmail: ${boolToString(this.asSubreddit)}\r\n\r\n
${renderedContent}`;
@@ -64,6 +94,11 @@ export class MessageAction extends Action {
if (!dryRun) {
await this.client.composeMessage(msgOpts);
}
return {
dryRun,
success: true,
result: truncateStringToLength(200)(msgPreview)
}
}
}
@@ -73,6 +108,24 @@ export interface MessageActionConfig extends RequiredRichContent, Footer {
* */
asSubreddit: boolean
/**
* Entity to send message to.
*
* If not present the Message will be sent to the Author of the Activity being checked.
*
* Valid formats:
*
* * `aUserName` -- send to /u/aUserName
* * `u/aUserName` -- send to /u/aUserName
* * `r/aSubreddit` -- send to modmail of /r/aSubreddit
*
* **Note:** Reddit does not support sending a message AS a subreddit TO another subreddit
*
* @pattern ^\s*(\/[ru]\/|[ru]\/)*(\w+)*\s*$
* @examples ["aUserName","u/aUserName","r/aSubreddit"]
* */
to?: string
/**
* The title of the message
*

View File

@@ -3,24 +3,33 @@ import Action from "./index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../Rule";
import {activityIsRemoved} from "../Utils/SnoowrapUtils";
import {ActionProcessResult} from "../Common/interfaces";
export class RemoveAction extends Action {
getKind() {
return 'Remove';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
// issue with snoowrap typings, doesn't think prop exists on Submission
// @ts-ignore
if (activityIsRemoved(item)) {
this.logger.warn('Item is already removed');
return;
return {
dryRun,
success: false,
result: 'Item is already removed',
}
}
if (!dryRun) {
// @ts-ignore
await item.remove();
}
return {
dryRun,
success: true,
}
}
}

View File

@@ -4,7 +4,7 @@ import Snoowrap, {Comment, Submission} from "snoowrap";
import {truncateStringToLength} from "../util";
import {renderContent} from "../Utils/SnoowrapUtils";
import {RuleResult} from "../Rule";
import {RichContent} from "../Common/interfaces";
import {ActionProcessResult, RichContent} from "../Common/interfaces";
// https://www.reddit.com/dev/api/oauth#POST_api_report
// denotes 100 characters maximum
@@ -23,7 +23,7 @@ export class ReportAction extends Action {
return 'Report';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
const content = await this.resources.getContent(this.content, item.subreddit);
const renderedContent = await renderContent(content, item, ruleResults, this.resources.userNotes);
@@ -33,6 +33,12 @@ export class ReportAction extends Action {
// @ts-ignore
await item.report({reason: truncatedContent});
}
return {
dryRun,
success: true,
result: truncatedContent
};
}
}

View File

@@ -2,6 +2,7 @@ import {SubmissionActionConfig} from "./index";
import Action, {ActionJson, ActionOptions} from "../index";
import Snoowrap, {Comment, Submission} from "snoowrap";
import {RuleResult} from "../../Rule";
import {ActionProcessResult} from "../../Common/interfaces";
export class FlairAction extends Action {
text: string;
@@ -20,7 +21,17 @@ export class FlairAction extends Action {
return 'Flair';
}
async process(item: Comment | Submission, ruleResults: RuleResult[]): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
let flairParts = [];
if(this.text !== '') {
flairParts.push(`Text: ${this.text}`);
}
if(this.css !== '') {
flairParts.push(`CSS: ${this.css}`);
}
const flairSummary = flairParts.length === 0 ? 'No flair (unflaired)' : flairParts.join(' | ');
this.logger.verbose(flairSummary);
if (item instanceof Submission) {
if(!this.dryRun) {
// @ts-ignore
@@ -28,6 +39,16 @@ export class FlairAction extends Action {
}
} else {
this.logger.warn('Cannot flair Comment');
return {
dryRun,
success: false,
result: 'Cannot flair Comment',
}
}
return {
dryRun,
success: true,
result: flairSummary
}
}
}

View File

@@ -5,6 +5,7 @@ import {renderContent} from "../Utils/SnoowrapUtils";
import {RuleResult} from "../Rule";
import {UserNote, UserNoteJson} from "../Subreddit/UserNotes";
import Submission from "snoowrap/dist/objects/Submission";
import {ActionProcessResult} from "../Common/interfaces";
export class UserNoteAction extends Action {
@@ -24,7 +25,7 @@ export class UserNoteAction extends Action {
return 'User Note';
}
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
async process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionProcessResult> {
const dryRun = runtimeDryrun || this.dryRun;
const content = await this.resources.getContent(this.content, item.subreddit);
const renderedContent = await renderContent(content, item, ruleResults, this.resources.userNotes);
@@ -35,7 +36,11 @@ export class UserNoteAction extends Action {
const existingNote = notes.find((x) => x.link.includes(item.id));
if (existingNote) {
this.logger.info(`Will not add note because one already exists for this Activity (${existingNote.time.local().format()}) and allowDuplicate=false`);
return;
return {
dryRun,
success: false,
result: `Will not add note because one already exists for this Activity (${existingNote.time.local().format()}) and allowDuplicate=false`
};
}
}
if (!dryRun) {
@@ -43,6 +48,11 @@ export class UserNoteAction extends Action {
} else if (!await this.resources.userNotes.warningExists(this.type)) {
this.logger.warn(`UserNote type '${this.type}' does not exist. If you meant to use this please add it through Toolbox first.`);
}
return {
success: true,
dryRun,
result: `(${this.type}) ${renderedContent}`
}
}
}

View File

@@ -2,9 +2,10 @@ import Snoowrap, {Comment, Submission} from "snoowrap";
import {Logger} from "winston";
import {RuleResult} from "../Rule";
import {SubredditResources} from "../Subreddit/SubredditResources";
import {ChecksActivityState, TypedActivityStates} from "../Common/interfaces";
import {ActionProcessResult, ActionResult, ChecksActivityState, TypedActivityStates} from "../Common/interfaces";
import Author, {AuthorOptions} from "../Author/Author";
import {mergeArr} from "../util";
import LoggedError from "../Utils/LoggedError";
export abstract class Action {
name?: string;
@@ -53,47 +54,61 @@ export abstract class Action {
return this.name === this.getKind() ? this.getKind() : `${this.getKind()} - ${this.name}`;
}
async handle(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<void> {
async handle(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionResult> {
const dryRun = runtimeDryrun || this.dryRun;
let actionRun = false;
const itemPass = await this.resources.testItemCriteria(item, this.itemIs);
if (!itemPass) {
this.logger.verbose(`Activity did not pass 'itemIs' test, Action not run`);
return;
}
const authorRun = async () => {
let actRes: ActionResult = {
kind: this.getKind(),
name: this.getActionUniqueName(),
run: false,
dryRun,
success: false,
};
try {
const itemPass = await this.resources.testItemCriteria(item, this.itemIs);
if (!itemPass) {
this.logger.verbose(`Activity did not pass 'itemIs' test, Action not run`);
actRes.runReason = `Activity did not pass 'itemIs' test, Action not run`;
return actRes;
}
if (this.authorIs.include !== undefined && this.authorIs.include.length > 0) {
for (const auth of this.authorIs.include) {
if (await this.resources.testAuthorCriteria(item, auth)) {
await this.process(item, ruleResults, runtimeDryrun);
return true;
actRes.run = true;
const results = await this.process(item, ruleResults, runtimeDryrun);
return {...actRes, ...results};
}
}
this.logger.verbose('Inclusive author criteria not matched, Action not run');
return false;
}
if (!actionRun && this.authorIs.exclude !== undefined && this.authorIs.exclude.length > 0) {
actRes.runReason = 'Inclusive author criteria not matched';
return actRes;
} else if (this.authorIs.exclude !== undefined && this.authorIs.exclude.length > 0) {
for (const auth of this.authorIs.exclude) {
if (await this.resources.testAuthorCriteria(item, auth, false)) {
await this.process(item, ruleResults, runtimeDryrun);
return true;
actRes.run = true;
const results = await this.process(item, ruleResults, runtimeDryrun);
return {...actRes, ...results};
}
}
this.logger.verbose('Exclusive author criteria not matched, Action not run');
return false;
actRes.runReason = 'Exclusive author criteria not matched';
return actRes;
}
return null;
actRes.run = true;
const results = await this.process(item, ruleResults, runtimeDryrun);
return {...actRes, ...results};
} catch (err) {
if(!(err instanceof LoggedError)) {
this.logger.error(`Encountered error while running`, err);
}
actRes.success = false;
actRes.result = err.message;
return actRes;
}
const authorRunResults = await authorRun();
if (null === authorRunResults) {
await this.process(item, ruleResults, runtimeDryrun);
} else if (!authorRunResults) {
return;
}
this.logger.verbose(`${dryRun ? 'DRYRUN - ' : ''}Done`);
}
abstract process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryun?: boolean): Promise<void>;
abstract process(item: Comment | Submission, ruleResults: RuleResult[], runtimeDryun?: boolean): Promise<ActionProcessResult>;
}
export interface ActionOptions extends ActionConfig {

View File

@@ -3,8 +3,8 @@ import dayjs, {Dayjs} from "dayjs";
import {getLogger} from "./Utils/loggerFactory";
import {Invokee, OperatorConfig} from "./Common/interfaces";
import Bot from "./Bot";
import {castArray} from "lodash";
import LoggedError from "./Utils/LoggedError";
import {sleep} from "./util";
export class App {
@@ -53,8 +53,11 @@ export class App {
}
async onTerminate(reason = 'The application was shutdown') {
for(const m of this.bots) {
//await m.notificationManager.handle('runStateChanged', 'Application Shutdown', reason);
for(const b of this.bots) {
for(const m of b.subManagers) {
await m.notificationManager.handle('runStateChanged', 'Application Shutdown', reason);
}
//await b.notificationManager.handle('runStateChanged', 'Application Shutdown', reason);
}
}
@@ -64,6 +67,7 @@ export class App {
try {
await b.testClient();
await b.buildManagers();
await sleep(2000);
b.runManagers(causedBy).catch((err) => {
this.logger.error(`Unexpected error occurred while running Bot ${b.botName}. Bot must be re-built to restart`);
if (!err.logged || !(err instanceof LoggedError)) {

View File

@@ -1,5 +1,6 @@
import {DurationComparor, UserNoteCriteria} from "../Rule";
import {CompareValue, CompareValueOrPercent} from "../Common/interfaces";
import {UserNoteCriteria} from "../Rule";
import {CompareValue, CompareValueOrPercent, DurationComparor} from "../Common/interfaces";
import {parseStringToRegex} from "../util";
/**
* If present then these Author criteria are checked before running the rule. If criteria fails then the rule is skipped.
@@ -99,6 +100,24 @@ export interface AuthorCriteria {
* Does Author's account have a verified email?
* */
verified?: boolean
/**
* Is the author shadowbanned?
*
* This is determined by trying to retrieve the author's profile. If a 404 is returned it is likely they are shadowbanned
* */
shadowBanned?: boolean
/**
* An (array of) string/regular expression to test contents of an Author's profile description against
*
* If no flags are specified then the **insensitive** flag is used by default
*
* If using an array then if **any** value in the array passes the description test passes
*
* @examples [["/test$/i", "look for this string literal"]]
* */
description?: string | string[]
}
export class Author implements AuthorCriteria {
@@ -112,6 +131,8 @@ export class Author implements AuthorCriteria {
linkKarma?: string;
totalKarma?: string;
verified?: boolean;
shadowBanned?: boolean;
description?: string[];
constructor(options: AuthorCriteria) {
this.name = options.name;
@@ -123,6 +144,8 @@ export class Author implements AuthorCriteria {
this.commentKarma = options.commentKarma;
this.linkKarma = options.linkKarma;
this.totalKarma = options.totalKarma;
this.shadowBanned = options.shadowBanned;
this.description = options.description === undefined ? undefined : Array.isArray(options.description) ? options.description : [options.description];
}
}

View File

@@ -1,9 +1,9 @@
import Snoowrap, {Subreddit} from "snoowrap";
import Snoowrap, {Comment, Submission, Subreddit} from "snoowrap";
import {Logger} from "winston";
import dayjs, {Dayjs} from "dayjs";
import {Duration} from "dayjs/plugin/duration";
import EventEmitter from "events";
import {BotInstanceConfig, Invokee, PAUSED, RUNNING, SYSTEM} from "../Common/interfaces";
import {BotInstanceConfig, Invokee, PAUSED, RUNNING, STOPPED, SYSTEM, USER} from "../Common/interfaces";
import {
createRetryHandler,
formatNumber,
@@ -15,7 +15,7 @@ import {
snooLogWrapper
} from "../util";
import {Manager} from "../Subreddit/Manager";
import {ProxiedSnoowrap} from "../Utils/SnoowrapClients";
import {ExtendedSnoowrap, ProxiedSnoowrap} from "../Utils/SnoowrapClients";
import {ModQueueStream, UnmoderatedStream} from "../Subreddit/Streams";
import {BotResourcesManager} from "../Subreddit/SubredditResources";
import LoggedError from "../Utils/LoggedError";
@@ -24,7 +24,7 @@ import pEvent from "p-event";
class Bot {
client!: Snoowrap;
client!: ExtendedSnoowrap;
logger!: Logger;
wikiLocation: string;
dryRun?: true | undefined;
@@ -33,12 +33,15 @@ class Bot {
excludeSubreddits: string[];
subManagers: Manager[] = [];
heartbeatInterval: number;
nextHeartbeat?: Dayjs;
nextHeartbeat: Dayjs = dayjs();
heartBeating: boolean = false;
softLimit: number | string = 250;
hardLimit: number | string = 50;
nannyMode?: 'soft' | 'hard';
nannyRunning: boolean = false;
nextNannyCheck: Dayjs = dayjs().add(10, 'second');
nannyRetryHandler: Function;
nextExpiration: Dayjs = dayjs();
botName?: string;
botLink?: string;
@@ -46,6 +49,7 @@ class Bot {
maxWorkers: number;
startedAt: Dayjs = dayjs();
sharedModqueue: boolean = false;
streamListedOnce: string[] = [];
apiSample: number[] = [];
apiRollingAvg: number = 0;
@@ -88,6 +92,7 @@ class Bot {
},
polling: {
sharedMod,
stagger,
},
queue: {
maxWorkers,
@@ -163,7 +168,7 @@ class Bot {
}
try {
this.client = proxy === undefined ? new Snoowrap(creds) : new ProxiedSnoowrap({...creds, proxy});
this.client = proxy === undefined ? new ExtendedSnoowrap(creds) : new ProxiedSnoowrap({...creds, proxy});
this.client.config({
warnings: true,
maxRetryAttempts: 5,
@@ -179,6 +184,7 @@ class Bot {
}
const retryHandler = createRetryHandler({maxRequestRetry: 8, maxOtherRetry: 1}, this.logger);
this.nannyRetryHandler = createRetryHandler({maxRequestRetry: 5, maxOtherRetry: 1}, this.logger);
const modStreamErrorListener = (name: string) => async (err: any) => {
this.logger.error('Polling error occurred', err);
@@ -195,12 +201,32 @@ class Bot {
}
}
const defaultUnmoderatedStream = new UnmoderatedStream(this.client, {subreddit: 'mod'});
const modStreamListingListener = (name: string) => async (listing: (Comment|Submission)[]) => {
// dole out in order they were received
if(!this.streamListedOnce.includes(name)) {
this.streamListedOnce.push(name);
return;
}
for(const i of listing) {
const foundManager = this.subManagers.find(x => x.subreddit.display_name === i.subreddit.display_name && x.modStreamCallbacks.get(name) !== undefined);
if(foundManager !== undefined) {
foundManager.modStreamCallbacks.get(name)(i);
if(stagger !== undefined) {
await sleep(stagger);
}
}
}
}
const defaultUnmoderatedStream = new UnmoderatedStream(this.client, {subreddit: 'mod', limit: 100, clearProcessed: { size: 100, retain: 100 }});
// @ts-ignore
defaultUnmoderatedStream.on('error', modStreamErrorListener('unmoderated'));
const defaultModqueueStream = new ModQueueStream(this.client, {subreddit: 'mod'});
defaultUnmoderatedStream.on('listing', modStreamListingListener('unmoderated'));
const defaultModqueueStream = new ModQueueStream(this.client, {subreddit: 'mod', limit: 100, clearProcessed: { size: 100, retain: 100 }});
// @ts-ignore
defaultModqueueStream.on('error', modStreamErrorListener('modqueue'));
defaultModqueueStream.on('listing', modStreamListingListener('modqueue'));
this.cacheManager.modStreams.set('unmoderated', defaultUnmoderatedStream);
this.cacheManager.modStreams.set('modqueue', defaultModqueueStream);
@@ -322,7 +348,7 @@ class Bot {
async destroy(causedBy: Invokee) {
this.logger.info('Stopping heartbeat and nanny processes, may take up to 5 seconds...');
const processWait = Promise.all([pEvent(this.emitter, 'heartbeatStopped'), pEvent(this.emitter, 'nannyStopped')]);
const processWait = pEvent(this.emitter, 'healthStopped');
this.running = false;
await processWait;
for (const manager of this.subManagers) {
@@ -333,7 +359,7 @@ class Bot {
async runModStreams(notify = false) {
for(const [k,v] of this.cacheManager.modStreams) {
if(!v.running && v.listeners('item').length > 0) {
if(!v.running && this.subManagers.some(x => x.modStreamCallbacks.get(k) !== undefined)) {
v.startInterval();
this.logger.info(`Starting default ${k.toUpperCase()} mod stream`);
if(notify) {
@@ -343,6 +369,7 @@ class Bot {
}
}
}
await sleep(2000);
}
}
}
@@ -355,150 +382,217 @@ class Bot {
for (const manager of this.subManagers) {
if (manager.validConfigLoaded && manager.botState.state !== RUNNING) {
await manager.start(causedBy, {reason: 'Caused by application startup'});
await sleep(2000);
}
}
await this.runModStreams();
this.running = true;
this.runApiNanny();
this.nextNannyCheck = dayjs().add(10, 'second');
this.nextHeartbeat = dayjs().add(this.heartbeatInterval, 'second');
await this.healthLoop();
}
async healthLoop() {
while (this.running) {
await sleep(5000);
if (!this.running) {
break;
}
if (dayjs().isSameOrAfter(this.nextNannyCheck)) {
try {
await this.runApiNanny();
this.nextNannyCheck = dayjs().add(10, 'second');
} catch (err) {
this.logger.info('Delaying next nanny check for 1 minute due to emitted error');
this.nextNannyCheck = dayjs().add(120, 'second');
}
}
if(dayjs().isSameOrAfter(this.nextHeartbeat)) {
try {
await this.heartbeat();
} catch (err) {
this.logger.error(`Error occurred during heartbeat check: ${err.message}`);
}
this.nextHeartbeat = dayjs().add(this.heartbeatInterval, 'second');
}
}
this.emitter.emit('healthStopped');
}
async heartbeat() {
const heartbeat = `HEARTBEAT -- API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ~${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion === undefined ? 'N/A' : this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`
this.logger.info(heartbeat);
for (const s of this.subManagers) {
if(s.botState.state === STOPPED && s.botState.causedBy === USER) {
this.logger.debug('Skipping config check/restart on heartbeat due to previously being stopped by user', {subreddit: s.displayLabel});
continue;
}
try {
const newConfig = await s.parseConfiguration();
if(newConfig || (s.queueState.state !== RUNNING && s.queueState.causedBy === SYSTEM))
{
await s.startQueue('system', {reason: newConfig ? 'Config updated on heartbeat triggered reload' : 'Heartbeat detected non-running queue'});
}
if(newConfig || (s.eventsState.state !== RUNNING && s.eventsState.causedBy === SYSTEM))
{
await s.startEvents('system', {reason: newConfig ? 'Config updated on heartbeat triggered reload' : 'Heartbeat detected non-running events'});
}
if(s.botState.state !== RUNNING && s.eventsState.state === RUNNING && s.queueState.state === RUNNING) {
s.botState = {
state: RUNNING,
causedBy: 'system',
}
}
} catch (err) {
this.logger.info('Stopping event polling to prevent activity processing queue from backing up. Will be restarted when config update succeeds.')
await s.stopEvents('system', {reason: 'Invalid config will cause events to pile up in queue. Will be restarted when config update succeeds (next heartbeat).'});
if(!(err instanceof LoggedError)) {
this.logger.error(err, {subreddit: s.displayLabel});
}
if(this.nextHeartbeat !== undefined) {
this.logger.info(`Will retry parsing config on next heartbeat (in ${dayjs.duration(this.nextHeartbeat.diff(dayjs())).humanize()})`, {subreddit: s.displayLabel});
}
}
}
await this.runModStreams(true);
}
async runApiNanny() {
try {
mainLoop:
while (this.running) {
for(let i = 0; i < 2; i++) {
await sleep(5000);
if (!this.running) {
break mainLoop;
}
}
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
const nowish = dayjs().add(10, 'second');
if (nowish.isAfter(this.nextExpiration)) {
// it's possible no api calls are being made because of a hard limit
// need to make an api call to update this
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
const nowish = dayjs().add(10, 'second');
if (nowish.isAfter(this.nextExpiration)) {
// it's possible no api calls are being made because of a hard limit
// need to make an api call to update this
let shouldRetry = true;
while (shouldRetry) {
try {
// @ts-ignore
await this.client.getMe();
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
}
const rollingSample = this.apiSample.slice(0, 7)
rollingSample.unshift(this.client.ratelimitRemaining);
this.apiSample = rollingSample;
const diff = this.apiSample.reduceRight((acc: number[], curr, index) => {
if (this.apiSample[index + 1] !== undefined) {
const d = Math.abs(curr - this.apiSample[index + 1]);
if (d === 0) {
return [...acc, 0];
}
return [...acc, d / 10];
shouldRetry = false;
} catch (err) {
shouldRetry = await this.nannyRetryHandler(err);
if (!shouldRetry) {
throw err;
}
return acc;
}, []);
this.apiRollingAvg = diff.reduce((acc, curr) => acc + curr, 0) / diff.length; // api requests per second
this.depletedInSecs = this.client.ratelimitRemaining / this.apiRollingAvg; // number of seconds until current remaining limit is 0
this.apiEstDepletion = dayjs.duration({seconds: this.depletedInSecs});
this.logger.debug(`API Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`);
let hardLimitHit = false;
if (typeof this.hardLimit === 'string') {
const hardDur = parseDuration(this.hardLimit);
hardLimitHit = hardDur.asSeconds() > this.apiEstDepletion.asSeconds();
} else {
hardLimitHit = this.hardLimit > this.client.ratelimitRemaining;
}
if (hardLimitHit) {
if (this.nannyMode === 'hard') {
continue;
}
this.logger.info(`Detected HARD LIMIT of ${this.hardLimit} remaining`, {leaf: 'Api Nanny'});
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${this.apiRollingAvg}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
this.logger.info(`All subreddit event polling has been paused`, {leaf: 'Api Nanny'});
for (const m of this.subManagers) {
m.pauseEvents('system');
m.notificationManager.handle('runStateChanged', 'Hard Limit Triggered', `Hard Limit of ${this.hardLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit event polling has been paused.`, 'system', 'warn');
}
this.nannyMode = 'hard';
continue;
}
let softLimitHit = false;
if (typeof this.softLimit === 'string') {
const softDur = parseDuration(this.softLimit);
softLimitHit = softDur.asSeconds() > this.apiEstDepletion.asSeconds();
} else {
softLimitHit = this.softLimit > this.client.ratelimitRemaining;
}
if (softLimitHit) {
if (this.nannyMode === 'soft') {
continue;
}
this.logger.info(`Detected SOFT LIMIT of ${this.softLimit} remaining`, {leaf: 'Api Nanny'});
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
this.logger.info('Trying to detect heavy usage subreddits...', {leaf: 'Api Nanny'});
let threshold = 0.5;
let offenders = this.subManagers.filter(x => {
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
return combinedPerSec > threshold;
});
if (offenders.length === 0) {
threshold = 0.25;
// reduce threshold
offenders = this.subManagers.filter(x => {
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
return combinedPerSec > threshold;
});
}
if (offenders.length > 0) {
this.logger.info(`Slowing subreddits using >- ${threshold}req/s:`, {leaf: 'Api Nanny'});
for (const m of offenders) {
m.delayBy = 1.5;
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
}
} else {
this.logger.info(`Couldn't detect specific offenders, slowing all...`, {leaf: 'Api Nanny'});
for (const m of this.subManagers) {
m.delayBy = 1.5;
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
}
}
this.nannyMode = 'soft';
continue;
}
if (this.nannyMode !== undefined) {
this.logger.info('Turning off due to better conditions...', {leaf: 'Api Nanny'});
for (const m of this.subManagers) {
if (m.delayBy !== undefined) {
m.delayBy = undefined;
m.notificationManager.handle('runStateChanged', 'Normal Processing Resumed', 'Slow Mode has been turned off due to better API conditions', 'system');
}
if (m.queueState.state === PAUSED && m.queueState.causedBy === SYSTEM) {
m.startQueue('system', {reason: 'API Nanny has been turned off due to better API conditions'});
}
if (m.eventsState.state === PAUSED && m.eventsState.causedBy === SYSTEM) {
await m.startEvents('system', {reason: 'API Nanny has been turned off due to better API conditions'});
}
}
this.nannyMode = undefined;
}
}
this.nextExpiration = dayjs(this.client.ratelimitExpiration);
}
const rollingSample = this.apiSample.slice(0, 7)
rollingSample.unshift(this.client.ratelimitRemaining);
this.apiSample = rollingSample;
const diff = this.apiSample.reduceRight((acc: number[], curr, index) => {
if (this.apiSample[index + 1] !== undefined) {
const d = Math.abs(curr - this.apiSample[index + 1]);
if (d === 0) {
return [...acc, 0];
}
return [...acc, d / 10];
}
return acc;
}, []);
this.apiRollingAvg = diff.reduce((acc, curr) => acc + curr, 0) / diff.length; // api requests per second
this.depletedInSecs = this.client.ratelimitRemaining / this.apiRollingAvg; // number of seconds until current remaining limit is 0
this.apiEstDepletion = dayjs.duration({seconds: this.depletedInSecs});
this.logger.debug(`API Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`);
let hardLimitHit = false;
if (typeof this.hardLimit === 'string') {
const hardDur = parseDuration(this.hardLimit);
hardLimitHit = hardDur.asSeconds() > this.apiEstDepletion.asSeconds();
} else {
hardLimitHit = this.hardLimit > this.client.ratelimitRemaining;
}
if (hardLimitHit) {
if (this.nannyMode === 'hard') {
return;
}
this.logger.info(`Detected HARD LIMIT of ${this.hardLimit} remaining`, {leaf: 'Api Nanny'});
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${this.apiRollingAvg}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
this.logger.info(`All subreddit event polling has been paused`, {leaf: 'Api Nanny'});
for (const m of this.subManagers) {
m.pauseEvents('system');
m.notificationManager.handle('runStateChanged', 'Hard Limit Triggered', `Hard Limit of ${this.hardLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit event polling has been paused.`, 'system', 'warn');
}
this.nannyMode = 'hard';
return;
}
let softLimitHit = false;
if (typeof this.softLimit === 'string') {
const softDur = parseDuration(this.softLimit);
softLimitHit = softDur.asSeconds() > this.apiEstDepletion.asSeconds();
} else {
softLimitHit = this.softLimit > this.client.ratelimitRemaining;
}
if (softLimitHit) {
if (this.nannyMode === 'soft') {
return;
}
this.logger.info(`Detected SOFT LIMIT of ${this.softLimit} remaining`, {leaf: 'Api Nanny'});
this.logger.info(`API Remaining: ${this.client.ratelimitRemaining} | Usage Rolling Avg: ${formatNumber(this.apiRollingAvg)}/s | Est Depletion: ${this.apiEstDepletion.humanize()} (${formatNumber(this.depletedInSecs, {toFixed: 0})} seconds)`, {leaf: 'Api Nanny'});
this.logger.info('Trying to detect heavy usage subreddits...', {leaf: 'Api Nanny'});
let threshold = 0.5;
let offenders = this.subManagers.filter(x => {
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
return combinedPerSec > threshold;
});
if (offenders.length === 0) {
threshold = 0.25;
// reduce threshold
offenders = this.subManagers.filter(x => {
const combinedPerSec = x.eventsRollingAvg + x.rulesUniqueRollingAvg;
return combinedPerSec > threshold;
});
}
if (offenders.length > 0) {
this.logger.info(`Slowing subreddits using >- ${threshold}req/s:`, {leaf: 'Api Nanny'});
for (const m of offenders) {
m.delayBy = 1.5;
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
}
} else {
this.logger.info(`Couldn't detect specific offenders, slowing all...`, {leaf: 'Api Nanny'});
for (const m of this.subManagers) {
m.delayBy = 1.5;
m.logger.info(`SLOW MODE (Currently ~${formatNumber(m.eventsRollingAvg + m.rulesUniqueRollingAvg)}req/sec)`, {leaf: 'Api Nanny'});
m.notificationManager.handle('runStateChanged', 'Soft Limit Triggered', `Soft Limit of ${this.softLimit} hit (API Remaining: ${this.client.ratelimitRemaining}). Subreddit queue processing will be slowed to 1.5 seconds per.`, 'system', 'warn');
}
}
this.nannyMode = 'soft';
return
}
if (this.nannyMode !== undefined) {
this.logger.info('Turning off due to better conditions...', {leaf: 'Api Nanny'});
for (const m of this.subManagers) {
if (m.delayBy !== undefined) {
m.delayBy = undefined;
m.notificationManager.handle('runStateChanged', 'Normal Processing Resumed', 'Slow Mode has been turned off due to better API conditions', 'system');
}
if (m.queueState.state === PAUSED && m.queueState.causedBy === SYSTEM) {
m.startQueue('system', {reason: 'API Nanny has been turned off due to better API conditions'});
}
if (m.eventsState.state === PAUSED && m.eventsState.causedBy === SYSTEM) {
await m.startEvents('system', {reason: 'API Nanny has been turned off due to better API conditions'});
}
}
this.nannyMode = undefined;
}
} catch (err) {
this.logger.error('Error occurred during nanny loop', err);
this.logger.error(`Error occurred during nanny loop: ${err.message}`);
throw err;
} finally {
this.logger.info('Nanny stopped');
this.emitter.emit('nannyStopped');
}
}
}

View File

@@ -1,6 +1,7 @@
import {Check, CheckOptions, userResultCacheDefault, UserResultCacheOptions} from "./index";
import {CommentState} from "../Common/interfaces";
import {CommentState, UserResultCache} from "../Common/interfaces";
import {Submission, Comment} from "snoowrap/dist/objects";
import {RuleResult} from "../Rule";
export interface CommentCheckOptions extends CheckOptions {
cacheUserResult?: UserResultCacheOptions;
@@ -9,20 +10,12 @@ export interface CommentCheckOptions extends CheckOptions {
export class CommentCheck extends Check {
itemIs: CommentState[];
cacheUserResult: Required<UserResultCacheOptions>;
constructor(options: CommentCheckOptions) {
super(options);
const {
itemIs = [],
cacheUserResult = {},
} = options;
this.cacheUserResult = {
...userResultCacheDefault,
...cacheUserResult
}
this.itemIs = itemIs;
this.logSummary();
}
@@ -31,7 +24,7 @@ export class CommentCheck extends Check {
super.logSummary('comment');
}
async getCacheResult(item: Submission | Comment): Promise<boolean | undefined> {
async getCacheResult(item: Submission | Comment): Promise<UserResultCache | undefined> {
if (this.cacheUserResult.enable) {
return await this.resources.getCommentCheckCacheResult(item as Comment, {
name: this.name,
@@ -42,13 +35,22 @@ export class CommentCheck extends Check {
return undefined;
}
async setCacheResult(item: Submission | Comment, result: boolean): Promise<void> {
async setCacheResult(item: Submission | Comment, result: UserResultCache): Promise<void> {
if (this.cacheUserResult.enable) {
const {result: outcome, ruleResults} = result;
const res: UserResultCache = {
result: outcome,
// don't need to cache rule results if check was not triggered
// since we only use rule results for actions
ruleResults: outcome ? ruleResults : []
};
await this.resources.setCommentCheckCacheResult(item as Comment, {
name: this.name,
authorIs: this.authorIs,
itemIs: this.itemIs
}, result, this.cacheUserResult.ttl)
}, res, this.cacheUserResult.ttl)
}
}
}

View File

@@ -1,6 +1,7 @@
import {Check, CheckOptions} from "./index";
import {SubmissionState} from "../Common/interfaces";
import {SubmissionState, UserResultCache} from "../Common/interfaces";
import {Submission, Comment} from "snoowrap/dist/objects";
import {RuleResult} from "../Rule";
export class SubmissionCheck extends Check {
itemIs: SubmissionState[];
@@ -15,11 +16,4 @@ export class SubmissionCheck extends Check {
logSummary() {
super.logSummary('submission');
}
async getCacheResult(item: Submission | Comment) {
return undefined;
}
async setCacheResult(item: Submission | Comment, result: boolean) {
}
}

View File

@@ -16,12 +16,13 @@ import {
truncateStringToLength
} from "../util";
import {
ActionResult,
ChecksActivityState,
CommentState,
JoinCondition,
JoinOperands,
SubmissionState,
TypedActivityStates
TypedActivityStates, UserResultCache
} from "../Common/interfaces";
import * as RuleSchema from '../Schema/Rule.json';
import * as RuleSetSchema from '../Schema/RuleSet.json';
@@ -45,6 +46,7 @@ export abstract class Check implements ICheck {
include: AuthorCriteria[],
exclude: AuthorCriteria[]
};
cacheUserResult: Required<UserResultCacheOptions>;
dryRun?: boolean;
notifyOnTrigger: boolean;
resources: SubredditResources;
@@ -62,6 +64,7 @@ export abstract class Check implements ICheck {
actions = [],
notifyOnTrigger = false,
subredditName,
cacheUserResult = {},
itemIs = [],
authorIs: {
include = [],
@@ -88,6 +91,10 @@ export abstract class Check implements ICheck {
exclude: exclude.map(x => new Author(x)),
include: include.map(x => new Author(x)),
}
this.cacheUserResult = {
...userResultCacheDefault,
...cacheUserResult
}
this.dryRun = dryRun;
for (const r of rules) {
if (r instanceof Rule || r instanceof RuleSet) {
@@ -170,10 +177,14 @@ export abstract class Check implements ICheck {
}
}
abstract getCacheResult(item: Submission | Comment) : Promise<boolean | undefined>;
abstract setCacheResult(item: Submission | Comment, result: boolean): void;
async getCacheResult(item: Submission | Comment) : Promise<UserResultCache | undefined> {
return undefined;
}
async runRules(item: Submission | Comment, existingResults: RuleResult[] = []): Promise<[boolean, RuleResult[]]> {
async setCacheResult(item: Submission | Comment, result: UserResultCache): Promise<void> {
}
async runRules(item: Submission | Comment, existingResults: RuleResult[] = []): Promise<[boolean, RuleResult[], boolean?]> {
try {
let allRuleResults: RuleResult[] = [];
let allResults: (RuleResult | RuleSetResult)[] = [];
@@ -182,7 +193,7 @@ export abstract class Check implements ICheck {
const cacheResult = await this.getCacheResult(item);
if(cacheResult !== undefined) {
this.logger.verbose(`Skipping rules run because result was found in cache, Check Triggered Result: ${cacheResult}`);
return [cacheResult, allRuleResults];
return [cacheResult.result, cacheResult.ruleResults, true];
}
const itemPass = await this.resources.testItemCriteria(item, this.itemIs);
@@ -264,23 +275,27 @@ export abstract class Check implements ICheck {
}
}
async runActions(item: Submission | Comment, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<Action[]> {
async runActions(item: Submission | Comment, ruleResults: RuleResult[], runtimeDryrun?: boolean): Promise<ActionResult[]> {
const dr = runtimeDryrun || this.dryRun;
this.logger.debug(`${dr ? 'DRYRUN - ' : ''}Running Actions`);
const runActions: Action[] = [];
const runActions: ActionResult[] = [];
for (const a of this.actions) {
if(!a.enabled) {
runActions.push({
kind: a.getKind(),
name: a.getActionUniqueName(),
run: false,
success: false,
runReason: 'Not enabled',
dryRun: (a.dryRun || dr) || false,
});
this.logger.info(`Action ${a.getActionUniqueName()} not run because it is not enabled.`);
continue;
}
try {
await a.handle(item, ruleResults, runtimeDryrun);
runActions.push(a);
} catch (err) {
this.logger.error(`Action ${a.getActionUniqueName()} encountered an error while running`, err);
}
const res = await a.handle(item, ruleResults, runtimeDryrun);
runActions.push(res);
}
this.logger.info(`${dr ? 'DRYRUN - ' : ''}Ran Actions: ${runActions.map(x => x.getActionUniqueName()).join(' | ')}`);
this.logger.info(`${dr ? 'DRYRUN - ' : ''}Ran Actions: ${runActions.map(x => x.name).join(' | ')}`);
return runActions;
}
}
@@ -337,6 +352,7 @@ export interface CheckOptions extends ICheck {
notifyOnTrigger?: boolean
resources: SubredditResources
client: Snoowrap
cacheUserResult?: UserResultCacheOptions;
}
export interface CheckJson extends ICheck {
@@ -371,6 +387,8 @@ export interface CheckJson extends ICheck {
* @default false
* */
notifyOnTrigger?: boolean,
cacheUserResult?: UserResultCacheOptions;
}
export interface SubmissionCheckJson extends CheckJson {
@@ -388,6 +406,9 @@ export interface SubmissionCheckJson extends CheckJson {
* 3. The rule results are not likely to change while cache is valid
* */
export interface UserResultCacheOptions {
/**
* @default false
* */
enable?: boolean,
/**
* The amount of time, in seconds, to cache this result
@@ -396,17 +417,23 @@ export interface UserResultCacheOptions {
* @examples [60]
* */
ttl?: number,
/**
* In the event the cache returns a triggered result should the actions for the check also be run?
*
* @default true
* */
runActions?: boolean
}
export const userResultCacheDefault: Required<UserResultCacheOptions> = {
enable: false,
ttl: 60,
runActions: true,
}
export interface CommentCheckJson extends CheckJson {
kind: 'comment'
itemIs?: CommentState[]
cacheUserResult?: UserResultCacheOptions
}
export type CheckStructuredJson = SubmissionCheckStructuredJson | CommentCheckStructuredJson;

248
src/Common/ImageData.ts Normal file
View File

@@ -0,0 +1,248 @@
import fetch from "node-fetch";
import {Submission} from "snoowrap/dist/objects";
import {URL} from "url";
import {absPercentDifference, getSharpAsync, isValidImageURL} from "../util";
import sizeOf from "image-size";
import SimpleError from "../Utils/SimpleError";
import {Sharp} from "sharp";
import {blockhash} from "./blockhash/blockhash";
/**
 * Construction options for ImageData: the source URL plus any dimensions and
 * alternate-resolution variants already known (e.g. from reddit preview data,
 * see ImageData.fromSubmission).
 */
export interface ImageDataOptions {
    // known width in pixels, when available
    width?: number,
    // known height in pixels, when available
    height?: number,
    url: string,
    // alternate (usually lower) resolutions of the same image
    variants?: ImageData[]
}
/**
 * Lazily-fetched remote image wrapper used for image comparison.
 *
 * Wraps a URL (plus optional known dimensions and lower-resolution variants)
 * and defers the actual network fetch/decode (via sharp) until first use.
 * The decoded image, perceptual hash, and measured resolution are memoized.
 */
class ImageData {

    // Dimensions as reported by metadata (e.g. preview data); may be unset
    // until the image is actually fetched and decoded
    width?: number
    height?: number
    url: URL
    // Alternate (usually lower) resolutions of the same image
    variants: ImageData[] = []
    // [width, height] of the variant resolution preferred for comparisons
    preferredResolution?: [number, number]
    // Memoized decoded image -- populated on first call to sharp()
    sharpImg!: Sharp
    // Memoized perceptual hash -- populated on first call to hash()
    hashResult!: string
    // Dimensions measured from the decoded image itself (not just metadata)
    actualResolution?: [number, number]

    /**
     * @param data       source URL plus any dimensions/variants already known
     * @param aggressive when true, skip the image-extension check on the URL
     * @throws Error when aggressive is false and the URL does not end with a
     *         valid image extension
     */
    constructor(data: ImageDataOptions, aggressive = false) {
        this.width = data.width;
        this.height = data.height;
        this.url = new URL(data.url);
        // validate against origin+pathname only so query strings don't affect the extension check
        if (!aggressive && !isValidImageURL(`${this.url.origin}${this.url.pathname}`)) {
            throw new Error('URL did not end with a valid image extension');
        }
        this.variants = data.variants || [];
    }

    /**
     * Returns the decoded image as a Buffer in the given sharp output format.
     * @param format sharp format name; defaults to 'raw' pixel data
     */
    async data(format = 'raw'): Promise<Buffer> {
        // @ts-ignore -- 'raw' is presumably not in sharp's typed format enum; confirm against sharp typings
        return await (await this.sharp()).clone().toFormat(format).toBuffer();
    }

    /**
     * Computes (and memoizes) the blockhash perceptual hash of the image.
     * @param bits                 hash size, passed through to blockhash
     * @param useVariantIfPossible when true and a preferred resolution is set,
     *        hash a matching lower-resolution variant instead of the full image
     */
    async hash(bits: number, useVariantIfPossible = true): Promise<string> {
        if(this.hashResult === undefined) {
            let ref: ImageData | undefined;
            if(useVariantIfPossible && this.preferredResolution !== undefined) {
                ref = this.getSimilarResolutionVariant(this.preferredResolution[0], this.preferredResolution[1]);
            }
            if(ref === undefined) {
                ref = this;
            }
            this.hashResult = await blockhash((await ref.sharp()).clone(), bits);
        }
        return this.hashResult;
    }

    /**
     * Fetches the image over HTTP and decodes it with sharp (memoized).
     * Animated gif/webp responses are flattened to their first frame so later
     * comparisons operate on a static image.
     * @throws SimpleError on a non-OK response or a non-image Content-Type
     * @throws Error (wrapped with context) when the fetch/decode itself fails
     */
    async sharp(): Promise<Sharp> {
        if (this.sharpImg === undefined) {
            try {
                const response = await fetch(this.url.toString())
                if (response.ok) {
                    const ct = response.headers.get('Content-Type');
                    if (ct !== null && ct.includes('image')) {
                        const sFunc = await getSharpAsync();
                        // if image is animated then we want to extract the first frame and convert it to a regular image
                        // so we can compare two static images later (also because sharp can't use resize() on animated images)
                        if(['gif','webp'].some(x => ct.includes(x))) {
                            this.sharpImg = await sFunc(await (await sFunc(await response.buffer(), {pages: 1, animated: false})).png().toBuffer());
                        } else {
                            this.sharpImg = await sFunc(await response.buffer());
                        }
                        const meta = await this.sharpImg.metadata();
                        // backfill dimensions from the decoded image if metadata didn't provide them
                        if (this.width === undefined || this.height === undefined) {
                            this.width = meta.width;
                            this.height = meta.height;
                        }
                        this.actualResolution = [meta.width as number, meta.height as number];
                    } else {
                        throw new SimpleError(`Content-Type for fetched URL ${this.url} did not contain "image"`);
                    }
                } else {
                    throw new SimpleError(`URL response was not OK: (${response.status})${response.statusText}`);
                }
            } catch (err) {
                // wrap unexpected errors with context; SimpleErrors already carry a useful message
                if(!(err instanceof SimpleError)) {
                    throw new Error(`Error occurred while fetching response from URL: ${err.message}`);
                } else {
                    throw err;
                }
            }
        }
        return this.sharpImg;
    }

    // Total pixel count, preferring the measured resolution over metadata;
    // undefined when neither is known
    get pixels() {
        if (this.actualResolution !== undefined) {
            return this.actualResolution[0] * this.actualResolution[1];
        }
        if (this.width === undefined || this.height === undefined) {
            return undefined;
        }
        return this.width * this.height;
    }

    // True when both width and height are known
    get hasDimensions() {
        return this.width !== undefined && this.height !== undefined;
    }

    // URL without query string or fragment
    get baseUrl() {
        return `${this.url.origin}${this.url.pathname}`;
    }

    /**
     * Records as "preferred" the resolution of the last variant whose width is
     * <= prefWidth. No-op when there are no variants or none is narrow enough.
     * NOTE(review): taking the *last* match only selects the largest qualifying
     * variant if variants are ordered smallest-to-largest -- confirm against
     * how variants are populated (fromSubmission preserves reddit's ordering).
     */
    setPreferredResolutionByWidth(prefWidth: number) {
        let height: number | undefined = undefined,
            width: number | undefined = undefined;
        if (this.variants.length === 0) {
            return;
        }
        for (const v of this.variants) {
            if (v.hasDimensions && (v.width as number) <= prefWidth) {
                width = v.width as number;
                height = v.height as number;
            }
        }
        if (width !== undefined) {
            this.preferredResolution = [width, (height as number)];
        }
    }

    /**
     * Finds the first variant whose width AND height are both within
     * allowablePercentDiff percent of the given dimensions.
     * @returns the matching variant, or undefined when none qualifies
     */
    getSimilarResolutionVariant(width: number, height: number, allowablePercentDiff = 0): ImageData | undefined {
        if (this.variants.length === 0) {
            return undefined;
        }
        return this.variants.find(x => {
            return x.hasDimensions && (absPercentDifference(width, x.width as number) <= allowablePercentDiff) && (absPercentDifference(height, x.height as number) <= allowablePercentDiff);
        });
    }

    // Exact dimension equality; false when either image's dimensions are unknown
    isSameDimensions(otherImage: ImageData) {
        if (!this.hasDimensions || !otherImage.hasDimensions) {
            return false;
        }
        return this.width === otherImage.width && this.height === otherImage.height;
    }

    /**
     * Compares width/height ratios of the two images, fetching/decoding either
     * image whose measured resolution is not yet known.
     * @returns true when the ratios differ by less than 0.1
     */
    async sameAspectRatio(otherImage: ImageData) {
        let thisRes = this.actualResolution;
        let otherRes = otherImage.actualResolution;

        if(thisRes === undefined) {
            const tMeta = await (await this.sharp()).metadata();
            const thisMeta = {width: tMeta.width as number, height: tMeta.height as number };
            this.actualResolution = [thisMeta.width, thisMeta.height];
            thisRes = this.actualResolution;
        }
        if(otherRes === undefined) {
            const otherMeta = await (await otherImage.sharp()).metadata();
            otherRes = [otherMeta.width as number, otherMeta.height as number];
        }

        const thisRatio = thisRes[0] / thisRes[1];
        const otherRatio = otherRes[0] / otherRes[1];

        // a little leeway
        return Math.abs(thisRatio - otherRatio) < 0.1;
    }

    // Reads width/height from a sharp image's metadata
    // (assumes both are present -- TODO confirm for all decoded formats)
    static async dimensionsFromMetadata(img: Sharp) {
        const {width, height, ...rest} = await img.metadata();
        return {width: width as number, height: height as number};
    }

    /**
     * Prepares this image and imgToCompare for pixel-level comparison:
     * picks matching lower-resolution variants when possible, resizes/crops so
     * both share the same dimensions, then downscales both to reduce cost.
     * @returns [refSharp, compareSharp, width, height] of the normalized pair
     */
    async normalizeImagesForComparison(compareLibrary: ('pixel' | 'resemble'), imgToCompare: ImageData): Promise<[Sharp, Sharp, number, number]> {
        // NOTE(review): compareLibrary is currently unused in this method body
        const sFunc = await getSharpAsync();

        let refImage = this as ImageData;
        let compareImage = imgToCompare;
        // prefer matching lower-resolution variants from both images when available
        if (this.preferredResolution !== undefined) {
            const matchingVariant = compareImage.getSimilarResolutionVariant(this.preferredResolution[0], this.preferredResolution[1]);
            if (matchingVariant !== undefined) {
                compareImage = matchingVariant;
                refImage = this.getSimilarResolutionVariant(this.preferredResolution[0], this.preferredResolution[1]) as ImageData;
            }
        }

        let refSharp = (await refImage.sharp()).clone();
        let refMeta = await ImageData.dimensionsFromMetadata(refSharp);
        let compareSharp = (await compareImage.sharp()).clone();
        let compareMeta = await ImageData.dimensionsFromMetadata(compareSharp);

        // if dimensions are not the same we need to crop or resize before the final resize
        if (refMeta.width !== compareMeta.width || refMeta.height !== compareMeta.height) {
            const thisRatio = refMeta.width / (refMeta.height);
            const otherRatio = compareMeta.width / compareMeta.height;
            const sameRatio = Math.abs(thisRatio - otherRatio) < 0.04;
            if (sameRatio) {
                // then resize first since its most likely the same image
                // can be fairly sure a downscale will get pixels close to the same
                if (refMeta.width > compareMeta.width) {
                    refSharp = sFunc(await refSharp.resize(compareMeta.width, null, {fit: 'outside'}).toBuffer());
                } else {
                    compareSharp = sFunc(await compareSharp.resize(refMeta.width, null, {fit: 'outside'}).toBuffer());
                }
                refMeta = await ImageData.dimensionsFromMetadata(refSharp);
                compareMeta = await ImageData.dimensionsFromMetadata(compareSharp);
            }

            // find smallest common dimensions
            const sWidth = refMeta.width <= compareMeta.width ? refMeta.width : compareMeta.width;
            const sHeight = refMeta.height <= compareMeta.height ? refMeta.height : compareMeta.height;

            // crop if necessary
            if(sWidth !== refMeta.width || sHeight !== refMeta.height) {
                refSharp = sFunc(await refSharp.extract({left: 0, top: 0, width: sWidth, height: sHeight}).toBuffer());
            }
            if(sWidth !== compareMeta.width || sHeight !== compareMeta.height) {
                compareSharp = sFunc(await compareSharp.extract({left: 0, top: 0, width: sWidth, height: sHeight}).toBuffer());
            }
        }

        // final resize to reduce memory/cpu usage during comparison
        refSharp = sFunc(await refSharp.resize(400, null, {fit: 'outside'}).toBuffer());
        compareSharp = sFunc(await compareSharp.resize(400, null, {fit: 'outside'}).toBuffer());

        const {width, height} = await ImageData.dimensionsFromMetadata(refSharp);

        return [refSharp, compareSharp, width, height];
    }

    /**
     * Builds an ImageData from a reddit Submission's direct URL, attaching
     * source dimensions and resolution variants from preview data when present.
     * @param aggressive passed through to the constructor to relax URL validation
     */
    static fromSubmission(sub: Submission, aggressive = false): ImageData {
        const url = new URL(sub.url);
        const data: any = {
            url,
        };
        let variants = [];
        if (sub.preview !== undefined && sub.preview.enabled && sub.preview.images.length > 0) {
            const firstImg = sub.preview.images[0];
            const ref = sub.preview.images[0].source;
            data.width = ref.width;
            data.height = ref.height;
            variants = firstImg.resolutions.map(x => new ImageData(x));
            data.variants = variants;
        }

        return new ImageData(data, aggressive);
    }
}

export default ImageData;

View File

@@ -0,0 +1,234 @@
// Perceptual image hash calculation tool based on algorithm described in
// Block Mean Value Based Image Perceptual Hashing by Bian Yang, Fan Gu and Xiamu Niu
//
// Copyright 2014 Commons Machinery http://commonsmachinery.se/
// Distributed under an MIT license, please see LICENSE in the top dir.
// https://github.com/commonsmachinery/blockhash-js/blob/master/index.js
import {Sharp} from "sharp";
// Raw decoded image pixels plus dimensions, as consumed by the block hash functions.
// `data` holds 4 bytes per pixel (R, G, B, A) in row-major order -- the hashing code
// indexes it as (y * width + x) * 4 and reads the alpha channel at offset +3.
interface BlockImageData {
    data: Buffer,
    width: number,
    height: number
}
// Lookup table: number of set bits in each 4-bit value (0x0 through 0xF)
const one_bits = [0, 1, 1, 2, 1, 2, 2, 3, 1, 2, 2, 3, 2, 3, 3, 4];
/**
 * Calculate the hamming distance between two hashes in hex format.
 *
 * Each hex digit contributes the number of differing bits between the two
 * corresponding nibbles (computed via XOR and the popcount table above).
 *
 * @throws Error when the hashes are not the same length
 */
export const hammingDistance = (hash1: string, hash2: string) => {
    if (hash1.length !== hash2.length) {
        throw new Error("Can't compare hashes with different length");
    }
    let distance = 0;
    for (let i = 0; i < hash1.length; i++) {
        const xored = parseInt(hash1[i], 16) ^ parseInt(hash2[i], 16);
        distance += one_bits[xored];
    }
    return distance;
};
/**
 * Median of a list of numbers. Works on a copy, so the input is not mutated.
 * Averages the two middle values when the length is even.
 */
const median = function(data: number[]) {
    const sorted = [...data].sort((a, b) => a - b);
    const mid = Math.floor(sorted.length / 2);
    if (sorted.length % 2 === 0) {
        return (sorted[mid - 1] + sorted[mid]) / 2.0;
    }
    return sorted[mid];
};
/**
 * Convert block brightness totals into bits, in place.
 *
 * The blocks are split into four horizontal bands; within each band a block
 * becomes 1 when brighter than the band median, 0 otherwise.
 */
const translate_blocks_to_bits = function(blocks: number[], pixels_per_block: number) {
    // total of a block that is exactly 50% grey (3 channels * 256 levels / 2 per pixel)
    const half_block_value = pixels_per_block * 256 * 3 / 2;
    const bandsize = blocks.length / 4;
    // Compare medians across four horizontal bands
    for (let band = 0; band < 4; band++) {
        const start = band * bandsize;
        const end = (band + 1) * bandsize;
        const m = median(blocks.slice(start, end));
        for (let j = start; j < end; j++) {
            const v = blocks[j];
            // Output a 1 if the block is brighter than the median.
            // With images dominated by black or white, the median may
            // end up being 0 or the max value, and thus having a lot
            // of blocks of value equal to the median. To avoid
            // generating hashes of all zeros or ones, in that case output
            // 0 if the median is in the lower value space, 1 otherwise
            blocks[j] = Number(v > m || (Math.abs(v - m) < 1 && m > half_block_value));
        }
    }
};
/**
 * Pack an array of 0/1 bits into a lowercase hex string, 4 bits per digit.
 * The bit array length is expected to be a multiple of 4.
 */
const bits_to_hexhash = function(bitsArray: number[]) {
    const nibbles: string[] = [];
    for (let i = 0; i < bitsArray.length; i += 4) {
        const value = parseInt(bitsArray.slice(i, i + 4).join(''), 2);
        nibbles.push(value.toString(16));
    }
    return nibbles.join('');
};
/**
 * Block-mean-value hash for images whose dimensions divide evenly by `bits`.
 *
 * The image is split into a bits x bits grid; each block's RGB values are summed
 * (transparent pixels count as white), then totals are thresholded against band
 * medians and packed into a hex string. If the dimensions do not divide evenly,
 * pixels beyond the last full block are silently ignored (block sizes are floored).
 */
const bmvbhash_even = function(data: BlockImageData, bits: number) {
    const blockWidth = Math.floor(data.width / bits);
    const blockHeight = Math.floor(data.height / bits);
    const totals: number[] = [];
    for (let by = 0; by < bits; by++) {
        for (let bx = 0; bx < bits; bx++) {
            let sum = 0;
            for (let py = 0; py < blockHeight; py++) {
                for (let px = 0; px < blockWidth; px++) {
                    const idx = ((by * blockHeight + py) * data.width + (bx * blockWidth + px)) * 4;
                    // fully transparent pixels count as white (255 * 3 channels = 765)
                    sum += data.data[idx + 3] === 0
                        ? 765
                        : data.data[idx] + data.data[idx + 1] + data.data[idx + 2];
                }
            }
            totals.push(sum);
        }
    }
    translate_blocks_to_bits(totals, blockWidth * blockHeight);
    return bits_to_hexhash(totals);
};
/**
 * Block-mean-value hash (method 2) for images of any dimensions.
 *
 * When the width or height does not divide evenly by `bits`, each pixel on a
 * fractional block boundary is split between the two adjacent blocks using
 * bilinear weights, so every block receives its proportional share of pixel
 * value. Delegates to bmvbhash_even when both dimensions divide evenly.
 *
 * @param data raw RGBA image data
 * @param bits grid size -- the resulting hash has bits*bits bits
 * @returns hex-encoded hash string
 */
var bmvbhash = function(data: BlockImageData, bits: number) {
    var result = [];
    var i, j, x, y;
    var block_width, block_height;
    var weight_top, weight_bottom, weight_left, weight_right;
    var block_top, block_bottom, block_left, block_right;
    var y_mod, y_frac, y_int;
    var x_mod, x_frac, x_int;
    var blocks: number[][] = [];
    var even_x = data.width % bits === 0;
    var even_y = data.height % bits === 0;
    // fast path: no fractional blocks, so no weighting needed
    if (even_x && even_y) {
        return bmvbhash_even(data, bits);
    }
    // initialize blocks array with 0s
    for (i = 0; i < bits; i++) {
        blocks.push([]);
        for (j = 0; j < bits; j++) {
            blocks[i].push(0);
        }
    }
    // fractional block dimensions (at least one is non-integer on this path)
    block_width = data.width / bits;
    block_height = data.height / bits;
    for (y = 0; y < data.height; y++) {
        if (even_y) {
            // don't bother dividing y, if the size evenly divides by bits
            block_top = block_bottom = Math.floor(y / block_height);
            weight_top = 1;
            weight_bottom = 0;
        } else {
            // split this pixel row between the two vertically adjacent blocks,
            // weighted by how far the row reaches into each block
            y_mod = (y + 1) % block_height;
            y_frac = y_mod - Math.floor(y_mod);
            y_int = y_mod - y_frac;
            weight_top = (1 - y_frac);
            weight_bottom = (y_frac);
            // y_int will be 0 on bottom/right borders and on block boundaries
            if (y_int > 0 || (y + 1) === data.height) {
                block_top = block_bottom = Math.floor(y / block_height);
            } else {
                block_top = Math.floor(y / block_height);
                block_bottom = Math.ceil(y / block_height);
            }
        }
        for (x = 0; x < data.width; x++) {
            var ii = (y * data.width + x) * 4;
            var avgvalue, alpha = data.data[ii+3];
            // fully transparent pixels count as white (255 * 3 channels = 765)
            if (alpha === 0) {
                avgvalue = 765;
            } else {
                avgvalue = data.data[ii] + data.data[ii+1] + data.data[ii+2];
            }
            if (even_x) {
                block_left = block_right = Math.floor(x / block_width);
                weight_left = 1;
                weight_right = 0;
            } else {
                // horizontal analogue of the vertical weighting above
                x_mod = (x + 1) % block_width;
                x_frac = x_mod - Math.floor(x_mod);
                x_int = x_mod - x_frac;
                weight_left = (1 - x_frac);
                weight_right = x_frac;
                // x_int will be 0 on bottom/right borders and on block boundaries
                if (x_int > 0 || (x + 1) === data.width) {
                    block_left = block_right = Math.floor(x / block_width);
                } else {
                    block_left = Math.floor(x / block_width);
                    block_right = Math.ceil(x / block_width);
                }
            }
            // add weighted pixel value to relevant blocks
            blocks[block_top][block_left] += avgvalue * weight_top * weight_left;
            blocks[block_top][block_right] += avgvalue * weight_top * weight_right;
            blocks[block_bottom][block_left] += avgvalue * weight_bottom * weight_left;
            blocks[block_bottom][block_right] += avgvalue * weight_bottom * weight_right;
        }
    }
    // flatten the grid row-major, threshold against band medians, encode as hex
    for (i = 0; i < bits; i++) {
        for (j = 0; j < bits; j++) {
            result.push(blocks[i][j]);
        }
    }
    translate_blocks_to_bits(result, block_width * block_height);
    return bits_to_hexhash(result);
};
/**
 * Dispatch to a block-hash implementation by method number.
 *
 * @param imgData raw RGBA image data
 * @param bits grid size for the hash
 * @param method 1 = bmvbhash_even (dimensions should divide evenly), 2 = bmvbhash (any dimensions)
 * @throws Error when method is neither 1 nor 2
 */
const blockhashData = function(imgData: BlockImageData, bits: number, method: number) {
    switch (method) {
        case 1:
            return bmvbhash_even(imgData, bits);
        case 2:
            return bmvbhash(imgData, bits);
        default:
            throw new Error("Bad hashing method");
    }
};
/**
 * Compute the perceptual (block mean value) hash of an image.
 *
 * @param src sharp instance wrapping the image to hash
 * @param bits grid size; the resulting hash is bits*bits bits long
 * @param method 1 = even-block variant, 2 (default) = handles any dimensions
 * @returns hex-encoded hash string
 */
export const blockhash = async function(src: Sharp, bits: number, method: number = 2): Promise<string> {
    // decode to raw RGBA (ensureAlpha guarantees the 4th channel exists)
    const raw = await src.ensureAlpha().raw().toBuffer({resolveWithObject: true});
    const {data, info} = raw;
    return blockhashData({
        width: info.width,
        height: info.height,
        data,
    }, bits, method);
};

View File

@@ -1,2 +1,31 @@
import {HistoricalStats} from "./interfaces";
export const cacheOptDefaults = {ttl: 60, max: 500, checkPeriod: 600};
export const cacheTTLDefaults = {authorTTL: 60, userNotesTTL: 300, wikiTTL: 300, submissionTTL: 60, commentTTL: 60, filterCriteriaTTL: 60};
export const cacheTTLDefaults = {authorTTL: 60, userNotesTTL: 300, wikiTTL: 300, submissionTTL: 60, commentTTL: 60, filterCriteriaTTL: 60, subredditTTL: 600};
// Baseline (all-zero) historical stat counters.
// NOTE(review): this is a single module-level object and its Map values are mutable --
// if a consumer mutates it, the "defaults" leak to every other consumer. Prefer
// createHistoricalDefaults() below when an independent, fresh instance is needed.
export const historicalDefaults: HistoricalStats = {
    eventsCheckedTotal: 0,
    eventsActionedTotal: 0,
    checksRun: new Map(),
    checksFromCache: new Map(),
    checksTriggered: new Map(),
    rulesRun: new Map(),
    //rulesCached: new Map(),
    rulesCachedTotal: 0,
    rulesTriggered: new Map(),
    actionsRun: new Map(),
}
/**
 * Produce a zeroed-out HistoricalStats object.
 *
 * Every call returns a brand new object with fresh Map instances, so counters
 * are never shared between consumers (unlike the module-level constant above it
 * in this file, which is a single shared object).
 */
export const createHistoricalDefaults = (): HistoricalStats => {
    const fresh: HistoricalStats = {
        eventsCheckedTotal: 0,
        eventsActionedTotal: 0,
        checksRun: new Map(),
        checksFromCache: new Map(),
        checksTriggered: new Map(),
        rulesRun: new Map(),
        //rulesCached: new Map(),
        rulesCachedTotal: 0,
        rulesTriggered: new Map(),
        actionsRun: new Map(),
    };
    return fresh;
}

View File

@@ -3,6 +3,8 @@ import {Cache} from 'cache-manager';
import {MESSAGE} from 'triple-beam';
import Poll from "snoostorm/out/util/Poll";
import Snoowrap from "snoowrap";
import {RuleResult} from "../Rule";
import {IncomingMessage} from "http";
/**
* An ISO 8601 Duration
@@ -222,6 +224,186 @@ export interface ReferenceSubmission {
useSubmissionAsReference?: boolean,
}
/**
* When comparing submissions detect if the reference submission is an image and do a pixel-comparison to other detected image submissions.
*
* **Note:** This is an **experimental feature**
* */
export interface ImageDetection {
/**
* Is image detection enabled?
* */
enable?: boolean
/**
* Determines how and when to check if a URL is an image
*
* **Note:** After fetching a URL the **Content-Type** is validated to contain `image` before detection occurs
*
* **When `extension`:** (default)
*
* * Only URLs that end in known image extensions (.png, .jpg, etc...) are fetched
*
* **When `unknown`:**
*
* * URLs that end in known image extensions (.png, .jpg, etc...) are fetched
* * URLs with no extension or unknown (IE non-video, non-doc, etc...) are fetched
*
* **When `all`:**
*
* * All submissions that have URLs (non-self) will be fetched, regardless of extension
* * **Note:** This can be bandwidth/CPU intensive if history window is large so use with care
*
* @default "extension"
* */
fetchBehavior?: 'extension' | 'unknown' | 'all',
/**
* The percentage, as a whole number, of difference between two images at which point they will not be considered the same.
*
* Will be used as `hash.hardThreshold` and `pixel.threshold` if those values are not specified
*
* Default is `5`
*
* @default 5
* */
threshold?: number
/**
* Use perceptual hashing (blockhash-js) to compare images
*
* Pros:
*
* * very fast
* * low cpu/memory usage
* * results can be cached
*
* Cons:
*
* * not as accurate as pixel comparison
* * weaker for text-heavy images
* * mostly color-blind
*
* Best uses:
*
* * Detecting (general) duplicate images
* * Comparing large number of images
* */
hash?: {
/**
* Enabled by default.
*
* If both `hash` and `pixel` are enabled then `pixel` will be used to verify image comparison when hashes matches
*
* @default true
* */
enable?: boolean
/**
* Bit count determines accuracy of hash and granularity of hash comparison (comparison to other hashes)
*
* Default is `32`
*
* **NOTE:** Hashes of different sizes (bits) cannot be compared. If you are caching results make sure all rules where results may be shared use the same bit count to ensure hashes can be compared. Otherwise hashes will be recomputed.
*
* @default 32
* */
bits?: number
/**
* Number of seconds to cache image hash
* */
ttl?: number
/**
* High Confidence Threshold
*
* If the difference in comparison is equal to or less than this number the images are considered the same and pixel comparison WILL NOT occur
*
* Defaults to the parent-level `threshold` value if not present
*
* Use `null` if you want pixel comparison to ALWAYS occur (softThreshold must be present)
* */
hardThreshold?: number | null
/**
* Low Confidence Threshold -- only used if `pixel` is enabled
*
* If the difference in comparison is
*
* 1) equal to or less than this value and
* 2) the value is greater than `hardThreshold`
*
* the images will be compared using the `pixel` method
* */
softThreshold?: number
}
/**
* Use pixel counting to compare images
*
* Pros:
*
* * most accurate
* * strong with text or color-only changes
*
* Cons:
*
* * much slower than hashing
* * memory/cpu intensive
*
* Best uses:
*
* Comparing text-only images
* When the comparison requires a high degree of accuracy or changes are subtle
* */
pixel?: {
/**
* Disabled by default.
*
* @default false
* */
enable?: boolean
/**
* The percentage, as a whole number, of pixels that are **different** between the two images at which point the images are not considered the same.
* */
threshold?: number
}
}
export interface StrongImageDetection {
enable: boolean,
fetchBehavior: 'extension' | 'unknown' | 'all'
threshold: number,
hash: {
enable: boolean
bits: number
ttl?: number
hardThreshold: number | null
softThreshold?: number
}
pixel: {
enable: boolean
threshold: number
}
}
// export interface ImageData {
// data: Promise<Buffer>,
// buf?: Buffer,
// width: number,
// height: number
// pixels?: number
// url: string
// variants?: ImageData[]
// }
export interface ImageComparisonResult {
isSameDimensions: boolean
dimensionDifference: {
width: number;
height: number;
};
misMatchPercentage: number;
analysisTime: number;
}
export interface RichContent {
/**
* The Content to submit for this Action. Content is interpreted as reddit-flavored Markdown.
@@ -304,6 +486,38 @@ export type PollOn = 'unmoderated' | 'modqueue' | 'newSub' | 'newComm';
export interface PollingOptionsStrong extends PollingOptions {
limit: number,
interval: number,
clearProcessed: ClearProcessedOptions
}
/**
* For very long-running, high-volume subreddits clearing the list of processed activities helps manage memory bloat
*
* All of these options have default values based on the limit and/or interval set for polling options on each subreddit stream. They only need to modified if the defaults are not sufficient.
*
If both `after` and `size` are defined, whichever is hit first will trigger the list to clear. `after` will be reset after every clear.
* */
export interface ClearProcessedOptions {
/**
* An interval the processed list should be cleared after.
*
* * EX `9 days`
* * EX `3 months`
* * EX `5 minutes`
* @pattern ^\s*(?<time>\d+)\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\s*$
* */
after?: string,
/**
* Number of activities found in processed list after which the list should be cleared.
*
* Defaults to the `limit` value from `PollingOptions`
* */
size?: number,
/**
* The number of activities to retain in processed list after clearing.
*
* Defaults to `limit` value from `PollingOptions`
* */
retain?: number,
}
export interface PollingDefaults {
@@ -377,53 +591,122 @@ export interface PollingOptions extends PollingDefaults {
*
* */
pollOn: 'unmoderated' | 'modqueue' | 'newSub' | 'newComm'
clearProcessed?: ClearProcessedOptions
}
export interface TTLConfig {
/**
* Amount of time, in seconds, author activities (Comments/Submission) should be cached
* Amount of time, in seconds, author activity history (Comments/Submission) should be cached
*
* * If `0` or `true` will cache indefinitely (not recommended)
* * If `false` will not cache
*
* * ENV => `AUTHOR_TTL`
* * ARG => `--authorTTL <sec>`
* @examples [60]
* @default 60
* */
authorTTL?: number;
authorTTL?: number | boolean;
/**
* Amount of time, in seconds, wiki content pages should be cached
*
* * If `0` or `true` will cache indefinitely (not recommended)
* * If `false` will not cache
*
* @examples [300]
* @default 300
* */
wikiTTL?: number;
wikiTTL?: number | boolean;
/**
* Amount of time, in milliseconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached
* Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached
*
* * If `0` or `true` will cache indefinitely (not recommended)
* * If `false` will not cache
*
* @examples [300]
* @default 300
* */
userNotesTTL?: number;
userNotesTTL?: number | boolean;
/**
* Amount of time, in seconds, a submission should be cached
*
* * If `0` or `true` will cache indefinitely (not recommended)
* * If `false` will not cache
*
* @examples [60]
* @default 60
* */
submissionTTL?: number;
submissionTTL?: number | boolean;
/**
* Amount of time, in seconds, a comment should be cached
*
* * If `0` or `true` will cache indefinitely (not recommended)
* * If `false` will not cache
*
* @examples [60]
* @default 60
* */
commentTTL?: number;
commentTTL?: number | boolean;
/**
* Amount of time, in seconds, a subreddit (attributes) should be cached
*
* * If `0` or `true` will cache indefinitely (not recommended)
* * If `false` will not cache
*
* @examples [600]
* @default 600
* */
subredditTTL?: number | boolean;
/**
* Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)
*
* This is especially useful when polling high-volume comments and your checks rely on author/item filters
*
* * If `0` or `true` will cache indefinitely (not recommended)
* * If `false` will not cache
*
* @examples [60]
* @default 60
* */
filterCriteriaTTL?: number;
filterCriteriaTTL?: number | boolean;
}
export interface SubredditCacheConfig extends TTLConfig {
export interface CacheConfig extends TTLConfig {
/**
* The cache provider and, optionally, a custom configuration for that provider
*
* If not present or `null` provider will be `memory`.
*
* To specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`
* */
provider?: CacheProvider | CacheOptions
/**
* The **maximum** number of Events that the cache should store triggered result summaries for
*
* These summaries are viewable through the Web UI.
*
* The value specified by a subreddit cannot be larger than the value set by the Operator for the global/bot config (if set)
*
* @default 25
* @example [25]
* */
actionedEventsMax?: number
}
export interface OperatorCacheConfig extends CacheConfig {
/**
* The **default** number of Events that the cache will store triggered result summaries for
*
* These summaries are viewable through the Web UI.
*
* The value specified cannot be larger than `actionedEventsMax` for the global/bot config (if set)
*
* @default 25
* @example [25]
* */
actionedEventsDefault?: number
}
export interface Footer {
@@ -503,7 +786,7 @@ export interface ManagerOptions {
/**
* Per-subreddit config for caching TTL values. If set to `false` caching is disabled.
* */
caching?: SubredditCacheConfig
caching?: CacheConfig
/**
* Use this option to override the `dryRun` setting for all `Checks`
@@ -562,6 +845,22 @@ export interface ManagerOptions {
* */
export type CompareValue = string;
/**
* A duration and how to compare it against a value
*
* The syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`
*
* * EX `> 100 days` => Passes if the date being compared is before 100 days ago
* * EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months
*
* Unit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)
*
* [See] https://regexr.com/609n8 for example
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\s*$
* */
export type DurationComparor = string;
/**
* A string containing a comparison operator and a value to compare against
*
@@ -607,6 +906,9 @@ export interface ActivityState {
stickied?: boolean
distinguished?: boolean
approved?: boolean
score?: CompareValue
reports?: CompareValue
age?: DurationComparor
}
/**
@@ -630,6 +932,9 @@ export interface SubmissionState extends ActivityState {
link_flair_css_class?: string
}
// properties calculated/derived by CM -- not provided as plain values by reddit
export const cmActivityProperties = ['submissionState','score','reports','removed','deleted','filtered','age','title'];
/**
* Different attributes a `Comment` can be in. Only include a property if you want to check it.
* @examples [{"op": true, "removed": false}]
@@ -645,6 +950,41 @@ export interface CommentState extends ActivityState {
submissionState?: SubmissionState[]
}
/**
* Different attributes a `Subreddit` can be in. Only include a property if you want to check it.
* @examples [{"over18": true}]
* */
export interface SubredditState {
/**
* Is subreddit quarantined?
* */
quarantine?: boolean
/**
* Is subreddit NSFW/over 18?
*
* **Note**: This is **mod-controlled flag** so it is up to the mods of the subreddit to correctly mark their subreddit as NSFW
* */
over18?: boolean
/**
* The name of the subreddit.
*
* Can be a normal string (will check case-insensitive) or a regular expression
*
* EX `["mealtimevideos", "/onlyfans*\/i"]`
*
* @examples ["mealtimevideos", "/onlyfans*\/i"]
* */
name?: string | RegExp
/**
* A friendly description of what this State is trying to parse
* */
stateDescription?: string
}
export interface StrongSubredditState extends SubredditState {
name?: RegExp
}
export type TypedActivityStates = SubmissionState[] | CommentState[];
export interface DomainInfo {
@@ -692,13 +1032,16 @@ export type CacheProvider = 'memory' | 'redis' | 'none';
// provider: CacheOptions
// }
export type StrongCache = {
authorTTL: number,
userNotesTTL: number,
wikiTTL: number,
submissionTTL: number,
commentTTL: number,
filterCriteriaTTL: number,
authorTTL: number | boolean,
userNotesTTL: number | boolean,
wikiTTL: number | boolean,
submissionTTL: number | boolean,
commentTTL: number | boolean,
subredditTTL: number | boolean,
filterCriteriaTTL: number | boolean,
provider: CacheOptions
actionedEventsMax?: number,
actionedEventsDefault: number,
}
/**
@@ -757,6 +1100,8 @@ export interface CacheOptions {
* @examples [500]
* */
max?: number
[key:string]: any
}
export type NotificationProvider = 'discord';
@@ -1034,6 +1379,13 @@ export interface BotInstanceJsonConfig {
* @default false
* */
sharedMod?: boolean,
/**
* If sharing a mod stream stagger pushing relevant Activities to individual subreddits.
*
* Useful when running many subreddits and rules are potentially cpu/memory/traffic heavy -- allows spreading out load
* */
stagger?: number,
},
/**
* Settings related to default configurations for queue behavior for subreddits
@@ -1053,60 +1405,11 @@ export interface BotInstanceJsonConfig {
}
/**
* Settings to configure the default caching behavior for each suberddit
* Settings to configure the default caching behavior for this bot
*
* Every setting not specified will default to what is specified by the global operator caching config
* */
caching?: {
/**
* Amount of time, in seconds, author activity history (Comments/Submission) should be cached
*
* * ENV => `AUTHOR_TTL`
* * ARG => `--authorTTL <sec>`
* @examples [60]
* @default 60
* */
authorTTL?: number;
/**
* Amount of time, in seconds, wiki content pages should be cached
* @examples [300]
* @default 300
* */
wikiTTL?: number;
/**
* Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached
* @examples [300]
* @default 300
* */
userNotesTTL?: number;
/**
* Amount of time, in seconds, a submission should be cached
* @examples [60]
* @default 60
* */
submissionTTL?: number;
/**
* Amount of time, in seconds, a comment should be cached
* @examples [60]
* @default 60
* */
commentTTL?: number;
/**
* Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)
*
* This is especially useful when polling high-volume comments and your checks rely on author/item filters
*
* @examples [60]
* @default 60
* */
filterCriteriaTTL?: number;
/**
* The cache provider and, optionally, a custom configuration for that provider
*
* If not present or `null` provider will be `memory`.
*
* To specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`
* */
provider?: CacheProvider | CacheOptions
}
caching?: OperatorCacheConfig
/**
* Settings related to managing heavy API usage.
* */
@@ -1214,6 +1517,13 @@ export interface OperatorJsonConfig {
path?: string,
},
/**
* Settings to configure the default caching behavior globally
*
* These settings will be used by each bot, and subreddit, that does not specify their own
* */
caching?: OperatorCacheConfig
bots?: BotInstanceJsonConfig[]
/**
@@ -1230,23 +1540,30 @@ export interface OperatorJsonConfig {
* @examples [8085]
* */
port?: number,
/**
* Caching provider to use for session and invite data
*
* If none is provided the top-level caching provider is used
* */
caching?: 'memory' | 'redis' | CacheOptions
/**
* Settings to configure the behavior of user sessions -- the session is what the web interface uses to identify logged in users.
* */
session?: {
/**
* The cache provider to use.
* Number of seconds a session should be valid for.
*
* The default should be sufficient for almost all use cases
* Default is 1 day
*
* @default "memory"
* @examples ["memory"]
* @default 86400
* @examples [86400]
* */
provider?: 'memory' | 'redis' | CacheOptions,
maxAge?: number
/**
* The secret value used to encrypt session data
*
* If provider is persistent (redis) specifying a value here will ensure sessions are valid between application restarts
* If provider is persistent (`redis`) specifying a value here will ensure sessions are valid between application restarts
*
* When not present or `null` a random string is generated on application start
*
@@ -1254,6 +1571,21 @@ export interface OperatorJsonConfig {
* */
secret?: string,
}
/**
* Settings related to oauth flow invites
* */
invites?: {
/**
* Number of seconds an invite should be valid for
*
* If `0` or not specified (default) invites do not expire
*
* @default 0
* @examples [0]
* */
maxAge?: number
}
/**
* The default log level to filter to in the web interface
*
@@ -1340,6 +1672,7 @@ export interface BotInstanceConfig extends BotInstanceJsonConfig {
},
polling: {
sharedMod: boolean,
stagger?: number,
limit: number,
interval: number,
},
@@ -1364,12 +1697,17 @@ export interface OperatorConfig extends OperatorJsonConfig {
level: LogLevel,
path?: string,
},
caching: StrongCache,
web: {
port: number,
caching: CacheOptions,
session: {
provider: CacheOptions,
maxAge: number,
secret: string,
}
},
invites: {
maxAge: number
},
logLevel?: LogLevel,
maxLogs: number,
clients: BotConnection[]
@@ -1410,3 +1748,133 @@ export interface LogInfo {
labels?: string[]
bot?: string
}
export interface ActionResult {
kind: string,
name: string,
run: boolean,
runReason?: string,
dryRun: boolean,
success: boolean,
result?: string,
}
export interface ActionProcessResult {
success: boolean,
dryRun: boolean,
result?: string
}
export interface ActionedEvent {
activity: {
peek: string
link: string
}
author: string
timestamp: number
check: string
ruleSummary: string,
subreddit: string,
ruleResults: RuleResult[]
actionResults: ActionResult[]
}
export interface UserResultCache {
result: boolean,
ruleResults: RuleResult[]
}
export type RedditEntityType = 'user' | 'subreddit';
export interface RedditEntity {
name: string
type: RedditEntityType
}
export interface StatusCodeError extends Error {
name: 'StatusCodeError',
statusCode: number,
message: string,
response: IncomingMessage,
error: Error
}
export interface HistoricalStatsDisplay extends HistoricalStats {
checksRunTotal: number
checksFromCacheTotal: number
checksTriggeredTotal: number
rulesRunTotal: number
rulesCachedTotal: number
rulesTriggeredTotal: number
actionsRunTotal: number
}
export interface HistoricalStats {
eventsCheckedTotal: number
eventsActionedTotal: number
checksRun: Map<string, number>
checksFromCache: Map<string, number>
checksTriggered: Map<string, number>
rulesRun: Map<string, number>
//rulesCached: Map<string, number>
rulesCachedTotal: number
rulesTriggered: Map<string, number>
actionsRun: Map<string, number>
[index: string]: any
}
export interface SubredditHistoricalStats {
allTime: HistoricalStats
lastReload: HistoricalStats
}
export interface SubredditHistoricalStatsDisplay {
allTime: HistoricalStatsDisplay
lastReload: HistoricalStatsDisplay
}
export interface ManagerStats {
// eventsCheckedTotal: number
// eventsCheckedSinceStartTotal: number
eventsAvg: number
// checksRunTotal: number
// checksRunSinceStartTotal: number
// checksTriggered: number
// checksTriggeredTotal: number
// checksTriggeredSinceStart: number
// checksTriggeredSinceStartTotal: number
// rulesRunTotal: number
// rulesRunSinceStartTotal: number
// rulesCachedTotal: number
// rulesCachedSinceStartTotal: number
// rulesTriggeredTotal: number
// rulesTriggeredSinceStartTotal: number
rulesAvg: number
// actionsRun: number
// actionsRunTotal: number
// actionsRunSinceStart: number,
// actionsRunSinceStartTotal: number
historical: SubredditHistoricalStatsDisplay
cache: {
provider: string,
currentKeyCount: number,
isShared: boolean,
totalRequests: number,
totalMiss: number,
missPercent: string,
requestRate: number,
types: ResourceStats
},
}
export interface HistoricalStatUpdateData {
eventsCheckedTotal?: number
eventsActionedTotal?: number
checksRun: string[] | string
checksTriggered: string[] | string
checksFromCache: string[] | string
actionsRun: string[] | string
rulesRun: string[] | string
rulesCachedTotal: number
rulesTriggered: string[] | string
}

View File

@@ -1,12 +1,12 @@
import {Logger} from "winston";
import {
buildCacheOptionsFromProvider,
buildCacheOptionsFromProvider, buildCachePrefix,
createAjvFactory,
mergeArr,
normalizeName,
overwriteMerge,
parseBool, randomId,
readJson,
readConfigFile,
removeUndefinedKeys
} from "./util";
import {CommentCheck} from "./Check/CommentCheck";
@@ -43,6 +43,7 @@ import {operatorConfig} from "./Utils/CommandConfig";
import merge from 'deepmerge';
import * as process from "process";
import {cacheOptDefaults, cacheTTLDefaults} from "./Common/defaults";
import objectHash from "object-hash";
export interface ConfigBuilderOptions {
logger: Logger,
@@ -141,15 +142,30 @@ export const buildPollingOptions = (values: (string | PollingOptions)[]): Pollin
let opts: PollingOptionsStrong[] = [];
for (const v of values) {
if (typeof v === 'string') {
opts.push({pollOn: v as PollOn, interval: DEFAULT_POLLING_INTERVAL, limit: DEFAULT_POLLING_LIMIT});
opts.push({
pollOn: v as PollOn,
interval: DEFAULT_POLLING_INTERVAL,
limit: DEFAULT_POLLING_LIMIT,
clearProcessed: {
size: DEFAULT_POLLING_LIMIT,
retain: DEFAULT_POLLING_LIMIT,
}
});
} else {
const {
pollOn: p,
interval = DEFAULT_POLLING_INTERVAL,
limit = DEFAULT_POLLING_LIMIT,
delayUntil,
clearProcessed = {size: limit, retain: limit},
} = v;
opts.push({pollOn: p as PollOn, interval, limit, delayUntil});
opts.push({
pollOn: p as PollOn,
interval,
limit,
delayUntil,
clearProcessed
});
}
}
return opts;
@@ -291,10 +307,6 @@ export const parseDefaultBotInstanceFromArgs = (args: any): BotInstanceJsonConfi
polling: {
sharedMod,
},
caching: {
provider: caching,
authorTTL
},
nanny: {
softLimit,
hardLimit
@@ -316,6 +328,8 @@ export const parseOpConfigFromArgs = (args: any): OperatorJsonConfig => {
sessionSecret,
web,
mode,
caching,
authorTTL,
} = args || {};
const data = {
@@ -328,6 +342,10 @@ export const parseOpConfigFromArgs = (args: any): OperatorJsonConfig => {
level: logLevel,
path: logDir === true ? `${process.cwd()}/logs` : undefined,
},
caching: {
provider: caching,
authorTTL
},
web: {
enabled: web,
port,
@@ -345,9 +363,9 @@ export const parseOpConfigFromArgs = (args: any): OperatorJsonConfig => {
return removeUndefinedKeys(data) as OperatorJsonConfig;
}
const parseListFromEnv = (val: string|undefined) => {
const parseListFromEnv = (val: string | undefined) => {
let listVals: undefined | string[];
if(val === undefined) {
if (val === undefined) {
return listVals;
}
const trimmedVal = val.trim();
@@ -387,13 +405,6 @@ export const parseDefaultBotInstanceFromEnv = (): BotInstanceJsonConfig => {
polling: {
sharedMod: parseBool(process.env.SHARE_MOD),
},
caching: {
provider: {
// @ts-ignore
store: process.env.CACHING as (CacheProvider | undefined)
},
authorTTL: process.env.AUTHOR_TTL !== undefined ? parseInt(process.env.AUTHOR_TTL) : undefined
},
nanny: {
softLimit: process.env.SOFT_LIMIT !== undefined ? parseInt(process.env.SOFT_LIMIT) : undefined,
hardLimit: process.env.HARD_LIMIT !== undefined ? parseInt(process.env.HARD_LIMIT) : undefined
@@ -405,7 +416,7 @@ export const parseDefaultBotInstanceFromEnv = (): BotInstanceJsonConfig => {
export const parseOpConfigFromEnv = (): OperatorJsonConfig => {
const data = {
mode: process.env.MODE !== undefined ? process.env.MODE as ('all' | 'server' | 'client') : undefined,
operator: {
operator: {
name: parseListFromEnv(process.env.OPERATOR),
display: process.env.OPERATOR_DISPLAY
},
@@ -414,6 +425,13 @@ export const parseOpConfigFromEnv = (): OperatorJsonConfig => {
level: process.env.LOG_LEVEL,
path: process.env.LOG_DIR === 'true' ? `${process.cwd()}/logs` : undefined,
},
caching: {
provider: {
// @ts-ignore
store: process.env.CACHING as (CacheProvider | undefined)
},
authorTTL: process.env.AUTHOR_TTL !== undefined ? parseInt(process.env.AUTHOR_TTL) : undefined
},
web: {
port: process.env.PORT !== undefined ? parseInt(process.env.PORT) : undefined,
session: {
@@ -473,7 +491,7 @@ export const parseOperatorConfigFromSources = async (args: any): Promise<Operato
if (operatorConfig !== undefined) {
let rawConfig;
try {
rawConfig = await readJson(operatorConfig, {log: initLogger});
rawConfig = await readConfigFile(operatorConfig, {log: initLogger}) as object;
} catch (err) {
initLogger.error('Cannot continue app startup because operator config file was not parseable.');
err.logged = true;
@@ -493,21 +511,25 @@ export const parseOperatorConfigFromSources = async (args: any): Promise<Operato
const defaultBotInstanceFromEnv = parseDefaultBotInstanceFromEnv();
const {bots: botInstancesFromFile = [], ...restConfigFile} = configFromFile;
const defaultBotInstance = merge.all([defaultBotInstanceFromEnv, defaultBotInstanceFromArgs], {
const mergedConfig = merge.all([opConfigFromEnv, restConfigFile, opConfigFromArgs], {
arrayMerge: overwriteMerge,
});
const defaultBotInstance = merge.all([defaultBotInstanceFromEnv, defaultBotInstanceFromArgs], {
arrayMerge: overwriteMerge,
}) as BotInstanceJsonConfig;
if (configFromFile.caching !== undefined) {
defaultBotInstance.caching = configFromFile.caching;
}
let botInstances = [];
if(botInstancesFromFile.length === 0) {
if (botInstancesFromFile.length === 0) {
botInstances = [defaultBotInstance];
} else {
botInstances = botInstancesFromFile.map(x => merge.all([defaultBotInstance, x], {arrayMerge: overwriteMerge}));
}
const mergedConfig = merge.all([opConfigFromEnv, restConfigFile, opConfigFromArgs], {
arrayMerge: overwriteMerge,
});
return removeUndefinedKeys({...mergedConfig, bots: botInstances}) as OperatorJsonConfig;
}
@@ -522,12 +544,17 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
level = 'verbose',
path,
} = {},
caching: opCache,
web: {
port = 8085,
maxLogs = 200,
caching: webCaching = {},
session: {
secret = randomId(),
provider: sessionProvider = { store: 'memory' },
maxAge: sessionMaxAge = 86400,
} = {},
invites: {
maxAge: inviteMaxAge = 0,
} = {},
clients,
credentials: webCredentials,
@@ -541,102 +568,177 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
bots = [],
} = data;
let hydratedBots: BotInstanceConfig[] = bots.map(x => {
const {
polling: {
sharedMod = false,
limit = 100,
interval = 30,
} = {},
queue: {
maxWorkers = 1,
} = {},
caching,
nanny: {
softLimit = 250,
hardLimit = 50
} = {},
snoowrap = {},
credentials: {
clientId: ci,
clientSecret: cs,
...restCred
} = {},
subreddits: {
names = [],
exclude = [],
wikiConfig = 'botconfig/contextbot',
dryRun,
heartbeatInterval = 300,
} = {},
} = x;
let cache: StrongCache;
let defaultProvider: CacheOptions;
let opActionedEventsMax: number | undefined;
let opActionedEventsDefault: number = 25;
if(caching === undefined) {
if (opCache === undefined) {
defaultProvider = {
store: 'memory',
...cacheOptDefaults
};
cache = {
...cacheTTLDefaults,
provider: {
store: 'memory',
...cacheOptDefaults
}
provider: defaultProvider,
actionedEventsDefault: opActionedEventsDefault,
};
} else {
const {provider, ...restConfig} = caching;
const {provider, actionedEventsMax, actionedEventsDefault = opActionedEventsDefault, ...restConfig} = opCache;
if (actionedEventsMax !== undefined && actionedEventsMax !== null) {
opActionedEventsMax = actionedEventsMax;
opActionedEventsDefault = Math.min(actionedEventsDefault, actionedEventsMax);
}
if (typeof provider === 'string') {
cache = {
...cacheTTLDefaults,
...restConfig,
provider: {
store: provider as CacheProvider,
...cacheOptDefaults
}
}
defaultProvider = {
store: provider as CacheProvider,
...cacheOptDefaults
};
} else {
const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
cache = {
...cacheTTLDefaults,
...restConfig,
provider: {
store,
...cacheOptDefaults,
...rest,
},
}
defaultProvider = {
store,
...cacheOptDefaults,
...rest,
};
}
cache = {
...cacheTTLDefaults,
...restConfig,
actionedEventsMax: opActionedEventsMax,
actionedEventsDefault: opActionedEventsDefault,
provider: defaultProvider,
}
}
return {
snoowrap,
subreddits: {
names,
exclude,
wikiConfig,
heartbeatInterval,
dryRun,
},
credentials: {
let hydratedBots: BotInstanceConfig[] = bots.map(x => {
const {
name: botName,
polling: {
sharedMod = false,
stagger,
limit = 100,
interval = 30,
} = {},
queue: {
maxWorkers = 1,
} = {},
caching,
nanny: {
softLimit = 250,
hardLimit = 50
} = {},
snoowrap = {},
credentials: {
clientId: ci,
clientSecret: cs,
...restCred
} = {},
subreddits: {
names = [],
exclude = [],
wikiConfig = 'botconfig/contextbot',
dryRun,
heartbeatInterval = 300,
} = {},
} = x;
let botCache: StrongCache;
let botActionedEventsDefault: number;
if (caching === undefined) {
botCache = {
...cacheTTLDefaults,
actionedEventsDefault: opActionedEventsDefault,
actionedEventsMax: opActionedEventsMax,
provider: {
store: 'memory',
...cacheOptDefaults
}
};
} else {
const {
provider,
actionedEventsMax = opActionedEventsMax,
actionedEventsDefault = opActionedEventsDefault,
...restConfig
} = caching;
botActionedEventsDefault = actionedEventsDefault;
if (actionedEventsMax !== undefined) {
botActionedEventsDefault = Math.min(actionedEventsDefault, actionedEventsMax);
}
if (typeof provider === 'string') {
botCache = {
...cacheTTLDefaults,
...restConfig,
actionedEventsDefault: botActionedEventsDefault,
provider: {
store: provider as CacheProvider,
...cacheOptDefaults
}
}
} else {
const {ttl = 60, max = 500, store = 'memory', ...rest} = provider || {};
botCache = {
...cacheTTLDefaults,
...restConfig,
actionedEventsDefault: botActionedEventsDefault,
actionedEventsMax,
provider: {
store,
...cacheOptDefaults,
...rest,
},
}
}
}
const botCreds = {
clientId: (ci as string),
clientSecret: (cs as string),
...restCred,
},
caching: cache,
polling: {
sharedMod,
limit,
interval,
},
queue: {
maxWorkers,
},
nanny: {
softLimit,
hardLimit
};
if (botCache.provider.prefix === undefined || botCache.provider.prefix === defaultProvider.prefix) {
// need to provide unique prefix to bot
botCache.provider.prefix = buildCachePrefix([botCache.provider.prefix, 'bot', (botName || objectHash.sha1(botCreds))]);
}
return {
name: botName,
snoowrap,
subreddits: {
names,
exclude,
wikiConfig,
heartbeatInterval,
dryRun,
},
credentials: botCreds,
caching: botCache,
polling: {
sharedMod,
stagger,
limit,
interval,
},
queue: {
maxWorkers,
},
nanny: {
softLimit,
hardLimit
}
}
}
});
const defaultOperators = typeof name === 'string' ? [name] : name;
const config: OperatorConfig = {
@@ -649,19 +751,19 @@ export const buildOperatorConfigWithDefaults = (data: OperatorJsonConfig): Opera
level,
path
},
caching: cache,
web: {
port,
caching: {
...defaultProvider,
...webCaching
},
invites: {
maxAge: inviteMaxAge,
},
session: {
secret,
provider: typeof sessionProvider === 'string' ? {
...buildCacheOptionsFromProvider({
ttl: 86400000,
store: sessionProvider,
})
} : {
...buildCacheOptionsFromProvider(sessionProvider),
ttl: 86400000,
},
maxAge: sessionMaxAge,
},
maxLogs,
clients: clients === undefined ? [{host: 'localhost:8095', secret: apiSecret}] : clients,

View File

@@ -1,19 +1,21 @@
import {SubmissionRule, SubmissionRuleJSONConfig} from "./SubmissionRule";
import {ActivityWindowType, DomainInfo, ReferenceSubmission} from "../Common/interfaces";
import {ActivityWindowType, CommentState, DomainInfo, ReferenceSubmission, SubmissionState} from "../Common/interfaces";
import {Rule, RuleOptions, RuleResult} from "./index";
import Submission from "snoowrap/dist/objects/Submission";
import {getAttributionIdentifier} from "../Utils/SnoowrapUtils";
import dayjs from "dayjs";
import {
comparisonTextOp,
asSubmission,
comparisonTextOp, convertSubredditsRawToStrong,
FAIL,
formatNumber,
formatNumber, getActivitySubredditName, isSubmission,
parseGenericValueOrPercentComparison,
parseSubredditName,
PASS
} from "../util";
import { Comment } from "snoowrap/dist/objects";
import SimpleError from "../Utils/SimpleError";
import as from "async";
export interface AttributionCriteria {
@@ -52,8 +54,6 @@ export interface AttributionCriteria {
/**
* A list of domains whose Activities will be tested against `threshold`.
*
* If this is present then `aggregateOn` is ignored.
*
* The values are tested as partial strings so you do not need to include full URLs, just the part that matters.
*
* EX `["youtube"]` will match submissions with the domain `https://youtube.com/c/aChannel`
@@ -77,38 +77,55 @@ export interface AttributionCriteria {
domainsCombined?: boolean,
/**
* Only include Activities from this list of Subreddits (by name, case-insensitive)
* When present, Activities WILL ONLY be counted if they are found in this list of Subreddits
*
* Each value in the list can be either:
*
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* * string (name of subreddit)
* * regular expression to run on the subreddit name
* * `SubredditState`
*
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
* */
include?: string[],
/**
* Do not include Activities from this list of Subreddits (by name, case-insensitive)
* When present, Activities WILL NOT be counted if they are found in this list of Subreddits
*
* Will be ignored if `include` is present.
* Each value in the list can be either:
*
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* * string (name of subreddit)
* * regular expression to run on the subreddit name
* * `SubredditState`
*
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
* */
exclude?: string[],
/**
* If `domains` is not specified this list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`
* When present, Submissions from `window` will only be counted if they meet this criteria
* */
submissionState?: SubmissionState
/**
* When present, Comments from `window` will only be counted if they meet this criteria
* */
commentState?: CommentState
/**
* This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`
*
* * If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)
* * If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)
* * If `link` is included then aggregate author's submission history which is external links but not media
* * If `redditMedia` is included then aggregate on author's submissions history which are media hosted on reddit: galleries, videos, and images (i.redd.it / v.redd.it)
* * If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or domain is `reddit.com`
* * If `link` is included then aggregate author's submission history which is external links and not recognized as `media` by reddit
*
* If nothing is specified or list is empty (default) all domains are aggregated
* If nothing is specified or list is empty (default) rule will only aggregate on `link` and `media` (ignores reddit-hosted content and self-posts)
*
* @default undefined
* @examples [[]]
* */
aggregateOn?: ('media' | 'self' | 'link')[],
aggregateOn?: ('media' | 'redditMedia' | 'self' | 'link')[],
/**
* Should the criteria consolidate recognized media domains into the parent domain?
@@ -174,25 +191,40 @@ export class AttributionRule extends Rule {
window,
thresholdOn = 'all',
minActivityCount = 10,
aggregateOn = [],
aggregateOn = ['link','media'],
consolidateMediaDomains = false,
domains = [],
domainsCombined = false,
include: includeRaw = [],
exclude: excludeRaw = [],
include = [],
exclude = [],
commentState,
submissionState,
} = criteria;
const include = includeRaw.map(x => parseSubredditName(x).toLowerCase());
const exclude = excludeRaw.map(x => parseSubredditName(x).toLowerCase());
const {operator, value, isPercent, extra = ''} = parseGenericValueOrPercentComparison(threshold);
let activities = thresholdOn === 'submissions' ? await this.resources.getAuthorSubmissions(item.author, {window: window}) : await this.resources.getAuthorActivities(item.author, {window: window});
activities = activities.filter(act => {
if (include.length > 0) {
return include.some(x => x === act.subreddit.display_name.toLowerCase());
} else if (exclude.length > 0) {
return !exclude.some(x => x === act.subreddit.display_name.toLowerCase())
if(include.length > 0 || exclude.length > 0) {
const defaultOpts = {
defaultFlags: 'i',
generateDescription: true
};
if(include.length > 0) {
const subStates = include.map(x => convertSubredditsRawToStrong(x, defaultOpts));
activities = await this.resources.batchTestSubredditCriteria(activities, subStates);
} else {
const subStates = exclude.map(x => convertSubredditsRawToStrong(x, defaultOpts));
const toExclude = (await this.resources.batchTestSubredditCriteria(activities, subStates)).map(x => x.id);
activities = activities.filter(x => !toExclude.includes(x.id));
}
}
activities = await as.filter(activities, async (activity) => {
if (asSubmission(activity) && submissionState !== undefined) {
return await this.resources.testItemCriteria(activity, [submissionState]);
} else if (commentState !== undefined) {
return await this.resources.testItemCriteria(activity, [commentState]);
}
return true;
});
@@ -219,7 +251,7 @@ export class AttributionRule extends Rule {
const realDomains: DomainInfo[] = domains.map(x => {
if(x === SUBMISSION_DOMAIN) {
if(!(item instanceof Submission)) {
if(!(asSubmission(item))) {
throw new SimpleError('Cannot run Attribution Rule with the domain SELF:AGG on a Comment');
}
return getAttributionIdentifier(item, consolidateMediaDomains);
@@ -228,21 +260,28 @@ export class AttributionRule extends Rule {
});
const realDomainIdents = realDomains.map(x => x.aliases).flat(1).map(x => x.toLowerCase());
const submissions: Submission[] = thresholdOn === 'submissions' ? activities as Submission[] : activities.filter(x => x instanceof Submission) as Submission[];
const submissions: Submission[] = thresholdOn === 'submissions' ? activities as Submission[] : activities.filter(x => isSubmission(x)) as Submission[];
const aggregatedSubmissions = submissions.reduce((acc: Map<string, DomainAgg>, sub) => {
const domainInfo = getAttributionIdentifier(sub, consolidateMediaDomains)
let domainType = 'link';
if(sub.secure_media !== undefined && sub.secure_media !== null) {
domainType = 'media';
} else if((sub.is_self || sub.is_video || sub.domain === 'i.redd.it')) {
if(sub.is_video || ['i.redd.it','v.redd.it'].includes(sub.domain)
// @ts-ignore
|| sub.gallery_data !== undefined) {
domainType = 'redditMedia';
} else if(sub.is_self || sub.domain === 'reddit.com') {
domainType = 'self';
} else if(sub.secure_media !== undefined && sub.secure_media !== null) {
domainType = 'media';
}
if(realDomains.length === 0 && aggregateOn.length !== 0) {
if(aggregateOn.length !== 0) {
if(domainType === 'media' && !aggregateOn.includes('media')) {
return acc;
}
if(domainType === 'redditMedia' && !aggregateOn.includes('redditMedia')) {
return acc;
}
if(domainType === 'self' && !aggregateOn.includes('self')) {
return acc;
}
@@ -385,7 +424,7 @@ export class AttributionRule extends Rule {
}
interface AttributionConfig extends ReferenceSubmission {
interface AttributionConfig {
/**
* A list threshold-window values to test attribution against

View File

@@ -1,17 +1,19 @@
import {ActivityWindowType, CompareValueOrPercent, ThresholdCriteria} from "../Common/interfaces";
import {ActivityWindowType, CompareValueOrPercent, SubredditState, ThresholdCriteria} from "../Common/interfaces";
import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
import Submission from "snoowrap/dist/objects/Submission";
import {getAuthorActivities} from "../Utils/SnoowrapUtils";
import dayjs from "dayjs";
import {
asSubmission,
comparisonTextOp,
FAIL,
formatNumber,
formatNumber, getActivitySubredditName, isSubmission,
parseGenericValueOrPercentComparison, parseSubredditName,
PASS,
percentFromString
percentFromString, toStrongSubredditState
} from "../util";
import {Comment} from "snoowrap";
export interface CommentThresholdCriteria extends ThresholdCriteria {
/**
@@ -23,42 +25,56 @@ export interface CommentThresholdCriteria extends ThresholdCriteria {
asOp?: boolean
}
/**
* If both `submission` and `comment` are defined then criteria will only trigger if BOTH thresholds are met
* Criteria will only trigger if ALL present thresholds (comment, submission, total) are met
* */
export interface HistoryCriteria {
/**
* A string containing a comparison operator and a value to compare submissions against
* A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) submissions against
*
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign]`
*
* * EX `> 100` => greater than 100 submissions
* * EX `<= 75%` => submissions are equal to or less than 75% of all Activities
* * EX `> 100` => greater than 100 filtered submissions
* * EX `<= 75%` => filtered submissions are equal to or less than 75% of unfiltered Activities
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
* */
submission?: CompareValueOrPercent
/**
* A string containing a comparison operator and a value to compare comments against
* A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) comments against
*
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`
*
* * EX `> 100` => greater than 100 comments
* * EX `<= 75%` => comments are equal to or less than 75% of all Activities
* * EX `<= 75%` => comments are equal to or less than 75% of unfiltered Activities
*
* If your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:
*
* * EX `> 100 OP` => greater than 100 comments as OP
* * EX `<= 25% as OP` => Comments as OP were less then or equal to 25% of **all Comments**
* * EX `> 100 OP` => greater than 100 filtered comments as OP
* * EX `<= 25% as OP` => **Filtered** comments as OP were less then or equal to 25% of **unfiltered Comments**
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
* */
comment?: CompareValueOrPercent
/**
* A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`) activities against
*
* **Note:** This is only useful if using `include` or `exclude` otherwise percent will always be 100% and total === activityTotal
*
* The syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`
*
* * EX `> 100` => greater than 100 filtered activities
* * EX `<= 75%` => filtered activities are equal to or less than 75% of all Activities
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
* */
total?: CompareValueOrPercent
window: ActivityWindowType
/**
* The minimum number of activities that must exist from the `window` results for this criteria to run
* The minimum number of **filtered** activities that must exist from the `window` results for this criteria to run
* @default 5
* */
minActivityCount?: number
@@ -68,8 +84,9 @@ export interface HistoryCriteria {
export class HistoryRule extends Rule {
criteria: HistoryCriteria[];
condition: 'AND' | 'OR';
include: string[];
exclude: string[];
include: (string | SubredditState)[];
exclude: (string | SubredditState)[];
activityFilterFunc: (x: Submission|Comment) => Promise<boolean> = async (x) => true;
constructor(options: HistoryOptions) {
super(options);
@@ -85,8 +102,41 @@ export class HistoryRule extends Rule {
if (this.criteria.length === 0) {
throw new Error('Must provide at least one HistoryCriteria');
}
this.include = include.map(x => parseSubredditName(x).toLowerCase());
this.exclude = exclude.map(x => parseSubredditName(x).toLowerCase());
this.include = include;
this.exclude = exclude;
if(this.include.length > 0) {
const subStates = include.map((x) => {
if(typeof x === 'string') {
return toStrongSubredditState({name: x, stateDescription: x}, {defaultFlags: 'i', generateDescription: true});
}
return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
});
this.activityFilterFunc = async (x: Submission|Comment) => {
for(const ss of subStates) {
if(await this.resources.testSubredditCriteria(x, ss)) {
return true;
}
}
return false;
};
} else if(this.exclude.length > 0) {
const subStates = exclude.map((x) => {
if(typeof x === 'string') {
return toStrongSubredditState({name: x, stateDescription: x}, {defaultFlags: 'i', generateDescription: true});
}
return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
});
this.activityFilterFunc = async (x: Submission|Comment) => {
for(const ss of subStates) {
if(await this.resources.testSubredditCriteria(x, ss)) {
return false;
}
}
return true;
};
}
}
getKind(): string {
@@ -107,25 +157,23 @@ export class HistoryRule extends Rule {
for (const criteria of this.criteria) {
const {comment, window, submission, minActivityCount = 5} = criteria;
const {comment, window, submission, total, minActivityCount = 5} = criteria;
let activities = await this.resources.getAuthorActivities(item.author, {window: window});
activities = activities.filter(act => {
if (this.include.length > 0) {
return this.include.some(x => x === act.subreddit.display_name.toLowerCase());
} else if (this.exclude.length > 0) {
return !this.exclude.some(x => x === act.subreddit.display_name.toLowerCase())
const filteredActivities = [];
for(const a of activities) {
if(await this.activityFilterFunc(a)) {
filteredActivities.push(a);
}
return true;
});
}
if (activities.length < minActivityCount) {
if (filteredActivities.length < minActivityCount) {
continue;
}
const activityTotal = activities.length;
const {submissionTotal, commentTotal, opTotal} = activities.reduce((acc, act) => {
if(act instanceof Submission) {
if(asSubmission(act)) {
return {...acc, submissionTotal: acc.submissionTotal + 1};
}
let a = {...acc, commentTotal: acc.commentTotal + 1};
@@ -134,6 +182,24 @@ export class HistoryRule extends Rule {
}
return a;
},{submissionTotal: 0, commentTotal: 0, opTotal: 0});
let fSubmissionTotal = submissionTotal;
let fCommentTotal = commentTotal;
let fOpTotal = opTotal;
if(activities.length !== filteredActivities.length) {
const filteredCounts = filteredActivities.reduce((acc, act) => {
if(asSubmission(act)) {
return {...acc, submissionTotal: acc.submissionTotal + 1};
}
let a = {...acc, commentTotal: acc.commentTotal + 1};
if(act.is_submitter) {
a.opTotal = a.opTotal + 1;
}
return a;
},{submissionTotal: 0, commentTotal: 0, opTotal: 0});
fSubmissionTotal = filteredCounts.submissionTotal;
fCommentTotal = filteredCounts.commentTotal;
fOpTotal = filteredCounts.opTotal;
}
let commentTrigger = undefined;
if(comment !== undefined) {
@@ -142,15 +208,15 @@ export class HistoryRule extends Rule {
if(isPercent) {
const per = value / 100;
if(asOp) {
commentTrigger = comparisonTextOp(opTotal / commentTotal, operator, per);
commentTrigger = comparisonTextOp(fOpTotal / commentTotal, operator, per);
} else {
commentTrigger = comparisonTextOp(commentTotal / activityTotal, operator, per);
commentTrigger = comparisonTextOp(fCommentTotal / activityTotal, operator, per);
}
} else {
if(asOp) {
commentTrigger = comparisonTextOp(opTotal, operator, value);
commentTrigger = comparisonTextOp(fOpTotal, operator, value);
} else {
commentTrigger = comparisonTextOp(commentTotal, operator, value);
commentTrigger = comparisonTextOp(fCommentTotal, operator, value);
}
}
}
@@ -160,27 +226,40 @@ export class HistoryRule extends Rule {
const {operator, value, isPercent} = parseGenericValueOrPercentComparison(submission);
if(isPercent) {
const per = value / 100;
submissionTrigger = comparisonTextOp(submissionTotal / activityTotal, operator, per);
submissionTrigger = comparisonTextOp(fSubmissionTotal / activityTotal, operator, per);
} else {
submissionTrigger = comparisonTextOp(submissionTotal, operator, value);
submissionTrigger = comparisonTextOp(fSubmissionTotal, operator, value);
}
}
let totalTrigger = undefined;
if(total !== undefined) {
const {operator, value, isPercent} = parseGenericValueOrPercentComparison(total);
if(isPercent) {
const per = value / 100;
totalTrigger = comparisonTextOp(filteredActivities.length / activityTotal, operator, per);
} else {
totalTrigger = comparisonTextOp(filteredActivities.length, operator, value);
}
}
const firstActivity = activities[0];
const lastActivity = activities[activities.length - 1];
const activityTotalWindow = dayjs.duration(dayjs(firstActivity.created_utc * 1000).diff(dayjs(lastActivity.created_utc * 1000)));
const activityTotalWindow = activities.length === 0 ? dayjs.duration(0, 's') : dayjs.duration(dayjs(firstActivity.created_utc * 1000).diff(dayjs(lastActivity.created_utc * 1000)));
criteriaResults.push({
criteria,
activityTotal,
activityTotalWindow,
submissionTotal,
commentTotal,
opTotal,
submissionTotal: fSubmissionTotal,
commentTotal: fCommentTotal,
opTotal: fOpTotal,
filteredTotal: filteredActivities.length,
submissionTrigger,
commentTrigger,
triggered: (submissionTrigger === undefined || submissionTrigger === true) && (commentTrigger === undefined || commentTrigger === true)
totalTrigger,
triggered: (submissionTrigger === undefined || submissionTrigger === true) && (commentTrigger === undefined || commentTrigger === true) && (totalTrigger === undefined || totalTrigger === true)
});
}
@@ -223,36 +302,50 @@ export class HistoryRule extends Rule {
activityTotalWindow,
submissionTotal,
commentTotal,
filteredTotal,
opTotal,
criteria: {
comment,
submission,
total,
window,
},
criteria,
triggered,
submissionTrigger,
commentTrigger,
totalTrigger,
} = results;
const data: any = {
activityTotal,
submissionTotal,
commentTotal,
filteredTotal,
opTotal,
commentPercent: formatNumber((commentTotal/activityTotal)*100),
submissionPercent: formatNumber((submissionTotal/activityTotal)*100),
opPercent: formatNumber((opTotal/commentTotal)*100),
filteredPercent: formatNumber((filteredTotal/activityTotal)*100),
criteria,
window: typeof window === 'number' ? `${activityTotal} Items` : activityTotalWindow.humanize(true),
window: typeof window === 'number' || activityTotal === 0 ? `${activityTotal} Items` : activityTotalWindow.humanize(true),
triggered,
submissionTrigger,
commentTrigger,
totalTrigger,
};
let thresholdSummary = [];
let totalSummary;
let submissionSummary;
let commentSummary;
if(total !== undefined) {
const {operator, value, isPercent, displayText} = parseGenericValueOrPercentComparison(total);
const suffix = !isPercent ? 'Items' : `(${formatNumber((filteredTotal/activityTotal)*100)}%) of ${activityTotal} Total`;
totalSummary = `${includePassFailSymbols ? `${submissionTrigger ? PASS : FAIL} ` : ''}Filtered Activities (${filteredTotal}) were${totalTrigger ? '' : ' not'} ${displayText} ${suffix}`;
data.totalSummary = totalSummary;
thresholdSummary.push(totalSummary);
}
if(submission !== undefined) {
const {operator, value, isPercent, displayText} = parseGenericValueOrPercentComparison(submission);
const suffix = !isPercent ? 'Items' : `(${formatNumber((submissionTotal/activityTotal)*100)}%) of ${activityTotal} Total`;
@@ -298,21 +391,45 @@ interface HistoryConfig {
condition?: 'AND' | 'OR'
/**
* Only include Submissions from this list of Subreddits (by name, case-insensitive)
* If present, activities will be counted only if they are found in this list of Subreddits.
*
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* Each value in the list can be either:
*
* * string (name of subreddit)
* * regular expression to run on the subreddit name
* * `SubredditState`
*
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
*
* **Note:** This affects **post-window retrieval** activities. So that:
*
* * `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering
* * all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**
* * -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`
*
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
* */
include?: string[],
include?: (string | SubredditState)[],
/**
* Do not include Submissions from this list of Subreddits (by name, case-insensitive)
* If present, activities will be counted only if they are **NOT** found in this list of Subreddits
*
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* Each value in the list can be either:
*
* * string (name of subreddit)
* * regular expression to run on the subreddit name
* * `SubredditState`
*
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
*
* **Note:** This affects **post-window retrieval** activities. So that:
*
* * `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering
* * all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**
* * -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`
*
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
* */
exclude?: string[],
exclude?: (string | SubredditState)[],
}
export interface HistoryOptions extends HistoryConfig, RuleOptions {

View File

@@ -1,27 +1,50 @@
import {Rule, RuleJSONConfig, RuleOptions, RulePremise, RuleResult} from "./index";
import {Comment, VoteableContent} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import as from 'async';
import pMap from 'p-map';
// @ts-ignore
import subImageMatch from 'matches-subimage';
import {
activityWindowText,
comparisonTextOp, FAIL, formatNumber,
parseGenericValueOrPercentComparison, parseSubredditName,
asSubmission, bitsToHexLength,
// blockHashImage,
compareImages,
comparisonTextOp, convertSubredditsRawToStrong,
FAIL,
formatNumber,
getActivitySubredditName, imageCompareMaxConcurrencyGuess,
//getImageDataFromUrl,
isSubmission,
isValidImageURL,
objectToStringSummary,
parseGenericValueOrPercentComparison,
parseStringToRegex,
parseSubredditName,
parseUsableLinkIdentifier,
PASS
PASS, sleep,
toStrongSubredditState
} from "../util";
import {
ActivityWindow,
ActivityWindowCriteria,
ActivityWindowType,
ReferenceSubmission,
SubredditCriteria
ActivityWindowType, CommentState,
//ImageData,
ImageDetection,
ReferenceSubmission, StrongImageDetection, StrongSubredditState, SubmissionState,
SubredditCriteria, SubredditState
} from "../Common/interfaces";
import ImageData from "../Common/ImageData";
import {blockhash, hammingDistance} from "../Common/blockhash/blockhash";
import leven from "leven";
const parseLink = parseUsableLinkIdentifier();
export class RecentActivityRule extends Rule {
window: ActivityWindowType;
thresholds: SubThreshold[];
thresholds: ActivityThreshold[];
useSubmissionAsReference: boolean;
imageDetection: StrongImageDetection
lookAt?: 'comments' | 'submissions';
constructor(options: RecentActivityRuleOptions) {
@@ -29,8 +52,47 @@ export class RecentActivityRule extends Rule {
const {
window = 15,
useSubmissionAsReference = true,
imageDetection,
lookAt,
} = options || {};
const {
enable = false,
fetchBehavior = 'extension',
threshold = 5,
hash = {},
pixel = {},
} = imageDetection || {};
const {
enable: hEnable = true,
bits = 16,
ttl = 60,
hardThreshold = threshold,
softThreshold
} = hash || {};
const {
enable: pEnable = true,
threshold: pThreshold = threshold,
} = pixel || {};
this.imageDetection = {
enable,
fetchBehavior,
threshold,
hash: {
enable: hEnable,
hardThreshold,
softThreshold,
bits,
ttl,
},
pixel: {
enable: pEnable,
threshold: pThreshold
}
};
this.lookAt = lookAt;
this.useSubmissionAsReference = useSubmissionAsReference;
this.window = window;
@@ -67,46 +129,226 @@ export class RecentActivityRule extends Rule {
let viableActivity = activities;
if (this.useSubmissionAsReference) {
if (!(item instanceof Submission)) {
if (!asSubmission(item)) {
this.logger.warn('Cannot use post as reference because triggered item is not a Submission');
} else if (item.is_self) {
this.logger.warn('Cannot use post as reference because triggered Submission is not a link type');
} else {
const usableUrl = parseLink(await item.url);
viableActivity = viableActivity.filter((x) => {
if (!(x instanceof Submission)) {
return false;
const itemId = item.id;
const referenceUrl = await item.url;
const usableUrl = parseLink(referenceUrl);
let filteredActivity: (Submission|Comment)[] = [];
let analysisTimes: number[] = [];
let referenceImage: ImageData | undefined;
if (this.imageDetection.enable) {
try {
referenceImage = ImageData.fromSubmission(item);
referenceImage.setPreferredResolutionByWidth(800);
if(this.imageDetection.hash.enable) {
let refHash: string | undefined;
if(this.imageDetection.hash.ttl !== undefined) {
refHash = await this.resources.getImageHash(referenceImage);
if(refHash === undefined) {
refHash = await referenceImage.hash(this.imageDetection.hash.bits);
await this.resources.setImageHash(referenceImage, refHash, this.imageDetection.hash.ttl);
} else if(refHash.length !== bitsToHexLength(this.imageDetection.hash.bits)) {
this.logger.warn('Reference image hash length did not correspond to bits specified in config. Recomputing...');
refHash = await referenceImage.hash(this.imageDetection.hash.bits);
await this.resources.setImageHash(referenceImage, refHash, this.imageDetection.hash.ttl);
}
} else {
refHash = await referenceImage.hash(this.imageDetection.hash.bits);
}
}
//await referenceImage.sharp();
// await referenceImage.hash();
// if (referenceImage.preferredResolution !== undefined) {
// await (referenceImage.getSimilarResolutionVariant(...referenceImage.preferredResolution) as ImageData).sharp();
// }
} catch (err) {
this.logger.verbose(err.message);
}
}
let longRun;
if (referenceImage !== undefined) {
const l = this.logger;
longRun = setTimeout(() => {
l.verbose('FYI: Image processing is causing rule to take longer than normal');
}, 2500);
}
// @ts-ignore
const ci = async (x: (Submission|Comment)) => {
if (!asSubmission(x) || x.id === itemId) {
return null;
}
if (x.url === undefined) {
return false;
return null;
}
return parseLink(x.url) === usableUrl;
});
if (parseLink(x.url) === usableUrl) {
return x;
}
// only do image detection if regular URL comparison and other conditions fail first
// to reduce CPU/bandwidth usage
if (referenceImage !== undefined) {
try {
let imgData = ImageData.fromSubmission(x);
imgData.setPreferredResolutionByWidth(800);
if(this.imageDetection.hash.enable) {
let compareHash: string | undefined;
if(this.imageDetection.hash.ttl !== undefined) {
compareHash = await this.resources.getImageHash(imgData);
}
if(compareHash === undefined)
{
compareHash = await imgData.hash(this.imageDetection.hash.bits);
if(this.imageDetection.hash.ttl !== undefined) {
await this.resources.setImageHash(imgData, compareHash, this.imageDetection.hash.ttl);
}
}
const refHash = await referenceImage.hash(this.imageDetection.hash.bits);
if(refHash.length !== compareHash.length) {
this.logger.debug(`Hash lengths were not the same! Will need to recompute compare hash to match reference.\n\nReference: ${referenceImage.baseUrl} has is ${refHash.length} char long | Comparing: ${imgData.baseUrl} has is ${compareHash} ${compareHash.length} long`);
compareHash = await imgData.hash(this.imageDetection.hash.bits)
}
const distance = leven(refHash, compareHash);
const diff = (distance/refHash.length)*100;
// return image if hard is defined and diff is less
if(null !== this.imageDetection.hash.hardThreshold && diff <= this.imageDetection.hash.hardThreshold) {
return x;
}
// hard is either not defined or diff was gerater than hard
// if soft is defined
if (this.imageDetection.hash.softThreshold !== undefined) {
// and diff is greater than soft allowance
if(diff > this.imageDetection.hash.softThreshold) {
// not similar enough
return null;
}
// similar enough, will continue on to pixel (if enabled!)
} else {
// only hard was defined and did not pass
return null;
}
}
// at this point either hash was not enabled or it was and we hit soft threshold but not hard
if(this.imageDetection.pixel.enable) {
try {
const [compareResult, sameImage] = await compareImages(referenceImage, imgData, this.imageDetection.pixel.threshold / 100);
analysisTimes.push(compareResult.analysisTime);
if (sameImage) {
return x;
}
} catch (err) {
this.logger.warn(`Unexpected error encountered while pixel-comparing images, will skip comparison => ${err.message}`);
}
}
} catch (err) {
if(!err.message.includes('did not end with a valid image extension')) {
this.logger.warn(`Will not compare image from Submission ${x.id} due to error while parsing image URL => ${err.message}`);
}
}
}
return null;
}
// parallel all the things
this.logger.profile('asyncCompare');
const results = await pMap(viableActivity, ci, {concurrency: imageCompareMaxConcurrencyGuess});
this.logger.profile('asyncCompare', {level: 'debug', message: 'Total time for image comparison (incl download/cache calls)'});
const totalAnalysisTime = analysisTimes.reduce((acc, x) => acc + x,0);
if(analysisTimes.length > 0) {
this.logger.debug(`Reference image pixel-compared ${analysisTimes.length} times. Timings: Avg ${formatNumber(totalAnalysisTime / analysisTimes.length, {toFixed: 0})}ms | Max: ${Math.max(...analysisTimes)}ms | Min: ${Math.min(...analysisTimes)}ms | Total: ${totalAnalysisTime}ms (${formatNumber(totalAnalysisTime/1000)}s)`);
}
filteredActivity = filteredActivity.concat(results.filter(x => x !== null));
if (longRun !== undefined) {
clearTimeout(longRun);
}
viableActivity = filteredActivity;
}
}
const groupedActivity = viableActivity.reduce((grouped, activity) => {
const s = activity.subreddit.display_name.toLowerCase();
grouped[s] = (grouped[s] || []).concat(activity);
return grouped;
}, {} as Record<string, (Submission | Comment)[]>);
const summaries = [];
let totalTriggeredOn;
for (const triggerSet of this.thresholds) {
let currCount = 0;
const presentSubs = [];
const {threshold = '>= 1', subreddits = []} = triggerSet;
for (const sub of subreddits.map(x => parseSubredditName(x))) {
const isub = sub.toLowerCase();
const {[isub]: tSub = []} = groupedActivity;
if (tSub.length > 0) {
currCount += tSub.length;
presentSubs.push(sub);
const presentSubs: string[] = [];
let combinedKarma = 0;
const {
threshold = '>= 1',
subreddits = [],
karma: karmaThreshold,
commentState,
submissionState,
} = triggerSet;
// convert subreddits array into entirely StrongSubredditState
const defaultOpts = {
defaultFlags: 'i',
generateDescription: true
};
const subStates: StrongSubredditState[] = subreddits.map((x) => convertSubredditsRawToStrong(x, defaultOpts));
let validActivity: (Comment | Submission)[] = await as.filter(viableActivity, async (activity) => {
if (asSubmission(activity) && submissionState !== undefined) {
return await this.resources.testItemCriteria(activity, [submissionState]);
} else if (commentState !== undefined) {
return await this.resources.testItemCriteria(activity, [commentState]);
}
return true;
});
validActivity = await this.resources.batchTestSubredditCriteria(validActivity, subStates);
for (const activity of validActivity) {
currCount++;
// @ts-ignore
combinedKarma += activity.score;
const pSub = getActivitySubredditName(activity);
if (!presentSubs.includes(pSub)) {
presentSubs.push(pSub);
}
}
for (const activity of viableActivity) {
if (asSubmission(activity) && submissionState !== undefined) {
if (!(await this.resources.testItemCriteria(activity, [submissionState]))) {
continue;
}
} else if (commentState !== undefined) {
if (!(await this.resources.testItemCriteria(activity, [commentState]))) {
continue;
}
}
let inSubreddits = false;
for (const ss of subStates) {
const res = await this.resources.testSubredditCriteria(activity, ss);
if (res) {
inSubreddits = true;
break;
}
}
if (inSubreddits) {
currCount++;
combinedKarma += activity.score;
const pSub = getActivitySubredditName(activity);
if (!presentSubs.includes(pSub)) {
presentSubs.push(pSub);
}
}
}
const {operator, value, isPercent} = parseGenericValueOrPercentComparison(threshold);
let sum = {subsWithActivity: presentSubs, subreddits, count: currCount, threshold, triggered: false, testValue: currCount.toString()};
let sum = {
subsWithActivity: presentSubs,
combinedKarma,
karmaThreshold,
subreddits: subStates.map(x => x.stateDescription),
count: currCount,
threshold,
triggered: false,
testValue: currCount.toString()
};
if (isPercent) {
sum.testValue = `${formatNumber((currCount / viableActivity.length) * 100)}%`;
if (comparisonTextOp(currCount / viableActivity.length, operator, value / 100)) {
@@ -117,6 +359,15 @@ export class RecentActivityRule extends Rule {
sum.triggered = true;
totalTriggeredOn = sum;
}
// if we would trigger on threshold need to also test for karma
if (totalTriggeredOn !== undefined && karmaThreshold !== undefined) {
const {operator: opKarma, value: valueKarma} = parseGenericValueOrPercentComparison(karmaThreshold);
if (!comparisonTextOp(combinedKarma, opKarma, valueKarma)) {
sum.triggered = false;
totalTriggeredOn = undefined;
}
}
summaries.push(sum);
// if either trigger condition is hit end the iteration early
if (totalTriggeredOn !== undefined) {
@@ -129,7 +380,7 @@ export class RecentActivityRule extends Rule {
result = `${PASS} ${resultData.result}`;
this.logger.verbose(result);
return Promise.resolve([true, this.getResult(true, resultData)]);
} else if(summaries.length === 1) {
} else if (summaries.length === 1) {
// can display result if its only one summary otherwise need to log to debug
const res = this.generateResultData(summaries[0], viableActivity);
result = `${FAIL} ${res.result}`;
@@ -142,7 +393,7 @@ export class RecentActivityRule extends Rule {
return Promise.resolve([false, this.getResult(false, {result})]);
}
generateResultData(summary: any, activities: (Submission | Comment)[] = []) {
const {
count,
@@ -150,10 +401,15 @@ export class RecentActivityRule extends Rule {
subreddits = [],
subsWithActivity = [],
threshold,
triggered
triggered,
combinedKarma,
karmaThreshold,
} = summary;
const relevantSubs = subsWithActivity.length === 0 ? subreddits : subsWithActivity;
const totalSummary = `${testValue} activities over ${relevantSubs.length} subreddits ${triggered ? 'met' : 'did not meet'} threshold of ${threshold}`;
let totalSummary = `${testValue} activities over ${relevantSubs.length} subreddits${karmaThreshold !== undefined ? ` with ${combinedKarma} combined karma` : ''} ${triggered ? 'met' : 'did not meet'} threshold of ${threshold}${karmaThreshold !== undefined ? ` and ${karmaThreshold} combined karma` : ''}`;
if (triggered && subsWithActivity.length > 0) {
totalSummary = `${totalSummary} -- subreddits: ${subsWithActivity.join(', ')}`;
}
return {
result: totalSummary,
data: {
@@ -163,7 +419,8 @@ export class RecentActivityRule extends Rule {
subCount: relevantSubs.length,
totalCount: count,
threshold,
testValue
testValue,
karmaThreshold,
}
};
}
@@ -175,7 +432,16 @@ export class RecentActivityRule extends Rule {
* @minProperties 1
* @additionalProperties false
* */
export interface SubThreshold extends SubredditCriteria {
export interface ActivityThreshold {
/**
* When present, a Submission will only be counted if it meets this criteria
* */
submissionState?: SubmissionState
/**
* When present, a Comment will only be counted if it meets this criteria
* */
commentState?: CommentState
/**
* A string containing a comparison operator and a value to compare recent activities against
*
@@ -191,6 +457,35 @@ export interface SubThreshold extends SubredditCriteria {
* @examples [">= 1"]
* */
threshold?: string
/**
* Test the **combined karma** from Activities found in the specified subreddits
*
* Value is a string containing a comparison operator and a number of **combined karma** to compare against
*
* If specified then both `threshold` and `karma` must be met for this `SubThreshold` to be satisfied
*
* The syntax is `(< OR > OR <= OR >=) <number>`
*
* * EX `> 50` => greater than 50 combined karma for all found Activities in specified subreddits
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(%?)(.*)$
* */
karma?: string
/**
* Activities will be counted if they are found in this list of Subreddits
*
* Each value in the list can be either:
*
* * string (name of subreddit)
* * regular expression to run on the subreddit name
* * `SubredditState`
*
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
* */
subreddits?: (string | SubredditState)[]
}
interface RecentActivityConfig extends ActivityWindow, ReferenceSubmission {
@@ -203,7 +498,9 @@ interface RecentActivityConfig extends ActivityWindow, ReferenceSubmission {
* A list of subreddits/count criteria that may trigger this rule. ANY SubThreshold will trigger this rule.
* @minItems 1
* */
thresholds: SubThreshold[],
thresholds: ActivityThreshold[],
imageDetection?: ImageDetection
}
export interface RecentActivityRuleOptions extends RecentActivityConfig, RuleOptions {

View File

@@ -2,14 +2,16 @@ import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
import {Comment} from "snoowrap";
import Submission from "snoowrap/dist/objects/Submission";
import {
comparisonTextOp, FAIL, isExternalUrlSubmission, parseGenericValueComparison,
parseGenericValueOrPercentComparison, parseRegex,
PASS
asSubmission,
comparisonTextOp, FAIL, isExternalUrlSubmission, isSubmission, parseGenericValueComparison,
parseGenericValueOrPercentComparison, parseRegex, parseStringToRegex,
PASS, triggeredIndicator
} from "../util";
import {
ActivityWindowType, JoinOperands,
} from "../Common/interfaces";
import dayjs from 'dayjs';
import SimpleError from "../Utils/SimpleError";
export interface RegexCriteria {
/**
@@ -21,17 +23,11 @@ export interface RegexCriteria {
/**
* A valid Regular Expression to test content against
*
* Do not wrap expression in forward slashes
* If no flags are specified then the **global** flag is used by default
*
* EX For the expression `/reddit|FoxxMD/` use the value should be `reddit|FoxxMD`
*
* @examples ["reddit|FoxxMD"]
* @examples ["/reddit|FoxxMD/ig"]
* */
regex: string,
/**
* Regex flags to use
* */
regexFlags?: string,
/**
* Which content from an Activity to test the regex against
@@ -134,12 +130,11 @@ export class RegexRule extends Rule {
let criteriaResults = [];
for (const criteria of this.criteria) {
for (const [index, criteria] of this.criteria.entries()) {
const {
name,
name = (index + 1),
regex,
regexFlags,
testOn: testOnVals = ['title', 'body'],
lookAt = 'all',
matchThreshold = '> 0',
@@ -157,7 +152,10 @@ export class RegexRule extends Rule {
}, []);
// check regex
const reg = new RegExp(regex);
const reg = parseStringToRegex(regex, 'g');
if(reg === undefined) {
throw new SimpleError(`Value given for regex on Criteria ${name} was not valid: ${regex}`);
}
// ok cool its a valid regex
const matchComparison = parseGenericValueComparison(matchThreshold);
@@ -176,7 +174,7 @@ export class RegexRule extends Rule {
// first lets see if the activity we are checking satisfies thresholds
// since we may be able to avoid api calls to get history
let actMatches = this.getMatchesFromActivity(item, testOn, reg, regexFlags);
let actMatches = this.getMatchesFromActivity(item, testOn, reg);
matches = matches.concat(actMatches).slice(0, 100);
matchCount += actMatches.length;
@@ -226,7 +224,7 @@ export class RegexRule extends Rule {
for (const h of history) {
activitiesTested++;
const aMatches = this.getMatchesFromActivity(h, testOn, reg, regexFlags);
const aMatches = this.getMatchesFromActivity(h, testOn, reg);
matches = matches.concat(aMatches).slice(0, 100);
matchCount += aMatches.length;
const matched = comparisonTextOp(aMatches.length, matchComparison.operator, matchComparison.value);
@@ -300,30 +298,35 @@ export class RegexRule extends Rule {
let index = 0;
for (const c of criteriaResults) {
index++;
let msg = `Crit ${c.criteria.name || index} ${c.triggered ? PASS : FAIL}`;
let msg = `Criteria ${c.criteria.name || `#${index}`} ${triggeredIndicator(c.triggered)}`;
if (c.activityThresholdMet !== undefined) {
msg = `${msg} -- Activity Match=> ${c.activityThresholdMet ? PASS : FAIL} ${c.activitiesMatchedCount} ${c.criteria.activityMatchThreshold} (Threshold ${c.criteria.matchThreshold})`;
msg = `${msg} -- Activity Match ${triggeredIndicator(c.activityThresholdMet)} => ${c.activitiesMatchedCount} ${c.criteria.activityMatchThreshold} (Threshold ${c.criteria.matchThreshold})`;
}
if (c.totalThresholdMet !== undefined) {
msg = `${msg} -- Total Matches=> ${c.totalThresholdMet ? PASS : FAIL} ${c.matchCount} ${c.criteria.totalMatchThreshold}`;
msg = `${msg} -- Total Matches ${triggeredIndicator(c.totalThresholdMet)} => ${c.matchCount} ${c.criteria.totalMatchThreshold}`;
} else {
msg = `${msg} and ${c.matchCount} Total Matches`;
}
msg = `${msg} (Window: ${c.criteria.window})`;
logSummary.push(msg);
if(c.matches.length > 0) {
let matchSample = `-- Matched Values: ${c.matches.slice(0, 3).map(x => `"${x}"`).join(', ')}${c.matches.length > 3 ? `, and ${c.matches.length - 3} more...` : ''}`;
logSummary.push(`${msg} ${matchSample}`);
} else {
logSummary.push(msg);
}
}
const result = `${criteriaMet ? PASS : FAIL} ${logSummary.join(' || ')}`;
const result = `${triggeredIndicator(criteriaMet)} ${logSummary.join(' || ')}`;
this.logger.verbose(result);
return Promise.resolve([criteriaMet, this.getResult(criteriaMet, {result, data: criteriaResults})]);
}
protected getMatchesFromActivity(a: (Submission | Comment), testOn: string[], reg: RegExp, flags?: string): string[] {
protected getMatchesFromActivity(a: (Submission | Comment), testOn: string[], reg: RegExp): string[] {
let m: string[] = [];
// determine what content we are testing
let contents: string[] = [];
if (a instanceof Submission) {
if (asSubmission(a)) {
for (const l of testOn) {
switch (l) {
case 'title':
@@ -346,7 +349,7 @@ export class RegexRule extends Rule {
}
for (const c of contents) {
const results = parseRegex(reg, c, flags);
const results = parseRegex(reg, c);
if (results.matched) {
m = m.concat(results.matches);
}

View File

@@ -1,12 +1,18 @@
import {Rule, RuleJSONConfig, RuleOptions, RuleResult} from "./index";
import {Comment} from "snoowrap";
import {
activityWindowText,
comparisonTextOp, FAIL, isExternalUrlSubmission, isRedditMedia,
activityWindowText, asSubmission,
comparisonTextOp, FAIL, getActivitySubredditName, isExternalUrlSubmission, isRedditMedia,
parseGenericValueComparison, parseSubredditName,
parseUsableLinkIdentifier as linkParser, PASS
parseUsableLinkIdentifier as linkParser, PASS, subredditStateIsNameOnly, toStrongSubredditState
} from "../util";
import {ActivityWindow, ActivityWindowType, ReferenceSubmission} from "../Common/interfaces";
import {
ActivityWindow,
ActivityWindowType,
ReferenceSubmission,
StrongSubredditState,
SubredditState
} from "../Common/interfaces";
import Submission from "snoowrap/dist/objects/Submission";
import dayjs from "dayjs";
import Fuse from 'fuse.js'
@@ -25,7 +31,7 @@ interface RepeatActivityReducer {
const getActivityIdentifier = (activity: (Submission | Comment), length = 200) => {
let identifier: string;
if (activity instanceof Submission) {
if (asSubmission(activity)) {
if (activity.is_self) {
identifier = `${activity.title}${activity.selftext.slice(0, length)}`;
} else if(isRedditMedia(activity)) {
@@ -50,8 +56,10 @@ export class RepeatActivityRule extends Rule {
gapAllowance?: number;
useSubmissionAsReference: boolean;
lookAt: 'submissions' | 'all';
include: string[];
exclude: string[];
include: (string | SubredditState)[];
exclude: (string | SubredditState)[];
hasFullSubredditCrits: boolean = false;
activityFilterFunc: (x: Submission|Comment) => Promise<boolean> = async (x) => true;
keepRemoved: boolean;
minWordCount: number;
@@ -74,8 +82,42 @@ export class RepeatActivityRule extends Rule {
this.window = window;
this.gapAllowance = gapAllowance;
this.useSubmissionAsReference = useSubmissionAsReference;
this.include = include.map(x => parseSubredditName(x).toLowerCase());
this.exclude = exclude.map(x => parseSubredditName(x).toLowerCase());
this.include = include;
this.exclude = exclude;
if(this.include.length > 0) {
const subStates = include.map((x) => {
if(typeof x === 'string') {
return toStrongSubredditState({name: x, stateDescription: x}, {defaultFlags: 'i', generateDescription: true});
}
return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
});
this.hasFullSubredditCrits = !subStates.every(x => subredditStateIsNameOnly(x));
this.activityFilterFunc = async (x: Submission|Comment) => {
for(const ss of subStates) {
if(await this.resources.testSubredditCriteria(x, ss)) {
return true;
}
}
return false;
};
} else if(this.exclude.length > 0) {
const subStates = exclude.map((x) => {
if(typeof x === 'string') {
return toStrongSubredditState({name: x, stateDescription: x}, {defaultFlags: 'i', generateDescription: true});
}
return toStrongSubredditState(x, {defaultFlags: 'i', generateDescription: true});
});
this.hasFullSubredditCrits = !subStates.every(x => subredditStateIsNameOnly(x));
this.activityFilterFunc = async (x: Submission|Comment) => {
for(const ss of subStates) {
if(await this.resources.testSubredditCriteria(x, ss)) {
return false;
}
}
return true;
};
}
this.lookAt = lookAt;
}
@@ -96,17 +138,10 @@ export class RepeatActivityRule extends Rule {
async process(item: Submission|Comment): Promise<[boolean, RuleResult]> {
let referenceUrl;
if(item instanceof Submission && this.useSubmissionAsReference) {
if(asSubmission(item) && this.useSubmissionAsReference) {
referenceUrl = await item.url;
}
let filterFunc = (x: any) => true;
if(this.include.length > 0) {
filterFunc = (x: Submission|Comment) => this.include.includes(x.subreddit.display_name.toLowerCase());
} else if(this.exclude.length > 0) {
filterFunc = (x: Submission|Comment) => !this.exclude.includes(x.subreddit.display_name.toLowerCase());
}
let activities: (Submission | Comment)[] = [];
switch (this.lookAt) {
case 'submissions':
@@ -117,13 +152,20 @@ export class RepeatActivityRule extends Rule {
break;
}
const condensedActivities = activities.reduce((acc: RepeatActivityReducer, activity: (Submission | Comment), index: number) => {
if(this.hasFullSubredditCrits) {
// go ahead and cache subreddits now
// because we can't use batch test since testing activities in order is important for this rule
await this.resources.cacheSubreddits(activities.map(x => x.subreddit));
}
const condensedActivities = await activities.reduce(async (accProm: Promise<RepeatActivityReducer>, activity: (Submission | Comment), index: number) => {
const acc = await accProm;
const {openSets = [], allSets = []} = acc;
let identifier = getActivityIdentifier(activity);
const isUrl = isExternalUrlSubmission(activity);
let fu = new Fuse([identifier], !isUrl ? fuzzyOptions : {...fuzzyOptions, distance: 5});
const validSub = filterFunc(activity);
const validSub = await this.activityFilterFunc(activity);
let minMet = identifier.length >= this.minWordCount;
let updatedAllSets = [...allSets];
@@ -174,7 +216,7 @@ export class RepeatActivityRule extends Rule {
return {openSets: updatedOpenSets, allSets: updatedAllSets};
}, {openSets: [], allSets: []});
}, Promise.resolve({openSets: [], allSets: []}));
const allRepeatSets = [...condensedActivities.allSets, ...condensedActivities.openSets];
@@ -223,7 +265,7 @@ export class RepeatActivityRule extends Rule {
};
for (let set of value) {
const test = comparisonTextOp(set.length, operator, thresholdValue);
const md = set.map((x: (Comment | Submission)) => `[${x instanceof Submission ? x.title : getActivityIdentifier(x, 50)}](https://reddit.com${x.permalink}) in ${x.subreddit_name_prefixed} on ${dayjs(x.created_utc * 1000).utc().format()}`);
const md = set.map((x: (Comment | Submission)) => `[${asSubmission(x) ? x.title : getActivityIdentifier(x, 50)}](https://reddit.com${x.permalink}) in ${x.subreddit_name_prefixed} on ${dayjs(x.created_utc * 1000).utc().format()}`);
summaryData.sets.push(set);
summaryData.largestTrigger = Math.max(summaryData.largestTrigger, set.length);
@@ -294,21 +336,31 @@ interface RepeatActivityConfig extends ActivityWindow, ReferenceSubmission {
* */
gapAllowance?: number,
/**
* Only include Submissions from this list of Subreddits (by name, case-insensitive)
* If present, activities will be counted only if they are found in this list of Subreddits
*
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* Each value in the list can be either:
*
* * string (name of subreddit)
* * regular expression to run on the subreddit name
* * `SubredditState`
*
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
* */
include?: string[],
include?: (string | SubredditState)[],
/**
* Do not include Submissions from this list of Subreddits (by name, case-insensitive)
* If present, activities will be counted only if they are **NOT** found in this list of Subreddits
*
* EX `["mealtimevideos","askscience"]`
* @examples ["mealtimevideos","askscience"]
* @minItems 1
* Each value in the list can be either:
*
* * string (name of subreddit)
* * regular expression to run on the subreddit name
* * `SubredditState`
*
* EX `["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]`
* @examples [["mealtimevideos","askscience", "/onlyfans*\/i", {"over18": true}]]
* */
exclude?: string[],
exclude?: (string | SubredditState)[],
/**
* If present determines which activities to consider for gapAllowance.

View File

@@ -28,10 +28,16 @@ interface ResultContext {
export interface RuleResult extends ResultContext {
premise: RulePremise
kind: string
name: string
triggered: (boolean | null)
}
export type FormattedRuleResult = RuleResult & {
triggered: string
result: string
}
export interface RuleSetResult {
results: RuleResult[],
condition: 'OR' | 'AND',
@@ -148,6 +154,7 @@ export abstract class Rule implements IRule, Triggerable {
protected getResult(triggered: (boolean | null) = null, context: ResultContext = {}): RuleResult {
return {
premise: this.getPremise(),
kind: this.getKind(),
name: this.name,
triggered,
...context,
@@ -204,22 +211,6 @@ export interface UserNoteCriteria {
search?: 'current' | 'consecutive' | 'total'
}
/**
* A duration and how to compare it against a value
*
* The syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`
*
* * EX `> 100 days` => Passes if the date being compared is before 100 days ago
* * EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months
*
* Unit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)
*
* [See] https://regexr.com/609n8 for example
*
* @pattern ^\s*(>|>=|<|<=)\s*(\d+)\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\s*$
* */
export type DurationComparor = string;
export interface IRule extends ChecksActivityState {
/**
* An optional, but highly recommended, friendly name for this rule. If not present will default to `kind`.

View File

@@ -29,6 +29,26 @@
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"description": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "An (array of) string/regular expression to test contents of an Author's profile description against\n\nIf no flags are specified then the **insensitive** flag is used by default\n\nIf using an array then if **any** value in the array passes the description test passes",
"examples": [
[
"/test$/i",
"look for this string literal"
]
]
},
"flairCssClass": {
"description": "A list of (user) flair css class values from the subreddit to match against",
"examples": [
@@ -69,6 +89,10 @@
},
"type": "array"
},
"shadowBanned": {
"description": "Is the author shadowbanned?\n\nThis is determined by trying to retrieve the author's profile. If a 404 is returned it is likely they are shadowbanned",
"type": "boolean"
},
"totalKarma": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
@@ -132,6 +156,11 @@
}
],
"properties": {
"age": {
"description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
"type": "string"
},
"approved": {
"type": "boolean"
},
@@ -154,6 +183,16 @@
"removed": {
"type": "boolean"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"score": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"spam": {
"type": "boolean"
},
@@ -179,6 +218,11 @@
}
],
"properties": {
"age": {
"description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
"type": "string"
},
"approved": {
"type": "boolean"
},
@@ -213,6 +257,16 @@
"removed": {
"type": "boolean"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"score": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"spam": {
"type": "boolean"
},

View File

@@ -1,6 +1,72 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"ActivityThreshold": {
"additionalProperties": false,
"description": "At least one count property must be present. If both are present then either can trigger the rule",
"minProperties": 1,
"properties": {
"commentState": {
"$ref": "#/definitions/CommentState",
"description": "When present, a Comment will only be counted if it meets this criteria",
"examples": [
{
"op": true,
"removed": false
}
]
},
"karma": {
"description": "Test the **combined karma** from Activities found in the specified subreddits\n\nValue is a string containing a comparison operator and a number of **combined karma** to compare against\n\nIf specified then both `threshold` and `karma` must be met for this `SubThreshold` to be satisfied\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 50` => greater than 50 combined karma for all found Activities in specified subreddits",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"submissionState": {
"$ref": "#/definitions/SubmissionState",
"description": "When present, a Submission will only be counted if it meets this criteria",
"examples": [
{
"over_18": true,
"removed": false
}
]
},
"subreddits": {
"description": "Activities will be counted if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"type": "array"
},
"threshold": {
"default": ">= 1",
"description": "A string containing a comparison operator and a value to compare recent activities against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 3` => greater than 3 activities found in the listed subreddits\n* EX `<= 75%` => number of Activities in the subreddits listed are equal to or less than 75% of all Activities\n\n**Note:** If you use percentage comparison here as well as `useSubmissionAsReference` then \"all Activities\" is only pertains to Activities that had the Link of the Submission, rather than all Activities from this window.",
"examples": [
">= 1"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
}
},
"type": "object"
},
"ActivityWindowCriteria": {
"additionalProperties": false,
"description": "Multiple properties that may be used to define what range of Activity to retrieve.\n\nMay specify one, or both properties along with the `satisfyOn` property, to affect the retrieval behavior.",
@@ -167,7 +233,7 @@
"properties": {
"aggregateOn": {
"default": "undefined",
"description": "If `domains` is not specified this list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)\n* If `link` is included then aggregate author's submission history which is external links but not media\n\nIf nothing is specified or list is empty (default) all domains are aggregated",
"description": "This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `redditMedia` is included then aggregate on author's submissions history which are media hosted on reddit: galleries, videos, and images (i.redd.it / v.redd.it)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or domain is `reddit.com`\n* If `link` is included then aggregate author's submission history which is external links and not recognized as `media` by reddit\n\nIf nothing is specified or list is empty (default) rule will only aggregate on `link` and `media` (ignores reddit-hosted content and self-posts)",
"examples": [
[
]
@@ -176,12 +242,23 @@
"enum": [
"link",
"media",
"redditMedia",
"self"
],
"type": "string"
},
"type": "array"
},
"commentState": {
"$ref": "#/definitions/CommentState",
"description": "When present, Comments from `window` will only be counted if they meet this criteria",
"examples": [
{
"op": true,
"removed": false
}
]
},
"consolidateMediaDomains": {
"default": false,
"description": "Should the criteria consolidate recognized media domains into the parent domain?\n\nSubmissions to major media domains (youtube, vimeo) can be identified by individual Channel/Author...\n\n* If `false` then domains will be aggregated at the channel level IE Youtube Channel A (2 counts), Youtube Channel B (3 counts)\n* If `true` then then media domains will be consolidated at domain level and then aggregated IE youtube.com (5 counts)",
@@ -195,7 +272,7 @@
[
]
],
"description": "A list of domains whose Activities will be tested against `threshold`.\n\nIf this is present then `aggregateOn` is ignored.\n\nThe values are tested as partial strings so you do not need to include full URLs, just the part that matters.\n\nEX `[\"youtube\"]` will match submissions with the domain `https://youtube.com/c/aChannel`\nEX `[\"youtube.com/c/bChannel\"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`\n\nIf you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`\n\n**If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**\n\nIf nothing is specified or list is empty (default) aggregate using `aggregateOn`",
"description": "A list of domains whose Activities will be tested against `threshold`.\n\nThe values are tested as partial strings so you do not need to include full URLs, just the part that matters.\n\nEX `[\"youtube\"]` will match submissions with the domain `https://youtube.com/c/aChannel`\nEX `[\"youtube.com/c/bChannel\"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`\n\nIf you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`\n\n**If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**\n\nIf nothing is specified or list is empty (default) aggregate using `aggregateOn`",
"items": {
"type": "string"
},
@@ -210,27 +287,37 @@
"type": "boolean"
},
"exclude": {
"description": "Do not include Activities from this list of Subreddits (by name, case-insensitive)\n\nWill be ignored if `include` is present.\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "When present, Activities WILL NOT be counted if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
},
"minItems": 1,
"type": "array"
},
"include": {
"description": "Only include Activities from this list of Subreddits (by name, case-insensitive)\n\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "When present, Activities WILL ONLY be counted if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
},
"minItems": 1,
"type": "array"
},
"minActivityCount": {
@@ -241,6 +328,16 @@
"name": {
"type": "string"
},
"submissionState": {
"$ref": "#/definitions/SubmissionState",
"description": "When present, Submissions from `window` will only be counted if they meet this criteria",
"examples": [
{
"over_18": true,
"removed": false
}
]
},
"threshold": {
"default": "> 10%",
"description": "A string containing a comparison operator and a value to compare comments against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 12` => greater than 12 activities originate from same attribution\n* EX `<= 10%` => less than 10% of all Activities have the same attribution",
@@ -352,11 +449,6 @@
],
"pattern": "^[a-zA-Z]([\\w -]*[\\w])?$",
"type": "string"
},
"useSubmissionAsReference": {
"default": true,
"description": "If activity is a Submission and is a link (not self-post) then only look at Submissions that contain this link, otherwise consider all activities.",
"type": "boolean"
}
},
"required": [
@@ -392,6 +484,26 @@
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"description": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "An (array of) string/regular expression to test contents of an Author's profile description against\n\nIf no flags are specified then the **insensitive** flag is used by default\n\nIf using an array then if **any** value in the array passes the description test passes",
"examples": [
[
"/test$/i",
"look for this string literal"
]
]
},
"flairCssClass": {
"description": "A list of (user) flair css class values from the subreddit to match against",
"examples": [
@@ -432,6 +544,10 @@
},
"type": "array"
},
"shadowBanned": {
"description": "Is the author shadowbanned?\n\nThis is determined by trying to retrieve the author's profile. If a 404 is returned it is likely they are shadowbanned",
"type": "boolean"
},
"totalKarma": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
@@ -686,7 +802,112 @@
],
"type": "object"
},
"CacheConfig": {
"properties": {
"actionedEventsMax": {
"default": 25,
"description": "The **maximum** number of Events that the cache should store triggered result summaries for\n\nThese summaries are viewable through the Web UI.\n\nThe value specified by a subreddit cannot be larger than the value set by the Operator for the global/bot config (if set)",
"type": "number"
},
"authorTTL": {
"default": 60,
"description": "Amount of time, in seconds, author activity history (Comments/Submission) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache\n\n* ENV => `AUTHOR_TTL`\n* ARG => `--authorTTL <sec>`",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"commentTTL": {
"default": 60,
"description": "Amount of time, in seconds, a comment should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"filterCriteriaTTL": {
"default": 60,
"description": "Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)\n\nThis is especially useful if when polling high-volume comments and your checks rely on author/item filters\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"provider": {
"anyOf": [
{
"$ref": "#/definitions/CacheOptions"
},
{
"enum": [
"memory",
"none",
"redis"
],
"type": "string"
}
],
"description": "The cache provider and, optionally, a custom configuration for that provider\n\nIf not present or `null` provider will be `memory`.\n\nTo specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`"
},
"submissionTTL": {
"default": 60,
"description": "Amount of time, in seconds, a submission should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"subredditTTL": {
"default": 600,
"description": "Amount of time, in seconds, a subreddit (attributes) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
600
],
"type": [
"number",
"boolean"
]
},
"userNotesTTL": {
"default": 300,
"description": "Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
300
],
"type": [
"number",
"boolean"
]
},
"wikiTTL": {
"default": 300,
"description": "Amount of time, in seconds, wiki content pages should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
300
],
"type": [
"number",
"boolean"
]
}
},
"type": "object"
},
"CacheOptions": {
"additionalProperties": {
},
"description": "Configure granular settings for a cache provider with this object",
"properties": {
"auth_pass": {
@@ -751,6 +972,25 @@
],
"type": "string"
},
"ClearProcessedOptions": {
"description": "For very long-running, high-volume subreddits clearing the list of processed activities helps manage memory bloat\n\nAll of these options have default values based on the limit and/or interval set for polling options on each subreddit stream. They only need to modified if the defaults are not sufficient.\n\nIf both `after` and `size` are defined whichever is hit first will trigger the list to clear. `after` will be reset after ever clear.",
"properties": {
"after": {
"description": "An interval the processed list should be cleared after.\n\n* EX `9 days`\n* EX `3 months`\n* EX `5 minutes`",
"pattern": "^\\s*(?<time>\\d+)\\s*(?<unit>days?|weeks?|months?|years?|hours?|minutes?|seconds?|milliseconds?)\\s*$",
"type": "string"
},
"retain": {
"description": "The number of activities to retain in processed list after clearing.\n\nDefaults to `limit` value from `PollingOptions`",
"type": "number"
},
"size": {
"description": "Number of activities found in processed list after which the list should be cleared.\n\nDefaults to the `limit` value from `PollingOptions`",
"type": "number"
}
},
"type": "object"
},
"CommentActionJson": {
"description": "Reply to the Activity. For a submission the reply will be a top-level comment.",
"properties": {
@@ -1045,6 +1285,11 @@
}
],
"properties": {
"age": {
"description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
"type": "string"
},
"approved": {
"type": "boolean"
},
@@ -1067,6 +1312,16 @@
"removed": {
"type": "boolean"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"score": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"spam": {
"type": "boolean"
},
@@ -1247,23 +1502,28 @@
"type": "object"
},
"HistoryCriteria": {
"description": "If both `submission` and `comment` are defined then criteria will only trigger if BOTH thresholds are met",
"description": "Criteria will only trigger if ALL present thresholds (comment, submission, total) are met",
"properties": {
"comment": {
"description": "A string containing a comparison operator and a value to compare comments against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 comments\n* EX `<= 75%` => comments are equal to or less than 75% of all Activities\n\nIf your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:\n\n* EX `> 100 OP` => greater than 100 comments as OP\n* EX `<= 25% as OP` => Comments as OP were less then or equal to 25% of **all Comments**",
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) comments against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 comments\n* EX `<= 75%` => comments are equal to or less than 75% of unfiltered Activities\n\nIf your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:\n\n* EX `> 100 OP` => greater than 100 filtered comments as OP\n* EX `<= 25% as OP` => **Filtered** comments as OP were less then or equal to 25% of **unfiltered Comments**",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"minActivityCount": {
"default": 5,
"description": "The minimum number of activities that must exist from the `window` results for this criteria to run",
"description": "The minimum number of **filtered** activities that must exist from the `window` results for this criteria to run",
"type": "number"
},
"name": {
"type": "string"
},
"submission": {
"description": "A string containing a comparison operator and a value to compare submissions against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 submissions\n* EX `<= 75%` => submissions are equal to or less than 75% of all Activities",
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) submissions against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 filtered submissions\n* EX `<= 75%` => filtered submissions are equal to or less than 75% of unfiltered Activities",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"total": {
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`) activities against\n\n**Note:** This is only useful if using `include` or `exclude` otherwise percent will always be 100% and total === activityTotal\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 filtered activities\n* EX `<= 75%` => filtered activities are equal to or less than 75% of all Activities",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
@@ -1332,27 +1592,51 @@
"type": "array"
},
"exclude": {
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are **NOT** found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`\n\n**Note:** This affects **post-window retrieval** activities. So that:\n\n* `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering\n* all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**\n* -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"include": {
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are found in this list of Subreddits.\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`\n\n **Note:** This affects **post-window retrieval** activities. So that:\n\n* `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering\n* all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**\n* -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"itemIs": {
@@ -1394,6 +1678,77 @@
],
"type": "object"
},
"ImageDetection": {
"description": "When comparing submissions detect if the reference submission is an image and do a pixel-comparison to other detected image submissions.\n\n**Note:** This is an **experimental feature**",
"properties": {
"enable": {
"description": "Is image detection enabled?",
"type": "boolean"
},
"fetchBehavior": {
"default": "extension",
"description": "Determines how and when to check if a URL is an image\n\n**Note:** After fetching a URL the **Content-Type** is validated to contain `image` before detection occurs\n\n**When `extension`:** (default)\n\n* Only URLs that end in known image extensions (.png, .jpg, etc...) are fetched\n\n**When `unknown`:**\n\n* URLs that end in known image extensions (.png, .jpg, etc...) are fetched\n* URLs with no extension or unknown (IE non-video, non-doc, etc...) are fetched\n\n**When `all`:**\n\n* All submissions that have URLs (non-self) will be fetched, regardless of extension\n* **Note:** This can be bandwidth/CPU intensive if history window is large so use with care",
"enum": [
"all",
"extension",
"unknown"
],
"type": "string"
},
"hash": {
"description": "Use perceptual hashing (blockhash-js) to compare images\n\nPros:\n\n* very fast\n* low cpu/memory usage\n* results can be cached\n\nCons:\n\n* not as accurate as pixel comparison\n* weaker for text-heavy images\n* mostly color-blind\n\nBest uses:\n\n* Detecting (general) duplicate images\n* Comparing large number of images",
"properties": {
"bits": {
"default": 32,
"description": "Bit count determines accuracy of hash and granularity of hash comparison (comparison to other hashes)\n\nDefault is `32`\n\n**NOTE:** Hashes of different sizes (bits) cannot be compared. If you are caching results make sure all rules where results may be shared use the same bit count to ensure hashes can be compared. Otherwise hashes will be recomputed.",
"type": "number"
},
"enable": {
"default": true,
"description": "Enabled by default.\n\nIf both `hash` and `pixel` are enabled then `pixel` will be used to verify image comparison when hashes matches",
"type": "boolean"
},
"hardThreshold": {
"description": "High Confidence Threshold\n\nIf the difference in comparison is equal to or less than this number the images are considered the same and pixel comparison WILL NOT occur\n\nDefaults to the parent-level `threshold` value if not present\n\nUse `null` if you want pixel comparison to ALWAYS occur (softThreshold must be present)",
"type": [
"null",
"number"
]
},
"softThreshold": {
"description": "Low Confidence Threshold -- only used if `pixel` is enabled\n\nIf the difference in comparison is\n\n1) equal to or less than this value and\n2) the value is greater than `hardThreshold`\n\nthe images will be compared using the `pixel` method",
"type": "number"
},
"ttl": {
"description": "Number of seconds to cache image hash",
"type": "number"
}
},
"type": "object"
},
"pixel": {
"description": "Use pixel counting to compare images\n\nPros:\n\n* most accurate\n* strong with text or color-only changes\n\nCons:\n\n* much slower than hashing\n* memory/cpu intensive\n\nBest uses:\n\n* Comparison text-only images\n* Comparison requires high degree of accuracy or changes are subtle",
"properties": {
"enable": {
"default": false,
"description": "Disabled by default.",
"type": "boolean"
},
"threshold": {
"description": "The percentage, as a whole number, of pixels that are **different** between the two images at which point the images are not considered the same.",
"type": "number"
}
},
"type": "object"
},
"threshold": {
"default": 5,
"description": "The percentage, as a whole number, of difference between two images at which point they will not be considered the same.\n\nWill be used as `hash.hardThreshold` and `pixel.threshold` if those values are not specified\n\nDefault is `5`",
"type": "number"
}
},
"type": "object"
},
"LockActionJson": {
"description": "Lock the Activity",
"properties": {
@@ -1567,6 +1922,16 @@
"title": {
"description": "The title of the message\n\nIf not specified will be defaulted to `Concerning your [Submission/Comment]`",
"type": "string"
},
"to": {
"description": "Entity to send message to.\n\nIf not present Message be will sent to the Author of the Activity being checked.\n\nValid formats:\n\n* `aUserName` -- send to /u/aUserName\n* `u/aUserName` -- send to /u/aUserName\n* `r/aSubreddit` -- sent to modmail of /r/aSubreddit\n\n**Note:** Reddit does not support sending a message AS a subreddit TO another subreddit",
"examples": [
"aUserName",
"u/aUserName",
"r/aSubreddit"
],
"pattern": "^\\s*(\\/[ru]\\/|[ru]\\/)*(\\w+)*\\s*$",
"type": "string"
}
},
"required": [
@@ -1651,6 +2016,10 @@
}
],
"properties": {
"clearProcessed": {
"$ref": "#/definitions/ClearProcessedOptions",
"description": "For very long-running, high-volume subreddits clearing the list of processed activities helps manage memory bloat\n\nAll of these options have default values based on the limit and/or interval set for polling options on each subreddit stream. They only need to modified if the defaults are not sufficient.\n\nIf both `after` and `size` are defined whichever is hit first will trigger the list to clear. `after` will be reset after ever clear."
},
"delayUntil": {
"description": "Delay processing Activity until it is `N` seconds old\n\nUseful if there are other bots that may process an Activity and you want this bot to run first/last/etc.\n\nIf the Activity is already `N` seconds old when it is initially retrieved no refresh of the Activity occurs (no API request is made) and it is immediately processed.",
"type": "number"
@@ -1709,6 +2078,10 @@
}
]
},
"imageDetection": {
"$ref": "#/definitions/ImageDetection",
"description": "When comparing submissions detect if the reference submission is an image and do a pixel-comparison to other detected image submissions.\n\n**Note:** This is an **experimental feature**"
},
"itemIs": {
"anyOf": [
{
@@ -1759,7 +2132,7 @@
"thresholds": {
"description": "A list of subreddits/count criteria that may trigger this rule. ANY SubThreshold will trigger this rule.",
"items": {
"$ref": "#/definitions/SubThreshold"
"$ref": "#/definitions/ActivityThreshold"
},
"minItems": 1,
"type": "array"
@@ -1796,6 +2169,49 @@
],
"type": "object"
},
"RegExp": {
"properties": {
"dotAll": {
"type": "boolean"
},
"flags": {
"type": "string"
},
"global": {
"type": "boolean"
},
"ignoreCase": {
"type": "boolean"
},
"lastIndex": {
"type": "number"
},
"multiline": {
"type": "boolean"
},
"source": {
"type": "string"
},
"sticky": {
"type": "boolean"
},
"unicode": {
"type": "boolean"
}
},
"required": [
"dotAll",
"flags",
"global",
"ignoreCase",
"lastIndex",
"multiline",
"source",
"sticky",
"unicode"
],
"type": "object"
},
"RegexCriteria": {
"properties": {
"activityMatchThreshold": {
@@ -1834,16 +2250,12 @@
"type": "string"
},
"regex": {
"description": "A valid Regular Expression to test content against\n\nDo not wrap expression in forward slashes\n\nEX For the expression `/reddit|FoxxMD/` use the value should be `reddit|FoxxMD`",
"description": "A valid Regular Expression to test content against\n\nIf no flags are specified then the **global** flag is used by default",
"examples": [
"reddit|FoxxMD"
"/reddit|FoxxMD/ig"
],
"type": "string"
},
"regexFlags": {
"description": "Regex flags to use",
"type": "string"
},
"testOn": {
"default": [
"title",
@@ -2082,15 +2494,27 @@
]
},
"exclude": {
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are **NOT** found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"gapAllowance": {
@@ -2098,15 +2522,27 @@
"type": "number"
},
"include": {
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"itemIs": {
@@ -2333,40 +2769,6 @@
],
"type": "object"
},
"SubThreshold": {
"additionalProperties": false,
"description": "At least one count property must be present. If both are present then either can trigger the rule",
"minProperties": 1,
"properties": {
"subreddits": {
"description": "A list of Subreddits (by name, case-insensitive) to look for.\n\nEX [\"mealtimevideos\",\"askscience\"]",
"examples": [
[
"mealtimevideos",
"askscience"
]
],
"items": {
"type": "string"
},
"minItems": 1,
"type": "array"
},
"threshold": {
"default": ">= 1",
"description": "A string containing a comparison operator and a value to compare recent activities against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 3` => greater than 3 activities found in the listed subreddits\n* EX `<= 75%` => number of Activities in the subreddits listed are equal to or less than 75% of all Activities\n\n**Note:** If you use percentage comparison here as well as `useSubmissionAsReference` then \"all Activities\" is only pertains to Activities that had the Link of the Submission, rather than all Activities from this window.",
"examples": [
">= 1"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
}
},
"required": [
"subreddits"
],
"type": "object"
},
"SubmissionCheckJson": {
"properties": {
"actions": {
@@ -2439,6 +2841,10 @@
}
]
},
"cacheUserResult": {
"$ref": "#/definitions/UserResultCacheOptions",
"description": "Cache the result of this check based on the comment author and the submission id\n\nThis is useful in this type of scenario:\n\n1. This check is configured to run on comments for specific submissions with high volume activity\n2. The rules being run are not dependent on the content of the comment\n3. The rule results are not likely to change while cache is valid"
},
"condition": {
"default": "AND",
"description": "Under what condition should a set of run `Rule` objects be considered \"successful\"?\n\nIf `OR` then **any** triggered `Rule` object results in success.\n\nIf `AND` then **all** `Rule` objects must be triggered to result in success.",
@@ -2549,6 +2955,11 @@
}
],
"properties": {
"age": {
"description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
"type": "string"
},
"approved": {
"type": "boolean"
},
@@ -2583,6 +2994,16 @@
"removed": {
"type": "boolean"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"score": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"spam": {
"type": "boolean"
},
@@ -2599,70 +3020,40 @@
},
"type": "object"
},
"SubredditCacheConfig": {
"SubredditState": {
"description": "Different attributes a `Subreddit` can be in. Only include a property if you want to check it.",
"examples": [
{
"over18": true
}
],
"properties": {
"authorTTL": {
"default": 60,
"description": "Amount of time, in seconds, author activities (Comments/Submission) should be cached",
"examples": [
60
],
"type": "number"
},
"commentTTL": {
"default": 60,
"description": "Amount of time, in seconds, a comment should be cached",
"examples": [
60
],
"type": "number"
},
"filterCriteriaTTL": {
"default": 60,
"description": "Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)\n\nThis is especially useful when polling high-volume comments and your checks rely on author/item filters",
"examples": [
60
],
"type": "number"
},
"provider": {
"name": {
"anyOf": [
{
"$ref": "#/definitions/CacheOptions"
"$ref": "#/definitions/RegExp"
},
{
"enum": [
"memory",
"none",
"redis"
],
"type": "string"
}
],
"description": "The name of the subreddit.\n\nCan be a normal string (will check case-insensitive) or a regular expression\n\nEX `[\"mealtimevideos\", \"/onlyfans*\\/i\"]`",
"examples": [
"mealtimevideos",
"/onlyfans*/i"
]
},
"submissionTTL": {
"default": 60,
"description": "Amount of time, in seconds, a submission should be cached",
"examples": [
60
],
"type": "number"
"over18": {
"description": "Is subreddit NSFW/over 18?\n\n**Note**: This is **mod-controlled flag** so it is up to the mods of the subreddit to correctly mark their subreddit as NSFW",
"type": "boolean"
},
"userNotesTTL": {
"default": 300,
"description": "Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached",
"examples": [
300
],
"type": "number"
"quarantine": {
"description": "Is subreddit quarantined?",
"type": "boolean"
},
"wikiTTL": {
"default": 300,
"description": "Amount of time, in seconds, wiki content pages should be cached",
"examples": [
300
],
"type": "number"
"stateDescription": {
"description": "A friendly description of what this State is trying to parse",
"type": "string"
}
},
"type": "object"
@@ -2810,6 +3201,12 @@
"description": "Cache the result of this check based on the comment author and the submission id\n\nThis is useful in this type of scenario:\n\n1. This check is configured to run on comments for specific submissions with high volume activity\n2. The rules being run are not dependent on the content of the comment\n3. The rule results are not likely to change while cache is valid",
"properties": {
"enable": {
"default": false,
"type": "boolean"
},
"runActions": {
"default": true,
"description": "In the event the cache returns a triggered result should the actions for the check also be run?",
"type": "boolean"
},
"ttl": {
@@ -2826,7 +3223,7 @@
},
"properties": {
"caching": {
"$ref": "#/definitions/SubredditCacheConfig",
"$ref": "#/definitions/CacheConfig",
"description": "Per-subreddit config for caching TTL values. If set to `false` caching is disabled."
},
"checks": {

View File

@@ -23,74 +23,8 @@
"description": "The configuration for an **individual reddit account** ContextMod will run as a bot.\n\nMultiple bot configs may be specified (one per reddit account).\n\n**NOTE:** If `bots` is not specified in a `FILE` then a default `bot` is generated using `ENV/ARG` values IE `CLIENT_ID`, etc...but if `bots` IS specified the default is not generated.",
"properties": {
"caching": {
"description": "Settings to configure the default caching behavior for each subreddit",
"properties": {
"authorTTL": {
"default": 60,
"description": "Amount of time, in seconds, author activity history (Comments/Submission) should be cached\n\n* ENV => `AUTHOR_TTL`\n* ARG => `--authorTTL <sec>`",
"examples": [
60
],
"type": "number"
},
"commentTTL": {
"default": 60,
"description": "Amount of time, in seconds, a comment should be cached",
"examples": [
60
],
"type": "number"
},
"filterCriteriaTTL": {
"default": 60,
"description": "Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)\n\nThis is especially useful when polling high-volume comments and your checks rely on author/item filters",
"examples": [
60
],
"type": "number"
},
"provider": {
"anyOf": [
{
"$ref": "#/definitions/CacheOptions"
},
{
"enum": [
"memory",
"none",
"redis"
],
"type": "string"
}
],
"description": "The cache provider and, optionally, a custom configuration for that provider\n\nIf not present or `null` provider will be `memory`.\n\nTo specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`"
},
"submissionTTL": {
"default": 60,
"description": "Amount of time, in seconds, a submission should be cached",
"examples": [
60
],
"type": "number"
},
"userNotesTTL": {
"default": 300,
"description": "Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached",
"examples": [
300
],
"type": "number"
},
"wikiTTL": {
"default": 300,
"description": "Amount of time, in seconds, wiki content pages should be cached",
"examples": [
300
],
"type": "number"
}
},
"type": "object"
"$ref": "#/definitions/OperatorCacheConfig",
"description": "Settings to configure the default caching behavior for this bot\n\nEvery setting not specified will default to what is specified by the global operator caching config"
},
"credentials": {
"$ref": "#/definitions/RedditCredentials",
@@ -145,6 +79,10 @@
"default": false,
"description": "If set to `true` all subreddits polling unmoderated/modqueue with default polling settings will share a request to \"r/mod\"\notherwise each subreddit will poll its own mod view\n\n* ENV => `SHARE_MOD`\n* ARG => `--shareMod`",
"type": "boolean"
},
"stagger": {
"description": "If sharing a mod stream stagger pushing relevant Activities to individual subreddits.\n\nUseful when running many subreddits and rules are potentially cpu/memory/traffic heavy -- allows spreading out load",
"type": "number"
}
},
"type": "object"
@@ -243,6 +181,8 @@
"type": "object"
},
"CacheOptions": {
"additionalProperties": {
},
"description": "Configure granular settings for a cache provider with this object",
"properties": {
"auth_pass": {
@@ -394,6 +334,114 @@
],
"type": "object"
},
"OperatorCacheConfig": {
"properties": {
"actionedEventsDefault": {
"default": 25,
"description": "The **default** number of Events that the cache will store triggered result summaries for\n\nThese summaries are viewable through the Web UI.\n\nThe value specified cannot be larger than `actionedEventsMax` for the global/bot config (if set)",
"type": "number"
},
"actionedEventsMax": {
"default": 25,
"description": "The **maximum** number of Events that the cache should store triggered result summaries for\n\nThese summaries are viewable through the Web UI.\n\nThe value specified by a subreddit cannot be larger than the value set by the Operator for the global/bot config (if set)",
"type": "number"
},
"authorTTL": {
"default": 60,
"description": "Amount of time, in seconds, author activity history (Comments/Submission) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache\n\n* ENV => `AUTHOR_TTL`\n* ARG => `--authorTTL <sec>`",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"commentTTL": {
"default": 60,
"description": "Amount of time, in seconds, a comment should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"filterCriteriaTTL": {
"default": 60,
"description": "Amount of time, in seconds, to cache filter criteria results (`authorIs` and `itemIs` results)\n\nThis is especially useful when polling high-volume comments and your checks rely on author/item filters\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"provider": {
"anyOf": [
{
"$ref": "#/definitions/CacheOptions"
},
{
"enum": [
"memory",
"none",
"redis"
],
"type": "string"
}
],
"description": "The cache provider and, optionally, a custom configuration for that provider\n\nIf not present or `null` provider will be `memory`.\n\nTo specify another `provider` but use its default configuration set this property to a string of one of the available providers: `memory`, `redis`, or `none`"
},
"submissionTTL": {
"default": 60,
"description": "Amount of time, in seconds, a submission should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
60
],
"type": [
"number",
"boolean"
]
},
"subredditTTL": {
"default": 600,
"description": "Amount of time, in seconds, a subreddit (attributes) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
600
],
"type": [
"number",
"boolean"
]
},
"userNotesTTL": {
"default": 300,
"description": "Amount of time, in seconds, [Toolbox User Notes](https://www.reddit.com/r/toolbox/wiki/docs/usernotes) should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
300
],
"type": [
"number",
"boolean"
]
},
"wikiTTL": {
"default": 300,
"description": "Amount of time, in seconds, wiki content pages should be cached\n\n* If `0` or `true` will cache indefinitely (not recommended)\n* If `false` will not cache",
"examples": [
300
],
"type": [
"number",
"boolean"
]
}
},
"type": "object"
},
"PollingDefaults": {
"properties": {
"delayUntil": {
@@ -527,6 +575,10 @@
},
"type": "array"
},
"caching": {
"$ref": "#/definitions/OperatorCacheConfig",
"description": "Settings to configure the default caching behavior globally\n\nThese settings will be used by each bot, and subreddit, that does not specify their own"
},
"logging": {
"description": "Settings to configure global logging defaults",
"properties": {
@@ -605,6 +657,21 @@
"web": {
"description": "Settings for the web interface",
"properties": {
"caching": {
"anyOf": [
{
"$ref": "#/definitions/CacheOptions"
},
{
"enum": [
"memory",
"redis"
],
"type": "string"
}
],
"description": "Caching provider to use for session and invite data\n\nIf none is provided the top-level caching provider is used"
},
"clients": {
"description": "A list of CM Servers this Client should connect to.\n\nIf not specified a default `BotConnection` for this instance is generated",
"examples": [
@@ -631,6 +698,20 @@
}
]
},
"invites": {
"description": "Settings related to oauth flow invites",
"properties": {
"maxAge": {
"default": 0,
"description": "Number of seconds an invite should be valid for\n\n If `0` or not specified (default) invites do not expire",
"examples": [
0
],
"type": "number"
}
},
"type": "object"
},
"logLevel": {
"description": "The default log level to filter to in the web interface\n\nIf not specified or `null` will be same as global `logLevel`",
"enum": [
@@ -674,27 +755,16 @@
"session": {
"description": "Settings to configure the behavior of user sessions -- the session is what the web interface uses to identify logged in users.",
"properties": {
"provider": {
"anyOf": [
{
"$ref": "#/definitions/CacheOptions"
},
{
"enum": [
"memory",
"redis"
],
"type": "string"
}
],
"default": "memory",
"description": "The cache provider to use.\n\nThe default should be sufficient for almost all use cases",
"maxAge": {
"default": 86400,
"description": "Number of seconds a session should be valid for.\n\nDefault is 1 day",
"examples": [
"memory"
]
86400
],
"type": "number"
},
"secret": {
"description": "The secret value used to encrypt session data\n\nIf provider is persistent (redis) specifying a value here will ensure sessions are valid between application restarts\n\nWhen not present or `null` a random string is generated on application start",
"description": "The secret value used to encrypt session data\n\nIf provider is persistent (`redis`) specifying a value here will ensure sessions are valid between application restarts\n\nWhen not present or `null` a random string is generated on application start",
"examples": [
"definitelyARandomString"
],

View File

@@ -24,6 +24,72 @@
}
],
"definitions": {
"ActivityThreshold": {
"additionalProperties": false,
"description": "At least one count property must be present. If both are present then either can trigger the rule",
"minProperties": 1,
"properties": {
"commentState": {
"$ref": "#/definitions/CommentState",
"description": "When present, a Comment will only be counted if it meets this criteria",
"examples": [
{
"op": true,
"removed": false
}
]
},
"karma": {
"description": "Test the **combined karma** from Activities found in the specified subreddits\n\nValue is a string containing a comparison operator and a number of **combined karma** to compare against\n\nIf specified then both `threshold` and `karma` must be met for this `SubThreshold` to be satisfied\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 50` => greater than 50 combined karma for all found Activities in specified subreddits",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"submissionState": {
"$ref": "#/definitions/SubmissionState",
"description": "When present, a Submission will only be counted if it meets this criteria",
"examples": [
{
"over_18": true,
"removed": false
}
]
},
"subreddits": {
"description": "Activities will be counted if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"type": "array"
},
"threshold": {
"default": ">= 1",
"description": "A string containing a comparison operator and a value to compare recent activities against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 3` => greater than 3 activities found in the listed subreddits\n* EX `<= 75%` => number of Activities in the subreddits listed are equal to or less than 75% of all Activities\n\n**Note:** If you use percentage comparison here as well as `useSubmissionAsReference` then \"all Activities\" only pertains to Activities that had the Link of the Submission, rather than all Activities from this window.",
"examples": [
">= 1"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
}
},
"type": "object"
},
"ActivityWindowCriteria": {
"additionalProperties": false,
"description": "Multiple properties that may be used to define what range of Activity to retrieve.\n\nMay specify one, or both properties along with the `satisfyOn` property, to affect the retrieval behavior.",
@@ -113,7 +179,7 @@
"properties": {
"aggregateOn": {
"default": "undefined",
"description": "If `domains` is not specified this list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)\n* If `link` is included then aggregate author's submission history which is external links but not media\n\nIf nothing is specified or list is empty (default) all domains are aggregated",
"description": "This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `redditMedia` is included then aggregate on author's submissions history which are media hosted on reddit: galleries, videos, and images (i.redd.it / v.redd.it)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or domain is `reddit.com`\n* If `link` is included then aggregate author's submission history which is external links and not recognized as `media` by reddit\n\nIf nothing is specified or list is empty (default) rule will only aggregate on `link` and `media` (ignores reddit-hosted content and self-posts)",
"examples": [
[
]
@@ -122,12 +188,23 @@
"enum": [
"link",
"media",
"redditMedia",
"self"
],
"type": "string"
},
"type": "array"
},
"commentState": {
"$ref": "#/definitions/CommentState",
"description": "When present, Comments from `window` will only be counted if they meet this criteria",
"examples": [
{
"op": true,
"removed": false
}
]
},
"consolidateMediaDomains": {
"default": false,
"description": "Should the criteria consolidate recognized media domains into the parent domain?\n\nSubmissions to major media domains (youtube, vimeo) can be identified by individual Channel/Author...\n\n* If `false` then domains will be aggregated at the channel level IE Youtube Channel A (2 counts), Youtube Channel B (3 counts)\n* If `true` then then media domains will be consolidated at domain level and then aggregated IE youtube.com (5 counts)",
@@ -141,7 +218,7 @@
[
]
],
"description": "A list of domains whose Activities will be tested against `threshold`.\n\nIf this is present then `aggregateOn` is ignored.\n\nThe values are tested as partial strings so you do not need to include full URLs, just the part that matters.\n\nEX `[\"youtube\"]` will match submissions with the domain `https://youtube.com/c/aChannel`\nEX `[\"youtube.com/c/bChannel\"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`\n\nIf you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`\n\n**If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**\n\nIf nothing is specified or list is empty (default) aggregate using `aggregateOn`",
"description": "A list of domains whose Activities will be tested against `threshold`.\n\nThe values are tested as partial strings so you do not need to include full URLs, just the part that matters.\n\nEX `[\"youtube\"]` will match submissions with the domain `https://youtube.com/c/aChannel`\nEX `[\"youtube.com/c/bChannel\"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`\n\nIf you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`\n\n**If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**\n\nIf nothing is specified or list is empty (default) aggregate using `aggregateOn`",
"items": {
"type": "string"
},
@@ -156,27 +233,37 @@
"type": "boolean"
},
"exclude": {
"description": "Do not include Activities from this list of Subreddits (by name, case-insensitive)\n\nWill be ignored if `include` is present.\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "When present, Activities WILL NOT be counted if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
},
"minItems": 1,
"type": "array"
},
"include": {
"description": "Only include Activities from this list of Subreddits (by name, case-insensitive)\n\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "When present, Activities WILL ONLY be counted if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
},
"minItems": 1,
"type": "array"
},
"minActivityCount": {
@@ -187,6 +274,16 @@
"name": {
"type": "string"
},
"submissionState": {
"$ref": "#/definitions/SubmissionState",
"description": "When present, Submissions from `window` will only be counted if they meet this criteria",
"examples": [
{
"over_18": true,
"removed": false
}
]
},
"threshold": {
"default": "> 10%",
"description": "A string containing a comparison operator and a value to compare comments against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 12` => greater than 12 activities originate from same attribution\n* EX `<= 10%` => less than 10% of all Activities have the same attribution",
@@ -298,11 +395,6 @@
],
"pattern": "^[a-zA-Z]([\\w -]*[\\w])?$",
"type": "string"
},
"useSubmissionAsReference": {
"default": true,
"description": "If activity is a Submission and is a link (not self-post) then only look at Submissions that contain this link, otherwise consider all activities.",
"type": "boolean"
}
},
"required": [
@@ -338,6 +430,26 @@
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"description": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "An (array of) string/regular expression to test contents of an Author's profile description against\n\nIf no flags are specified then the **insensitive** flag is used by default\n\nIf using an array then if **any** value in the array passes the description test passes",
"examples": [
[
"/test$/i",
"look for this string literal"
]
]
},
"flairCssClass": {
"description": "A list of (user) flair css class values from the subreddit to match against",
"examples": [
@@ -378,6 +490,10 @@
},
"type": "array"
},
"shadowBanned": {
"description": "Is the author shadowbanned?\n\nThis is determined by trying to retrieve the author's profile. If a 404 is returned it is likely they are shadowbanned",
"type": "boolean"
},
"totalKarma": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
@@ -516,6 +632,11 @@
}
],
"properties": {
"age": {
"description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
"type": "string"
},
"approved": {
"type": "boolean"
},
@@ -538,6 +659,16 @@
"removed": {
"type": "boolean"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"score": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"spam": {
"type": "boolean"
},
@@ -611,23 +742,28 @@
"type": "object"
},
"HistoryCriteria": {
"description": "If both `submission` and `comment` are defined then criteria will only trigger if BOTH thresholds are met",
"description": "Criteria will only trigger if ALL present thresholds (comment, submission, total) are met",
"properties": {
"comment": {
"description": "A string containing a comparison operator and a value to compare comments against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 comments\n* EX `<= 75%` => comments are equal to or less than 75% of all Activities\n\nIf your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:\n\n* EX `> 100 OP` => greater than 100 comments as OP\n* EX `<= 25% as OP` => Comments as OP were less then or equal to 25% of **all Comments**",
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) comments against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 comments\n* EX `<= 75%` => comments are equal to or less than 75% of unfiltered Activities\n\nIf your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:\n\n* EX `> 100 OP` => greater than 100 filtered comments as OP\n* EX `<= 25% as OP` => **Filtered** comments as OP were less then or equal to 25% of **unfiltered Comments**",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"minActivityCount": {
"default": 5,
"description": "The minimum number of activities that must exist from the `window` results for this criteria to run",
"description": "The minimum number of **filtered** activities that must exist from the `window` results for this criteria to run",
"type": "number"
},
"name": {
"type": "string"
},
"submission": {
"description": "A string containing a comparison operator and a value to compare submissions against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 submissions\n* EX `<= 75%` => submissions are equal to or less than 75% of all Activities",
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) submissions against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 filtered submissions\n* EX `<= 75%` => filtered submissions are equal to or less than 75% of unfiltered Activities",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"total": {
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`) activities against\n\n**Note:** This is only useful if using `include` or `exclude` otherwise percent will always be 100% and total === activityTotal\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 filtered activities\n* EX `<= 75%` => filtered activities are equal to or less than 75% of all Activities",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
@@ -696,27 +832,51 @@
"type": "array"
},
"exclude": {
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are **NOT** found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`\n\n**Note:** This affects **post-window retrieval** activities. So that:\n\n* `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering\n* all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**\n* -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"include": {
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are found in this list of Subreddits.\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`\n\n **Note:** This affects **post-window retrieval** activities. So that:\n\n* `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering\n* all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**\n* -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"itemIs": {
@@ -758,6 +918,77 @@
],
"type": "object"
},
"ImageDetection": {
"description": "When comparing submissions detect if the reference submission is an image and do a pixel-comparison to other detected image submissions.\n\n**Note:** This is an **experimental feature**",
"properties": {
"enable": {
"description": "Is image detection enabled?",
"type": "boolean"
},
"fetchBehavior": {
"default": "extension",
"description": "Determines how and when to check if a URL is an image\n\n**Note:** After fetching a URL the **Content-Type** is validated to contain `image` before detection occurs\n\n**When `extension`:** (default)\n\n* Only URLs that end in known image extensions (.png, .jpg, etc...) are fetched\n\n**When `unknown`:**\n\n* URLs that end in known image extensions (.png, .jpg, etc...) are fetched\n* URLs with no extension or unknown (IE non-video, non-doc, etc...) are fetched\n\n**When `all`:**\n\n* All submissions that have URLs (non-self) will be fetched, regardless of extension\n* **Note:** This can be bandwidth/CPU intensive if history window is large so use with care",
"enum": [
"all",
"extension",
"unknown"
],
"type": "string"
},
"hash": {
"description": "Use perceptual hashing (blockhash-js) to compare images\n\nPros:\n\n* very fast\n* low cpu/memory usage\n* results can be cached\n\nCons:\n\n* not as accurate as pixel comparison\n* weaker for text-heavy images\n* mostly color-blind\n\nBest uses:\n\n* Detecting (general) duplicate images\n* Comparing large number of images",
"properties": {
"bits": {
"default": 32,
"description": "Bit count determines accuracy of hash and granularity of hash comparison (comparison to other hashes)\n\nDefault is `32`\n\n**NOTE:** Hashes of different sizes (bits) cannot be compared. If you are caching results make sure all rules where results may be shared use the same bit count to ensure hashes can be compared. Otherwise hashes will be recomputed.",
"type": "number"
},
"enable": {
"default": true,
"description": "Enabled by default.\n\nIf both `hash` and `pixel` are enabled then `pixel` will be used to verify image comparison when hashes matches",
"type": "boolean"
},
"hardThreshold": {
"description": "High Confidence Threshold\n\nIf the difference in comparison is equal to or less than this number the images are considered the same and pixel comparison WILL NOT occur\n\nDefaults to the parent-level `threshold` value if not present\n\nUse `null` if you want pixel comparison to ALWAYS occur (softThreshold must be present)",
"type": [
"null",
"number"
]
},
"softThreshold": {
"description": "Low Confidence Threshold -- only used if `pixel` is enabled\n\nIf the difference in comparison is\n\n1) equal to or less than this value and\n2) the value is greater than `hardThreshold`\n\nthe images will be compared using the `pixel` method",
"type": "number"
},
"ttl": {
"description": "Number of seconds to cache image hash",
"type": "number"
}
},
"type": "object"
},
"pixel": {
"description": "Use pixel counting to compare images\n\nPros:\n\n* most accurate\n* strong with text or color-only changes\n\nCons:\n\n* much slower than hashing\n* memory/cpu intensive\n\nBest uses:\n\n* Comparing text-only images\n* When comparison requires a high degree of accuracy or changes are subtle",
"properties": {
"enable": {
"default": false,
"description": "Disabled by default.",
"type": "boolean"
},
"threshold": {
"description": "The percentage, as a whole number, of pixels that are **different** between the two images at which point the images are not considered the same.",
"type": "number"
}
},
"type": "object"
},
"threshold": {
"default": 5,
"description": "The percentage, as a whole number, of difference between two images at which point they will not be considered the same.\n\nWill be used as `hash.hardThreshold` and `pixel.threshold` if those values are not specified\n\nDefault is `5`",
"type": "number"
}
},
"type": "object"
},
"RecentActivityRuleJSONConfig": {
"description": "Checks a user's history for any Activity (Submission/Comment) in the subreddits specified in thresholds\n\nAvailable data for [Action templating](https://github.com/FoxxMD/context-mod#action-templating):\n\n```\nsummary => comma-delimited list of subreddits that hit the threshold and their count EX subredditA(1), subredditB(4),...\nsubCount => Total number of subreddits that hit the threshold\ntotalCount => Total number of all activity occurrences in subreddits\n```",
"properties": {
@@ -780,6 +1011,10 @@
}
]
},
"imageDetection": {
"$ref": "#/definitions/ImageDetection",
"description": "When comparing submissions detect if the reference submission is an image and do a pixel-comparison to other detected image submissions.\n\n**Note:** This is an **experimental feature**"
},
"itemIs": {
"anyOf": [
{
@@ -830,7 +1065,7 @@
"thresholds": {
"description": "A list of subreddits/count criteria that may trigger this rule. ANY SubThreshold will trigger this rule.",
"items": {
"$ref": "#/definitions/SubThreshold"
"$ref": "#/definitions/ActivityThreshold"
},
"minItems": 1,
"type": "array"
@@ -867,6 +1102,49 @@
],
"type": "object"
},
"RegExp": {
"properties": {
"dotAll": {
"type": "boolean"
},
"flags": {
"type": "string"
},
"global": {
"type": "boolean"
},
"ignoreCase": {
"type": "boolean"
},
"lastIndex": {
"type": "number"
},
"multiline": {
"type": "boolean"
},
"source": {
"type": "string"
},
"sticky": {
"type": "boolean"
},
"unicode": {
"type": "boolean"
}
},
"required": [
"dotAll",
"flags",
"global",
"ignoreCase",
"lastIndex",
"multiline",
"source",
"sticky",
"unicode"
],
"type": "object"
},
"RegexCriteria": {
"properties": {
"activityMatchThreshold": {
@@ -905,16 +1183,12 @@
"type": "string"
},
"regex": {
"description": "A valid Regular Expression to test content against\n\nDo not wrap expression in forward slashes\n\nEX For the expression `/reddit|FoxxMD/` use the value should be `reddit|FoxxMD`",
"description": "A valid Regular Expression to test content against\n\nIf no flags are specified then the **global** flag is used by default",
"examples": [
"reddit|FoxxMD"
"/reddit|FoxxMD/ig"
],
"type": "string"
},
"regexFlags": {
"description": "Regex flags to use",
"type": "string"
},
"testOn": {
"default": [
"title",
@@ -1076,15 +1350,27 @@
]
},
"exclude": {
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are **NOT** found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"gapAllowance": {
@@ -1092,15 +1378,27 @@
"type": "number"
},
"include": {
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"itemIs": {
@@ -1190,40 +1488,6 @@
],
"type": "object"
},
"SubThreshold": {
"additionalProperties": false,
"description": "At least one count property must be present. If both are present then either can trigger the rule",
"minProperties": 1,
"properties": {
"subreddits": {
"description": "A list of Subreddits (by name, case-insensitive) to look for.\n\nEX [\"mealtimevideos\",\"askscience\"]",
"examples": [
[
"mealtimevideos",
"askscience"
]
],
"items": {
"type": "string"
},
"minItems": 1,
"type": "array"
},
"threshold": {
"default": ">= 1",
"description": "A string containing a comparison operator and a value to compare recent activities against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 3` => greater than 3 activities found in the listed subreddits\n* EX `<= 75%` => number of Activities in the subreddits listed are equal to or less than 75% of all Activities\n\n**Note:** If you use percentage comparison here as well as `useSubmissionAsReference` then \"all Activities\" only pertains to Activities that had the Link of the Submission, rather than all Activities from this window.",
"examples": [
">= 1"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
}
},
"required": [
"subreddits"
],
"type": "object"
},
"SubmissionState": {
"description": "Different attributes a `Submission` can be in. Only include a property if you want to check it.",
"examples": [
@@ -1233,6 +1497,11 @@
}
],
"properties": {
"age": {
"description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\nSee https://regexr.com/609n8 for an example",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
"type": "string"
},
"approved": {
"type": "boolean"
},
@@ -1267,6 +1536,16 @@
"removed": {
"type": "boolean"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"score": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"spam": {
"type": "boolean"
},
@@ -1283,6 +1562,44 @@
},
"type": "object"
},
"SubredditState": {
"description": "Different attributes a `Subreddit` can be in. Only include a property if you want to check it.",
"examples": [
{
"over18": true
}
],
"properties": {
"name": {
"anyOf": [
{
"$ref": "#/definitions/RegExp"
},
{
"type": "string"
}
],
"description": "The name of the subreddit.\n\nCan be a normal string (will check case-insensitive) or a regular expression\n\nEX `[\"mealtimevideos\", \"/onlyfans*\\/i\"]`",
"examples": [
"mealtimevideos",
"/onlyfans*/i"
]
},
"over18": {
"description": "Is subreddit NSFW/over 18?\n\n**Note**: This is **mod-controlled flag** so it is up to the mods of the subreddit to correctly mark their subreddit as NSFW",
"type": "boolean"
},
"quarantine": {
"description": "Is subreddit quarantined?",
"type": "boolean"
},
"stateDescription": {
"description": "A friendly description of what this State is trying to parse",
"type": "string"
}
},
"type": "object"
},
"UserNoteCriteria": {
"properties": {
"count": {

View File

@@ -1,6 +1,72 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"ActivityThreshold": {
"additionalProperties": false,
"description": "At least one count property must be present. If both are present then either can trigger the rule",
"minProperties": 1,
"properties": {
"commentState": {
"$ref": "#/definitions/CommentState",
"description": "When present, a Comment will only be counted if it meets this criteria",
"examples": [
{
"op": true,
"removed": false
}
]
},
"karma": {
"description": "Test the **combined karma** from Activities found in the specified subreddits\n\nValue is a string containing a comparison operator and a number of **combined karma** to compare against\n\nIf specified then both `threshold` and `karma` must be met for this `SubThreshold` to be satisfied\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 50` => greater than 50 combined karma for all found Activities in specified subreddits",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"submissionState": {
"$ref": "#/definitions/SubmissionState",
"description": "When present, a Submission will only be counted if it meets this criteria",
"examples": [
{
"over_18": true,
"removed": false
}
]
},
"subreddits": {
"description": "Activities will be counted if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"type": "array"
},
"threshold": {
"default": ">= 1",
"description": "A string containing a comparison operator and a value to compare recent activities against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 3` => greater than 3 activities found in the listed subreddits\n* EX `<= 75%` => number of Activities in the subreddits listed are equal to or less than 75% of all Activities\n\n**Note:** If you use percentage comparison here as well as `useSubmissionAsReference` then \"all Activities\" only pertains to Activities that had the Link of the Submission, rather than all Activities from this window.",
"examples": [
">= 1"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
}
},
"type": "object"
},
"ActivityWindowCriteria": {
"additionalProperties": false,
"description": "Multiple properties that may be used to define what range of Activity to retrieve.\n\nMay specify one, or both properties along with the `satisfyOn` property, to affect the retrieval behavior.",
@@ -90,7 +156,7 @@
"properties": {
"aggregateOn": {
"default": "undefined",
"description": "If `domains` is not specified this list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or reddit image/video (i.redd.it / v.redd.it)\n* If `link` is included then aggregate author's submission history which is external links but not media\n\nIf nothing is specified or list is empty (default) all domains are aggregated",
"description": "This list determines which categories of domains should be aggregated on. All aggregated domains will be tested against `threshold`\n\n* If `media` is included then aggregate author's submission history which reddit recognizes as media (youtube, vimeo, etc.)\n* If `redditMedia` is included then aggregate on author's submissions history which are media hosted on reddit: galleries, videos, and images (i.redd.it / v.redd.it)\n* If `self` is included then aggregate on author's submission history which are self-post (`self.[subreddit]`) or domain is `reddit.com`\n* If `link` is included then aggregate author's submission history which is external links and not recognized as `media` by reddit\n\nIf nothing is specified or list is empty (default) rule will only aggregate on `link` and `media` (ignores reddit-hosted content and self-posts)",
"examples": [
[
]
@@ -99,12 +165,23 @@
"enum": [
"link",
"media",
"redditMedia",
"self"
],
"type": "string"
},
"type": "array"
},
"commentState": {
"$ref": "#/definitions/CommentState",
"description": "When present, Comments from `window` will only be counted if they meet this criteria",
"examples": [
{
"op": true,
"removed": false
}
]
},
"consolidateMediaDomains": {
"default": false,
"description": "Should the criteria consolidate recognized media domains into the parent domain?\n\nSubmissions to major media domains (youtube, vimeo) can be identified by individual Channel/Author...\n\n* If `false` then domains will be aggregated at the channel level IE Youtube Channel A (2 counts), Youtube Channel B (3 counts)\n* If `true` then then media domains will be consolidated at domain level and then aggregated IE youtube.com (5 counts)",
@@ -118,7 +195,7 @@
[
]
],
"description": "A list of domains whose Activities will be tested against `threshold`.\n\nIf this is present then `aggregateOn` is ignored.\n\nThe values are tested as partial strings so you do not need to include full URLs, just the part that matters.\n\nEX `[\"youtube\"]` will match submissions with the domain `https://youtube.com/c/aChannel`\nEX `[\"youtube.com/c/bChannel\"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`\n\nIf you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`\n\n**If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**\n\nIf nothing is specified or list is empty (default) aggregate using `aggregateOn`",
"description": "A list of domains whose Activities will be tested against `threshold`.\n\nThe values are tested as partial strings so you do not need to include full URLs, just the part that matters.\n\nEX `[\"youtube\"]` will match submissions with the domain `https://youtube.com/c/aChannel`\nEX `[\"youtube.com/c/bChannel\"]` will NOT match submissions with the domain `https://youtube.com/c/aChannel`\n\nIf you wish to aggregate on self-posts for a subreddit use the syntax `self.[subreddit]` EX `self.AskReddit`\n\n**If this Rule is part of a Check for a Submission and you wish to aggregate on the domain of the Submission use the special string `AGG:SELF`**\n\nIf nothing is specified or list is empty (default) aggregate using `aggregateOn`",
"items": {
"type": "string"
},
@@ -133,27 +210,37 @@
"type": "boolean"
},
"exclude": {
"description": "Do not include Activities from this list of Subreddits (by name, case-insensitive)\n\nWill be ignored if `include` is present.\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "When present, Activities WILL NOT be counted if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
},
"minItems": 1,
"type": "array"
},
"include": {
"description": "Only include Activities from this list of Subreddits (by name, case-insensitive)\n\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "When present, Activities WILL ONLY be counted if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
},
"minItems": 1,
"type": "array"
},
"minActivityCount": {
@@ -164,6 +251,16 @@
"name": {
"type": "string"
},
"submissionState": {
"$ref": "#/definitions/SubmissionState",
"description": "When present, Submissions from `window` will only be counted if they meet this criteria",
"examples": [
{
"over_18": true,
"removed": false
}
]
},
"threshold": {
"default": "> 10%",
"description": "A string containing a comparison operator and a value to compare comments against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 12` => greater than 12 activities originate from same attribution\n* EX `<= 10%` => less than 10% of all Activities have the same attribution",
@@ -275,11 +372,6 @@
],
"pattern": "^[a-zA-Z]([\\w -]*[\\w])?$",
"type": "string"
},
"useSubmissionAsReference": {
"default": true,
"description": "If activity is a Submission and is a link (not self-post) then only look at Submissions that contain this link, otherwise consider all activities.",
"type": "boolean"
}
},
"required": [
@@ -315,6 +407,26 @@
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"description": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "string"
}
],
"description": "An (array of) string/regular expression to test contents of an Author's profile description against\n\nIf no flags are specified then the **insensitive** flag is used by default\n\nIf using an array then if **any** value in the array passes the description test passes",
"examples": [
[
"/test$/i",
"look for this string literal"
]
]
},
"flairCssClass": {
"description": "A list of (user) flair css class values from the subreddit to match against",
"examples": [
@@ -355,6 +467,10 @@
},
"type": "array"
},
"shadowBanned": {
"description": "Is the author shadowbanned?\n\nThis is determined by trying to retrieve the author's profile. If a 404 is returned it is likely they are shadowbanned",
"type": "boolean"
},
"totalKarma": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
@@ -493,6 +609,11 @@
}
],
"properties": {
"age": {
"description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\nSee https://regexr.com/609n8 for an example",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
"type": "string"
},
"approved": {
"type": "boolean"
},
@@ -515,6 +636,16 @@
"removed": {
"type": "boolean"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"score": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"spam": {
"type": "boolean"
},
@@ -588,23 +719,28 @@
"type": "object"
},
"HistoryCriteria": {
"description": "If both `submission` and `comment` are defined then criteria will only trigger if BOTH thresholds are met",
"description": "Criteria will only trigger if ALL present thresholds (comment, submission, total) are met",
"properties": {
"comment": {
"description": "A string containing a comparison operator and a value to compare comments against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 comments\n* EX `<= 75%` => comments are equal to or less than 75% of all Activities\n\nIf your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:\n\n* EX `> 100 OP` => greater than 100 comments as OP\n* EX `<= 25% as OP` => Comments as OP were less then or equal to 25% of **all Comments**",
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) comments against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 comments\n* EX `<= 75%` => comments are equal to or less than 75% of unfiltered Activities\n\nIf your string also contains the text `OP` somewhere **after** `<number>[percent sign]`...:\n\n* EX `> 100 OP` => greater than 100 filtered comments as OP\n* EX `<= 25% as OP` => **Filtered** comments as OP were less than or equal to 25% of **unfiltered Comments**",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"minActivityCount": {
"default": 5,
"description": "The minimum number of activities that must exist from the `window` results for this criteria to run",
"description": "The minimum number of **filtered** activities that must exist from the `window` results for this criteria to run",
"type": "number"
},
"name": {
"type": "string"
},
"submission": {
"description": "A string containing a comparison operator and a value to compare submissions against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 submissions\n* EX `<= 75%` => submissions are equal to or less than 75% of all Activities",
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`, if present) submissions against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 100` => greater than 100 filtered submissions\n* EX `<= 75%` => filtered submissions are equal to or less than 75% of unfiltered Activities",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"total": {
"description": "A string containing a comparison operator and a value to compare **filtered** (using `include` or `exclude`) activities against\n\n**Note:** This is only useful if using `include` or `exclude` otherwise percent will always be 100% and total === activityTotal\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign] [OP]`\n\n* EX `> 100` => greater than 100 filtered activities\n* EX `<= 75%` => filtered activities are equal to or less than 75% of all Activities",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
@@ -673,27 +809,51 @@
"type": "array"
},
"exclude": {
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are **NOT** found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`\n\n**Note:** This affects **post-window retrieval** activities. So that:\n\n* `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering\n* all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**\n* -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"include": {
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are found in this list of Subreddits.\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`\n\n **Note:** This affects **post-window retrieval** activities. So that:\n\n* `activityTotal` is number of activities retrieved from `window` -- NOT post-filtering\n* all comparisons using **percentages** will compare **post-filtering** results against **activity count from window**\n* -- to run this rule where all activities are only from include/exclude filtering instead use include/exclude in `window`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"itemIs": {
@@ -735,6 +895,77 @@
],
"type": "object"
},
"ImageDetection": {
"description": "When comparing submissions detect if the reference submission is an image and do a pixel-comparison to other detected image submissions.\n\n**Note:** This is an **experimental feature**",
"properties": {
"enable": {
"description": "Is image detection enabled?",
"type": "boolean"
},
"fetchBehavior": {
"default": "extension",
"description": "Determines how and when to check if a URL is an image\n\n**Note:** After fetching a URL the **Content-Type** is validated to contain `image` before detection occurs\n\n**When `extension`:** (default)\n\n* Only URLs that end in known image extensions (.png, .jpg, etc...) are fetched\n\n**When `unknown`:**\n\n* URLs that end in known image extensions (.png, .jpg, etc...) are fetched\n* URLs with no extension or unknown (IE non-video, non-doc, etc...) are fetched\n\n**When `all`:**\n\n* All submissions that have URLs (non-self) will be fetched, regardless of extension\n* **Note:** This can be bandwidth/CPU intensive if history window is large so use with care",
"enum": [
"all",
"extension",
"unknown"
],
"type": "string"
},
"hash": {
"description": "Use perceptual hashing (blockhash-js) to compare images\n\nPros:\n\n* very fast\n* low cpu/memory usage\n* results can be cached\n\nCons:\n\n* not as accurate as pixel comparison\n* weaker for text-heavy images\n* mostly color-blind\n\nBest uses:\n\n* Detecting (general) duplicate images\n* Comparing large number of images",
"properties": {
"bits": {
"default": 32,
"description": "Bit count determines accuracy of hash and granularity of hash comparison (comparison to other hashes)\n\nDefault is `32`\n\n**NOTE:** Hashes of different sizes (bits) cannot be compared. If you are caching results make sure all rules where results may be shared use the same bit count to ensure hashes can be compared. Otherwise hashes will be recomputed.",
"type": "number"
},
"enable": {
"default": true,
"description": "Enabled by default.\n\nIf both `hash` and `pixel` are enabled then `pixel` will be used to verify image comparison when hashes matches",
"type": "boolean"
},
"hardThreshold": {
"description": "High Confidence Threshold\n\nIf the difference in comparison is equal to or less than this number the images are considered the same and pixel comparison WILL NOT occur\n\nDefaults to the parent-level `threshold` value if not present\n\nUse `null` if you want pixel comparison to ALWAYS occur (softThreshold must be present)",
"type": [
"null",
"number"
]
},
"softThreshold": {
"description": "Low Confidence Threshold -- only used if `pixel` is enabled\n\nIf the difference in comparison is\n\n1) equal to or less than this value and\n2) the value is greater than `hardThreshold`\n\nthe images will be compared using the `pixel` method",
"type": "number"
},
"ttl": {
"description": "Number of seconds to cache image hash",
"type": "number"
}
},
"type": "object"
},
"pixel": {
"description": "Use pixel counting to compare images\n\nPros:\n\n* most accurate\n* strong with text or color-only changes\n\nCons:\n\n* much slower than hashing\n* memory/cpu intensive\n\nBest uses:\n\n* Comparing text-only images\n* When comparison requires a high degree of accuracy or changes are subtle",
"properties": {
"enable": {
"default": false,
"description": "Disabled by default.",
"type": "boolean"
},
"threshold": {
"description": "The percentage, as a whole number, of pixels that are **different** between the two images at which point the images are not considered the same.",
"type": "number"
}
},
"type": "object"
},
"threshold": {
"default": 5,
"description": "The percentage, as a whole number, of difference between two images at which point they will not be considered the same.\n\nWill be used as `hash.hardThreshold` and `pixel.threshold` if those values are not specified\n\nDefault is `5`",
"type": "number"
}
},
"type": "object"
},
"RecentActivityRuleJSONConfig": {
"description": "Checks a user's history for any Activity (Submission/Comment) in the subreddits specified in thresholds\n\nAvailable data for [Action templating](https://github.com/FoxxMD/context-mod#action-templating):\n\n```\nsummary => comma-delimited list of subreddits that hit the threshold and their count EX subredditA(1), subredditB(4),...\nsubCount => Total number of subreddits that hit the threshold\ntotalCount => Total number of all activity occurrences in subreddits\n```",
"properties": {
@@ -757,6 +988,10 @@
}
]
},
"imageDetection": {
"$ref": "#/definitions/ImageDetection",
"description": "When comparing submissions detect if the reference submission is an image and do a pixel-comparison to other detected image submissions.\n\n**Note:** This is an **experimental feature**"
},
"itemIs": {
"anyOf": [
{
@@ -807,7 +1042,7 @@
"thresholds": {
"description": "A list of subreddits/count criteria that may trigger this rule. ANY SubThreshold will trigger this rule.",
"items": {
"$ref": "#/definitions/SubThreshold"
"$ref": "#/definitions/ActivityThreshold"
},
"minItems": 1,
"type": "array"
@@ -844,6 +1079,49 @@
],
"type": "object"
},
"RegExp": {
"properties": {
"dotAll": {
"type": "boolean"
},
"flags": {
"type": "string"
},
"global": {
"type": "boolean"
},
"ignoreCase": {
"type": "boolean"
},
"lastIndex": {
"type": "number"
},
"multiline": {
"type": "boolean"
},
"source": {
"type": "string"
},
"sticky": {
"type": "boolean"
},
"unicode": {
"type": "boolean"
}
},
"required": [
"dotAll",
"flags",
"global",
"ignoreCase",
"lastIndex",
"multiline",
"source",
"sticky",
"unicode"
],
"type": "object"
},
"RegexCriteria": {
"properties": {
"activityMatchThreshold": {
@@ -882,16 +1160,12 @@
"type": "string"
},
"regex": {
"description": "A valid Regular Expression to test content against\n\nDo not wrap expression in forward slashes\n\nEX For the expression `/reddit|FoxxMD/` use the value should be `reddit|FoxxMD`",
"description": "A valid Regular Expression to test content against\n\nIf no flags are specified then the **global** flag is used by default",
"examples": [
"reddit|FoxxMD"
"/reddit|FoxxMD/ig"
],
"type": "string"
},
"regexFlags": {
"description": "Regex flags to use",
"type": "string"
},
"testOn": {
"default": [
"title",
@@ -1053,15 +1327,27 @@
]
},
"exclude": {
"description": "Do not include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are **NOT** found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"gapAllowance": {
@@ -1069,15 +1355,27 @@
"type": "number"
},
"include": {
"description": "Only include Submissions from this list of Subreddits (by name, case-insensitive)\n\nEX `[\"mealtimevideos\",\"askscience\"]`",
"description": "If present, activities will be counted only if they are found in this list of Subreddits\n\nEach value in the list can be either:\n\n * string (name of subreddit)\n * regular expression to run on the subreddit name\n * `SubredditState`\n\nEX `[\"mealtimevideos\",\"askscience\", \"/onlyfans*\\/i\", {\"over18\": true}]`",
"examples": [
"mealtimevideos",
"askscience"
[
"mealtimevideos",
"askscience",
"/onlyfans*/i",
{
"over18": true
}
]
],
"items": {
"type": "string"
"anyOf": [
{
"$ref": "#/definitions/SubredditState"
},
{
"type": "string"
}
]
},
"minItems": 1,
"type": "array"
},
"itemIs": {
@@ -1167,40 +1465,6 @@
],
"type": "object"
},
"SubThreshold": {
"additionalProperties": false,
"description": "At least one count property must be present. If both are present then either can trigger the rule",
"minProperties": 1,
"properties": {
"subreddits": {
"description": "A list of Subreddits (by name, case-insensitive) to look for.\n\nEX [\"mealtimevideos\",\"askscience\"]",
"examples": [
[
"mealtimevideos",
"askscience"
]
],
"items": {
"type": "string"
},
"minItems": 1,
"type": "array"
},
"threshold": {
"default": ">= 1",
"description": "A string containing a comparison operator and a value to compare recent activities against\n\nThe syntax is `(< OR > OR <= OR >=) <number>[percent sign]`\n\n* EX `> 3` => greater than 3 activities found in the listed subreddits\n* EX `<= 75%` => number of Activities in the subreddits listed are equal to or less than 75% of all Activities\n\n**Note:** If you use percentage comparison here as well as `useSubmissionAsReference` then \"all Activities\" is only pertains to Activities that had the Link of the Submission, rather than all Activities from this window.",
"examples": [
">= 1"
],
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
}
},
"required": [
"subreddits"
],
"type": "object"
},
"SubmissionState": {
"description": "Different attributes a `Submission` can be in. Only include a property if you want to check it.",
"examples": [
@@ -1210,6 +1474,11 @@
}
],
"properties": {
"age": {
"description": "A duration and how to compare it against a value\n\nThe syntax is `(< OR > OR <= OR >=) <number> <unit>` EX `> 100 days`, `<= 2 months`\n\n* EX `> 100 days` => Passes if the date being compared is before 100 days ago\n* EX `<= 2 months` => Passes if the date being compared is after or equal to 2 months\n\nUnit must be one of [DayJS Duration units](https://day.js.org/docs/en/durations/creating)\n\n[See] https://regexr.com/609n8 for example",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(days|weeks|months|years|hours|minutes|seconds|milliseconds)\\s*$",
"type": "string"
},
"approved": {
"type": "boolean"
},
@@ -1244,6 +1513,16 @@
"removed": {
"type": "boolean"
},
"reports": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"score": {
"description": "A string containing a comparison operator and a value to compare against\n\nThe syntax is `(< OR > OR <= OR >=) <number>`\n\n* EX `> 100` => greater than 100",
"pattern": "^\\s*(>|>=|<|<=)\\s*(\\d+)\\s*(%?)(.*)$",
"type": "string"
},
"spam": {
"type": "boolean"
},
@@ -1260,6 +1539,44 @@
},
"type": "object"
},
"SubredditState": {
"description": "Different attributes a `Subreddit` can be in. Only include a property if you want to check it.",
"examples": [
{
"over18": true
}
],
"properties": {
"name": {
"anyOf": [
{
"$ref": "#/definitions/RegExp"
},
{
"type": "string"
}
],
"description": "The name the subreddit.\n\nCan be a normal string (will check case-insensitive) or a regular expression\n\nEX `[\"mealtimevideos\", \"/onlyfans*\\/i\"]`",
"examples": [
"mealtimevideos",
"/onlyfans*/i"
]
},
"over18": {
"description": "Is subreddit NSFW/over 18?\n\n**Note**: This is **mod-controlled flag** so it is up to the mods of the subreddit to correctly mark their subreddit as NSFW",
"type": "boolean"
},
"quarantine": {
"description": "Is subreddit quarantined?",
"type": "boolean"
},
"stateDescription": {
"description": "A friendly description of what this State is trying to parse",
"type": "string"
}
},
"type": "object"
},
"UserNoteCriteria": {
"properties": {
"count": {

View File

@@ -3,19 +3,21 @@ import {Logger} from "winston";
import {SubmissionCheck} from "../Check/SubmissionCheck";
import {CommentCheck} from "../Check/CommentCheck";
import {
cacheStats,
cacheStats, createHistoricalStatsDisplay,
createRetryHandler,
determineNewResults, formatNumber,
mergeArr, parseFromJsonOrYamlToObject, pollingInfo, sleep, totalFromMapStats,
determineNewResults, findLastIndex, formatNumber,
mergeArr, parseFromJsonOrYamlToObject, pollingInfo, resultsSummary, sleep, totalFromMapStats, triggeredIndicator,
} from "../util";
import {Poll} from "snoostorm";
import pEvent from "p-event";
import {RuleResult} from "../Rule";
import {ConfigBuilder, buildPollingOptions} from "../ConfigBuilder";
import {
ActionedEvent,
ActionResult,
DEFAULT_POLLING_INTERVAL,
DEFAULT_POLLING_LIMIT, Invokee,
ManagerOptions, ManagerStateChangeOption, PAUSED,
ManagerOptions, ManagerStateChangeOption, ManagerStats, PAUSED,
PollingOptionsStrong, ResourceStats, RUNNING, RunState, STOPPED, SYSTEM, USER
} from "../Common/interfaces";
import Submission from "snoowrap/dist/objects/Submission";
@@ -36,6 +38,9 @@ import {queue, QueueObject} from 'async';
import {JSONConfig} from "../JsonConfig";
import {CheckStructuredJson} from "../Check";
import NotificationManager from "../Notification/NotificationManager";
import action from "../Web/Server/routes/authenticated/user/action";
import {createHistoricalDefaults, historicalDefaults} from "../Common/defaults";
import {ExtendedSnoowrap} from "../Utils/SnoowrapClients";
export interface RunningState {
state: RunState,
@@ -46,6 +51,7 @@ export interface runCheckOptions {
checkNames?: string[],
delayUntil?: number,
dryRun?: boolean,
refresh?: boolean,
}
export interface CheckTask {
@@ -61,42 +67,15 @@ export interface RuntimeManagerOptions extends ManagerOptions {
maxWorkers: number;
}
export interface ManagerStats {
eventsCheckedTotal: number
eventsCheckedSinceStartTotal: number
eventsAvg: number
checksRunTotal: number
checksRunSinceStartTotal: number
checksTriggered: number
checksTriggeredTotal: number
checksTriggeredSinceStart: number
checksTriggeredSinceStartTotal: number
rulesRunTotal: number
rulesRunSinceStartTotal: number
rulesCachedTotal: number
rulesCachedSinceStartTotal: number
rulesTriggeredTotal: number
rulesTriggeredSinceStartTotal: number
rulesAvg: number
actionsRun: number
actionsRunTotal: number
actionsRunSinceStart: number,
actionsRunSinceStartTotal: number
cache: {
provider: string,
currentKeyCount: number,
isShared: boolean,
totalRequests: number,
totalMiss: number,
missPercent: string,
requestRate: number,
types: ResourceStats
},
interface QueuedIdentifier {
id: string,
shouldRefresh: boolean
state: 'queued' | 'processing'
}
export class Manager {
subreddit: Subreddit;
client: Snoowrap;
client: ExtendedSnoowrap;
logger: Logger;
botName: string;
pollOptions: PollingOptionsStrong[] = [];
@@ -117,6 +96,15 @@ export class Manager {
globalDryRun?: boolean;
emitter: EventEmitter = new EventEmitter();
queue: QueueObject<CheckTask>;
// firehose is used to ensure all activities from different polling streams are unique
// that is -- if the same activities is in both modqueue and unmoderated we don't want to process the activity twice or use stale data
//
// so all activities get queued to firehose, it keeps track of items by id (using queuedItemsMeta)
// and ensures that if any activities are ingested while they are ALSO currently queued or working then they are properly handled by either
// 1) if queued, do not re-queue but instead tell worker to refresh before processing
// 2) if currently processing then re-queue but also refresh before processing
firehose: QueueObject<CheckTask>;
queuedItemsMeta: QueuedIdentifier[] = [];
globalMaxWorkers: number;
subMaxWorkers?: number;
@@ -145,49 +133,22 @@ export class Manager {
// use by api nanny to slow event consumption
delayBy?: number;
eventsCheckedTotal: number = 0;
eventsCheckedSinceStartTotal: number = 0;
eventsSample: number[] = [];
eventsSampleInterval: any;
eventsRollingAvg: number = 0;
checksRunTotal: number = 0;
checksRunSinceStartTotal: number = 0;
checksTriggered: Map<string, number> = new Map();
checksTriggeredSinceStart: Map<string, number> = new Map();
rulesRunTotal: number = 0;
rulesRunSinceStartTotal: number = 0;
rulesCachedTotal: number = 0;
rulesCachedSinceStartTotal: number = 0;
rulesTriggeredTotal: number = 0;
rulesTriggeredSinceStartTotal: number = 0;
rulesUniqueSample: number[] = [];
rulesUniqueSampleInterval: any;
rulesUniqueRollingAvg: number = 0;
actionsRun: Map<string, number> = new Map();
actionsRunSinceStart: Map<string, number> = new Map();
actionedEvents: ActionedEvent[] = [];
getStats = async (): Promise<ManagerStats> => {
const data: any = {
eventsCheckedTotal: this.eventsCheckedTotal,
eventsCheckedSinceStartTotal: this.eventsCheckedSinceStartTotal,
eventsAvg: formatNumber(this.eventsRollingAvg),
checksRunTotal: this.checksRunTotal,
checksRunSinceStartTotal: this.checksRunSinceStartTotal,
checksTriggered: this.checksTriggered,
checksTriggeredTotal: totalFromMapStats(this.checksTriggered),
checksTriggeredSinceStart: this.checksTriggeredSinceStart,
checksTriggeredSinceStartTotal: totalFromMapStats(this.checksTriggeredSinceStart),
rulesRunTotal: this.rulesRunTotal,
rulesRunSinceStartTotal: this.rulesRunSinceStartTotal,
rulesCachedTotal: this.rulesCachedTotal,
rulesCachedSinceStartTotal: this.rulesCachedSinceStartTotal,
rulesTriggeredTotal: this.rulesTriggeredTotal,
rulesTriggeredSinceStartTotal: this.rulesTriggeredSinceStartTotal,
rulesAvg: formatNumber(this.rulesUniqueRollingAvg),
actionsRun: this.actionsRun,
actionsRunTotal: totalFromMapStats(this.actionsRun),
actionsRunSinceStart: this.actionsRunSinceStart,
actionsRunSinceStartTotal: totalFromMapStats(this.actionsRunSinceStart),
historical: {
lastReload: createHistoricalStatsDisplay(createHistoricalDefaults()),
allTime: createHistoricalStatsDisplay(createHistoricalDefaults()),
},
cache: {
provider: 'none',
currentKeyCount: 0,
@@ -203,6 +164,7 @@ export class Manager {
if (this.resources !== undefined) {
const resStats = await this.resources.getStats();
data.historical = this.resources.getHistoricalDisplayStats();
data.cache = resStats.cache;
data.cache.currentKeyCount = await this.resources.getCacheKeyCount();
data.cache.isShared = this.resources.cacheSettingsHash === 'default';
@@ -219,7 +181,7 @@ export class Manager {
return this.displayLabel;
}
constructor(sub: Subreddit, client: Snoowrap, logger: Logger, cacheManager: BotResourcesManager, opts: RuntimeManagerOptions = {botName: 'ContextMod', maxWorkers: 1}) {
constructor(sub: Subreddit, client: ExtendedSnoowrap, logger: Logger, cacheManager: BotResourcesManager, opts: RuntimeManagerOptions = {botName: 'ContextMod', maxWorkers: 1}) {
const {dryRun, sharedModqueue = false, wikiLocation = 'botconfig/contextbot', botName, maxWorkers} = opts;
this.displayLabel = opts.nickname || `${sub.display_name_prefixed}`;
const getLabels = this.getCurrentLabels;
@@ -246,11 +208,13 @@ export class Manager {
this.queue = this.generateQueue(this.getMaxWorkers(this.globalMaxWorkers));
this.queue.pause();
this.firehose = this.generateFirehose();
this.eventsSampleInterval = setInterval((function(self) {
return function() {
const et = self.resources !== undefined ? self.resources.stats.historical.allTime.eventsCheckedTotal : 0;
const rollingSample = self.eventsSample.slice(0, 7)
rollingSample.unshift(self.eventsCheckedTotal)
rollingSample.unshift(et)
self.eventsSample = rollingSample;
const diff = self.eventsSample.reduceRight((acc: number[], curr, index) => {
if(self.eventsSample[index + 1] !== undefined) {
@@ -270,7 +234,8 @@ export class Manager {
this.rulesUniqueSampleInterval = setInterval((function(self) {
return function() {
const rollingSample = self.rulesUniqueSample.slice(0, 7)
rollingSample.unshift(self.rulesRunTotal - self.rulesCachedTotal);
const rt = self.resources !== undefined ? self.resources.stats.historical.allTime.rulesRunTotal - self.resources.stats.historical.allTime.rulesCachedTotal : 0;
rollingSample.unshift(rt);
self.rulesUniqueSample = rollingSample;
const diff = self.rulesUniqueSample.reduceRight((acc: number[], curr, index) => {
if(self.rulesUniqueSample[index + 1] !== undefined) {
@@ -306,6 +271,32 @@ export class Manager {
return maxWorkers;
}
protected generateFirehose() {
return queue(async (task: CheckTask, cb) => {
// items in queuedItemsMeta will be processing FIFO so earlier elements (by index) are older
//
// if we insert the same item again because it is currently being processed AND THEN we get the item AGAIN we only want to update the newest meta
// so search the array backwards to get the neweset only
const queuedItemIndex = findLastIndex(this.queuedItemsMeta, x => x.id === task.activity.id);
if(queuedItemIndex !== -1) {
const itemMeta = this.queuedItemsMeta[queuedItemIndex];
let msg = `Item ${itemMeta.id} is already ${itemMeta.state}.`;
if(itemMeta.state === 'queued') {
this.logger.debug(`${msg} Flagging to refresh data before processing.`);
this.queuedItemsMeta.splice(queuedItemIndex, 1, {...itemMeta, shouldRefresh: true});
} else {
this.logger.debug(`${msg} Re-queuing item but will also refresh data before processing.`);
this.queuedItemsMeta.push({id: task.activity.id, shouldRefresh: true, state: 'queued'});
this.queue.push(task);
}
} else {
this.queuedItemsMeta.push({id: task.activity.id, shouldRefresh: false, state: 'queued'});
this.queue.push(task);
}
}
, 1);
}
protected generateQueue(maxWorkers: number) {
if (maxWorkers > 1) {
this.logger.warn(`Setting max queue workers above 1 (specified: ${maxWorkers}) may have detrimental effects to log readability and api usage. Consult the documentation before using this advanced/experimental feature.`);
@@ -316,7 +307,16 @@ export class Manager {
this.logger.debug(`SOFT API LIMIT MODE: Delaying Event run by ${this.delayBy} seconds`);
await sleep(this.delayBy * 1000);
}
await this.runChecks(task.checkType, task.activity, task.options);
const queuedItemIndex = this.queuedItemsMeta.findIndex(x => x.id === task.activity.id);
try {
const itemMeta = this.queuedItemsMeta[queuedItemIndex];
this.queuedItemsMeta.splice(queuedItemIndex, 1, {...itemMeta, state: 'processing'});
await this.runChecks(task.checkType, task.activity, {...task.options, refresh: itemMeta.shouldRefresh});
} finally {
// always remove item meta regardless of success or failure since we are done with it meow
this.queuedItemsMeta.splice(queuedItemIndex, 1);
}
}
, maxWorkers);
q.error((err, task) => {
@@ -331,7 +331,7 @@ export class Manager {
return q;
}
protected parseConfigurationFromObject(configObj: object) {
protected async parseConfigurationFromObject(configObj: object) {
try {
const configBuilder = new ConfigBuilder({logger: this.logger});
const validJson = configBuilder.validateJson(configObj);
@@ -381,7 +381,7 @@ export class Manager {
caching,
client: this.client,
};
this.resources = this.cacheManager.set(this.subreddit.display_name, resourceConfig);
this.resources = await this.cacheManager.set(this.subreddit.display_name, resourceConfig);
this.resources.setLogger(this.logger);
this.logger.info('Subreddit-specific options updated');
@@ -476,7 +476,7 @@ export class Manager {
throw new ConfigParseError('Could not parse wiki page contents as JSON or YAML')
}
this.parseConfigurationFromObject(configObj);
await this.parseConfigurationFromObject(configObj);
this.logger.info('Checks updated');
if(!suppressNotification) {
@@ -493,8 +493,6 @@ export class Manager {
async runChecks(checkType: ('Comment' | 'Submission'), activity: (Submission | Comment), options?: runCheckOptions): Promise<void> {
const checks = checkType === 'Comment' ? this.commentChecks : this.submissionChecks;
let item = activity;
this.eventsCheckedTotal++;
this.eventsCheckedSinceStartTotal++;
const itemId = await item.id;
let allRuleResults: RuleResult[] = [];
const itemIdentifier = `${checkType === 'Submission' ? 'SUB' : 'COM'} ${itemId}`;
@@ -512,8 +510,11 @@ export class Manager {
checkNames = [],
delayUntil,
dryRun,
refresh = false,
} = options || {};
let wasRefreshed = false;
if (delayUntil !== undefined) {
const created = dayjs.unix(item.created_utc);
const diff = dayjs().diff(created, 's');
@@ -522,8 +523,16 @@ export class Manager {
await sleep(delayUntil - diff);
// @ts-ignore
item = await activity.refresh();
wasRefreshed = true;
}
}
// refresh signal from firehose if activity was ingested multiple times before processing or re-queued while processing
// want to make sure we have the most recent data
if(!wasRefreshed && refresh === true) {
this.logger.verbose('Refreshed data (probably due to signal from firehose)');
// @ts-ignore
item = await activity.refresh();
}
const startingApiLimit = this.client.ratelimitRemaining;
@@ -540,10 +549,25 @@ export class Manager {
let checksRun = 0;
let actionsRun = 0;
let totalRulesRun = 0;
let runActions: Action[] = [];
let runActions: ActionResult[] = [];
let actionedEvent: ActionedEvent = {
subreddit: this.subreddit.display_name_prefixed,
activity: {
peek: ePeek,
link: item.permalink
},
author: item.author.name,
timestamp: Date.now(),
check: '',
ruleSummary: '',
ruleResults: [],
actionResults: [],
}
let triggered = false;
let triggeredCheckName;
const checksRunNames = [];
const cachedCheckNames = [];
try {
let triggered = false;
for (const check of checks) {
if (checkNames.length > 0 && !checkNames.map(x => x.toLowerCase()).some(x => x === check.name.toLowerCase())) {
this.logger.warn(`Check ${check.name} not in array of requested checks to run, skipping...`);
@@ -553,16 +577,27 @@ export class Manager {
this.logger.info(`Check ${check.name} not run because it is not enabled, skipping...`);
continue;
}
checksRunNames.push(check.name);
checksRun++;
triggered = false;
let isFromCache = false;
let currentResults: RuleResult[] = [];
try {
const [checkTriggered, checkResults] = await check.runRules(item, allRuleResults);
await check.setCacheResult(item, checkTriggered);
const [checkTriggered, checkResults, fromCache = false] = await check.runRules(item, allRuleResults);
isFromCache = fromCache;
if(!fromCache) {
await check.setCacheResult(item, {result: checkTriggered, ruleResults: checkResults});
} else {
cachedCheckNames.push(check.name);
}
currentResults = checkResults;
totalRulesRun += checkResults.length;
allRuleResults = allRuleResults.concat(determineNewResults(allRuleResults, checkResults));
triggered = checkTriggered;
if(triggered && fromCache && !check.cacheUserResult.runActions) {
this.logger.info('Check was triggered but cache result options specified NOT to run actions...counting as check NOT triggered');
triggered = false;
}
} catch (e) {
if (e.logged !== true) {
this.logger.warn(`Running rules for Check ${check.name} failed due to uncaught exception`, e);
@@ -570,13 +605,19 @@ export class Manager {
}
if (triggered) {
this.checksTriggered.set(check.name, (this.checksTriggered.get(check.name) || 0) + 1);
this.checksTriggeredSinceStart.set(check.name, (this.checksTriggeredSinceStart.get(check.name) || 0) + 1);
triggeredCheckName = check.name;
actionedEvent.check = check.name;
actionedEvent.ruleResults = currentResults;
if(isFromCache) {
actionedEvent.ruleSummary = `Check result was found in cache: ${triggeredIndicator(true)}`;
} else {
actionedEvent.ruleSummary = resultsSummary(currentResults, check.condition);
}
runActions = await check.runActions(item, currentResults.filter(x => x.triggered), dryRun);
actionsRun = runActions.length;
if(check.notifyOnTrigger) {
const ar = runActions.map(x => x.getActionUniqueName()).join(', ');
const ar = runActions.map(x => x.name).join(', ');
this.notificationManager.handle('eventActioned', 'Check Triggered', `Check "${check.name}" was triggered on Event: \n\n ${ePeek} \n\n with the following actions run: ${ar}`);
}
break;
@@ -593,22 +634,9 @@ export class Manager {
}
} finally {
try {
const cachedTotal = totalRulesRun - allRuleResults.length;
const triggeredRulesTotal = allRuleResults.filter(x => x.triggered).length;
this.checksRunTotal += checksRun;
this.checksRunSinceStartTotal += checksRun;
this.rulesRunTotal += totalRulesRun;
this.rulesRunSinceStartTotal += totalRulesRun;
this.rulesCachedTotal += cachedTotal;
this.rulesCachedSinceStartTotal += cachedTotal;
this.rulesTriggeredTotal += triggeredRulesTotal;
this.rulesTriggeredSinceStartTotal += triggeredRulesTotal;
for (const a of runActions) {
const name = a.getActionUniqueName();
this.actionsRun.set(name, (this.actionsRun.get(name) || 0) + 1);
this.actionsRunSinceStart.set(name, (this.actionsRunSinceStart.get(name) || 0) + 1)
actionedEvent.actionResults = runActions;
if(triggered) {
await this.resources.addActionedEvent(actionedEvent);
}
this.logger.verbose(`Run Stats: Checks ${checksRun} | Rules => Total: ${totalRulesRun} Unique: ${allRuleResults.length} Cached: ${totalRulesRun - allRuleResults.length} Rolling Avg: ~${formatNumber(this.rulesUniqueRollingAvg)}/s | Actions ${actionsRun}`);
@@ -616,6 +644,18 @@ export class Manager {
this.currentLabels = [];
} catch (err) {
this.logger.error('Error occurred while cleaning up Activity check and generating stats', err);
} finally {
this.resources.updateHistoricalStats({
eventsCheckedTotal: 1,
eventsActionedTotal: triggered ? 1 : 0,
checksTriggered: triggeredCheckName !== undefined ? [triggeredCheckName] : [],
checksRun: checksRunNames,
checksFromCache: cachedCheckNames,
actionsRun: runActions.map(x => x.name),
rulesRun: allRuleResults.map(x => x.name),
rulesTriggered: allRuleResults.filter(x => x.triggered).map(x => x.name),
rulesCachedTotal: totalRulesRun - allRuleResults.length,
});
}
}
}
@@ -633,7 +673,8 @@ export class Manager {
pollOn,
limit,
interval,
delayUntil
delayUntil,
clearProcessed,
} = pollOpt;
let stream: SPoll<Snoowrap.Submission | Snoowrap.Comment>;
let modStreamType: string | undefined;
@@ -649,6 +690,7 @@ export class Manager {
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
clearProcessed,
});
}
break;
@@ -662,6 +704,7 @@ export class Manager {
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
clearProcessed
});
}
break;
@@ -670,6 +713,7 @@ export class Manager {
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
clearProcessed
});
break;
case 'newComm':
@@ -677,6 +721,7 @@ export class Manager {
subreddit: this.subreddit.display_name,
limit: limit,
pollTime: interval * 1000,
clearProcessed
});
break;
}
@@ -710,15 +755,14 @@ export class Manager {
checkType = 'Comment';
}
if (checkType !== undefined) {
this.queue.push({checkType, activity: item, options: {delayUntil}})
this.firehose.push({checkType, activity: item, options: {delayUntil}})
}
};
stream.on('item', onItem);
if (modStreamType !== undefined) {
this.modStreamCallbacks.set(pollOn, onItem);
} else {
stream.on('item', onItem);
// @ts-ignore
stream.on('error', async (err: any) => {
@@ -743,14 +787,19 @@ export class Manager {
} else if (!this.validConfigLoaded) {
this.logger.warn('Cannot start activity processing queue while manager has an invalid configuration');
} else {
if(this.queueState.state === STOPPED) {
// extra precaution to make sure queue meta is cleared before starting queue
this.queuedItemsMeta = [];
}
this.queue.resume();
this.firehose.resume();
this.logger.info(`Activity processing queue started RUNNING with ${this.queue.length()} queued activities`);
this.queueState = {
state: RUNNING,
causedBy
}
if(!suppressNotification) {
this.notificationManager.handle('runStateChanged', 'Queue Started', reason, causedBy)
this.notificationManager.handle('runStateChanged', 'Queue Started', reason, causedBy);
}
}
}
@@ -812,7 +861,9 @@ export class Manager {
this.logger.verbose(`Activity processing queue is stopping...waiting for ${this.queue.running()} activities to finish processing`);
}
this.logger.info(`Activity processing queue stopped by ${causedBy} and ${this.queue.length()} queued activities cleared (waited ${dayjs().diff(pauseWaitStart, 's')} seconds while activity processing finished)`);
this.firehose.kill();
this.queue.kill();
this.queuedItemsMeta = [];
}
this.queueState = {
@@ -891,18 +942,12 @@ export class Manager {
s.end();
}
this.streams = [];
for (const [k, v] of this.modStreamCallbacks) {
const stream = this.cacheManager.modStreams.get(k) as Poll<Snoowrap.Submission | Snoowrap.Comment>;
stream.removeListener('item', v);
}
// for (const [k, v] of this.modStreamCallbacks) {
// const stream = this.cacheManager.modStreams.get(k) as Poll<Snoowrap.Submission | Snoowrap.Comment>;
// stream.removeListener('item', v);
// }
this.modStreamCallbacks = new Map();
this.startedAt = undefined;
this.eventsCheckedSinceStartTotal = 0;
this.checksRunSinceStartTotal = 0;
this.rulesRunSinceStartTotal = 0;
this.rulesCachedSinceStartTotal = 0;
this.rulesTriggeredSinceStartTotal = 0;
this.checksTriggeredSinceStart = new Map();
this.actionsRunSinceStart = new Map();
this.logger.info(`Events STOPPED by ${causedBy}`);
this.eventsState = {
state: STOPPED,

View File

@@ -2,51 +2,107 @@ import {Poll, SnooStormOptions} from "snoostorm"
import Snoowrap from "snoowrap";
import {EventEmitter} from "events";
import {PollConfiguration} from "snoostorm/out/util/Poll";
import {DEFAULT_POLLING_INTERVAL} from "../Common/interfaces";
import {ClearProcessedOptions, DEFAULT_POLLING_INTERVAL} from "../Common/interfaces";
import dayjs, {Dayjs} from "dayjs";
import { Duration } from "dayjs/plugin/duration";
import {parseDuration, sleep} from "../util";
import setRandomInterval from 'set-random-interval';
type Awaitable<T> = Promise<T> | T;
interface RCBPollingOptions extends SnooStormOptions {
subreddit: string,
clearProcessed?: ClearProcessedOptions
}
interface RCBPollConfiguration<T> extends PollConfiguration<T> {
clearProcessed?: ClearProcessedOptions
}
export class SPoll<T extends object> extends Poll<T> {
identifier: keyof T;
getter: () => Awaitable<T[]>;
frequency;
running: boolean = false;
clearProcessedDuration?: Duration;
clearProcessedSize?: number;
clearProcessedAfter?: Dayjs;
retainProcessed: number = 0;
randInterval?: { clear: () => void };
constructor(options: PollConfiguration<T>) {
constructor(options: RCBPollConfiguration<T>) {
super(options);
this.identifier = options.identifier;
this.getter = options.get;
this.frequency = options.frequency;
const {
after,
size,
retain = 0,
} = options.clearProcessed || {};
if(after !== undefined) {
this.clearProcessedDuration = parseDuration(after);
}
this.clearProcessedSize = size;
this.retainProcessed = retain;
if (this.clearProcessedDuration !== undefined) {
this.clearProcessedAfter = dayjs().add(this.clearProcessedDuration.asSeconds(), 's');
}
clearInterval(this.interval);
}
startInterval = () => {
this.running = true;
this.interval = setInterval(async () => {
try {
const batch = await this.getter();
const newItems: T[] = [];
for (const item of batch) {
const id = item[this.identifier];
if (this.processed.has(id)) continue;
this.randInterval = setRandomInterval((function (self) {
return async () => {
try {
// clear the tracked, processed activity ids after a set period or number of activities have been processed
// because when RCB is long-running and has streams from high-volume subreddits this list never gets smaller...
// Emit for new items and add it to the list
newItems.push(item);
this.processed.add(id);
this.emit("item", item);
// so clear if after time period
if ((self.clearProcessedAfter !== undefined && dayjs().isSameOrAfter(self.clearProcessedAfter))
// or clear if processed list is larger than defined max allowable size (default setting, 2 * polling option limit)
|| (self.clearProcessedSize !== undefined && self.processed.size >= self.clearProcessedSize)) {
if (self.retainProcessed === 0) {
self.processed = new Set();
} else {
// retain some processed so we have continuity between processed list resets -- this is default behavior and retains polling option limit # of activities
// we can slice from the set here because ID order is guaranteed for Set object so list is oldest -> newest
// -- retain last LIMIT number of activities (or all if retain # is larger than list due to user config error)
self.processed = new Set(Array.from(self.processed).slice(Math.max(0, self.processed.size - self.retainProcessed)));
}
// reset time interval if there is one
if (self.clearProcessedAfter !== undefined && self.clearProcessedDuration !== undefined) {
self.clearProcessedAfter = dayjs().add(self.clearProcessedDuration.asSeconds(), 's');
}
}
const batch = await self.getter();
const newItems: T[] = [];
for (const item of batch) {
const id = item[self.identifier];
if (self.processed.has(id)) continue;
// Emit for new items and add it to the list
newItems.push(item);
self.processed.add(id);
self.emit("item", item);
}
// Emit the new listing of all new items
self.emit("listing", newItems);
} catch (err) {
self.emit('error', err);
self.end();
}
// Emit the new listing of all new items
this.emit("listing", newItems);
} catch (err) {
this.emit('error', err);
this.end();
}
}, this.frequency);
})(this), this.frequency - 1, this.frequency + 1);
}
end = () => {
this.running = false;
if(this.randInterval !== undefined) {
this.randInterval.clear();
}
super.end();
}
}
@@ -54,11 +110,12 @@ export class SPoll<T extends object> extends Poll<T> {
export class UnmoderatedStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
constructor(
client: Snoowrap,
options: SnooStormOptions & { subreddit: string }) {
options: RCBPollingOptions) {
super({
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
get: async () => client.getSubreddit(options.subreddit).getUnmoderated(options),
identifier: "id",
clearProcessed: options.clearProcessed
});
}
}
@@ -66,11 +123,12 @@ export class UnmoderatedStream extends SPoll<Snoowrap.Submission | Snoowrap.Comm
export class ModQueueStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
constructor(
client: Snoowrap,
options: SnooStormOptions & { subreddit: string }) {
options: RCBPollingOptions) {
super({
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
get: async () => client.getSubreddit(options.subreddit).getModqueue(options),
identifier: "id",
clearProcessed: options.clearProcessed
});
}
}
@@ -78,11 +136,12 @@ export class ModQueueStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment
export class SubmissionStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
constructor(
client: Snoowrap,
options: SnooStormOptions & { subreddit: string }) {
options: RCBPollingOptions) {
super({
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
get: async () => client.getNew(options.subreddit, options),
identifier: "id",
clearProcessed: options.clearProcessed
});
}
}
@@ -90,11 +149,12 @@ export class SubmissionStream extends SPoll<Snoowrap.Submission | Snoowrap.Comme
export class CommentStream extends SPoll<Snoowrap.Submission | Snoowrap.Comment> {
constructor(
client: Snoowrap,
options: SnooStormOptions & { subreddit: string }) {
options: RCBPollingOptions) {
super({
frequency: options.pollTime || DEFAULT_POLLING_INTERVAL * 1000,
get: async () => client.getNewComments(options.subreddit, options),
identifier: "id",
clearProcessed: options.clearProcessed
});
}
}

View File

@@ -8,23 +8,39 @@ import {
getAuthorActivities,
testAuthorCriteria
} from "../Utils/SnoowrapUtils";
import Subreddit from 'snoowrap/dist/objects/Subreddit';
import winston, {Logger} from "winston";
import as from 'async';
import fetch from 'node-fetch';
import {
buildCacheOptionsFromProvider,
cacheStats, createCacheManager,
formatNumber,
mergeArr,
parseExternalUrl,
parseWikiContext
asSubmission,
buildCacheOptionsFromProvider, buildCachePrefix,
cacheStats, compareDurationValue, comparisonTextOp, createCacheManager, createHistoricalStatsDisplay,
formatNumber, getActivityAuthorName, getActivitySubredditName, isStrongSubredditState,
mergeArr, parseDurationComparison,
parseExternalUrl, parseGenericValueComparison,
parseWikiContext, shouldCacheSubredditStateCriteriaResult, subredditStateIsNameOnly, toStrongSubredditState
} from "../util";
import LoggedError from "../Utils/LoggedError";
import {
BotInstanceConfig,
CacheOptions, CommentState,
Footer, OperatorConfig, ResourceStats, SubmissionState,
SubredditCacheConfig, TTLConfig, TypedActivityStates
CacheOptions,
CommentState,
Footer,
OperatorConfig,
ResourceStats,
StrongCache,
SubmissionState,
CacheConfig,
TTLConfig,
TypedActivityStates,
UserResultCache,
ActionedEvent,
SubredditState,
StrongSubredditState,
HistoricalStats,
HistoricalStatUpdateData,
SubredditHistoricalStats,
SubredditHistoricalStatsDisplay,
} from "../Common/interfaces";
import UserNotes from "./UserNotes";
import Mustache from "mustache";
@@ -32,16 +48,20 @@ import he from "he";
import {AuthorCriteria} from "../Author/Author";
import {SPoll} from "./Streams";
import {Cache} from 'cache-manager';
import {Submission, Comment} from "snoowrap/dist/objects";
import {cacheTTLDefaults} from "../Common/defaults";
import {Submission, Comment, Subreddit} from "snoowrap/dist/objects";
import {cacheTTLDefaults, createHistoricalDefaults, historicalDefaults} from "../Common/defaults";
import {check} from "tcp-port-used";
import {ExtendedSnoowrap} from "../Utils/SnoowrapClients";
import dayjs from "dayjs";
import ImageData from "../Common/ImageData";
export const DEFAULT_FOOTER = '\r\n*****\r\nThis action was performed by [a bot.]({{botLink}}) Mention a moderator or [send a modmail]({{modmailLink}}) if you any ideas, questions, or concerns about this action.';
export interface SubredditResourceConfig extends Footer {
caching?: SubredditCacheConfig,
caching?: CacheConfig,
subreddit: Subreddit,
logger: Logger;
client: Snoowrap
client: ExtendedSnoowrap
}
interface SubredditResourceOptions extends Footer {
@@ -51,32 +71,41 @@ interface SubredditResourceOptions extends Footer {
cacheSettingsHash: string
subreddit: Subreddit,
logger: Logger;
client: Snoowrap;
client: ExtendedSnoowrap;
prefix?: string;
actionedEventsMax: number;
}
export interface SubredditResourceSetOptions extends SubredditCacheConfig, Footer {
export interface SubredditResourceSetOptions extends CacheConfig, Footer {
}
export class SubredditResources {
//enabled!: boolean;
protected useSubredditAuthorCache!: boolean;
protected authorTTL: number = cacheTTLDefaults.authorTTL;
protected wikiTTL: number = cacheTTLDefaults.wikiTTL;
protected submissionTTL: number = cacheTTLDefaults.submissionTTL;
protected commentTTL: number = cacheTTLDefaults.commentTTL;
protected filterCriteriaTTL: number = cacheTTLDefaults.filterCriteriaTTL;
protected authorTTL: number | false = cacheTTLDefaults.authorTTL;
protected subredditTTL: number | false = cacheTTLDefaults.subredditTTL;
protected wikiTTL: number | false = cacheTTLDefaults.wikiTTL;
protected submissionTTL: number | false = cacheTTLDefaults.submissionTTL;
protected commentTTL: number | false = cacheTTLDefaults.commentTTL;
protected filterCriteriaTTL: number | false = cacheTTLDefaults.filterCriteriaTTL;
name: string;
protected logger: Logger;
userNotes: UserNotes;
footer: false | string = DEFAULT_FOOTER;
subreddit: Subreddit
client: Snoowrap
client: ExtendedSnoowrap
cache: Cache
cacheType: string
cacheSettingsHash?: string;
pruneInterval?: any;
historicalSaveInterval?: any;
prefix?: string
actionedEventsMax: number;
stats: { cache: ResourceStats };
stats: {
cache: ResourceStats
historical: SubredditHistoricalStats
};
constructor(name: string, options: SubredditResourceOptions) {
const {
@@ -87,20 +116,30 @@ export class SubredditResources {
authorTTL,
wikiTTL,
filterCriteriaTTL,
submissionTTL,
commentTTL,
subredditTTL,
},
cache,
prefix,
cacheType,
actionedEventsMax,
cacheSettingsHash,
client,
} = options || {};
this.cacheSettingsHash = cacheSettingsHash;
this.cache = cache;
this.prefix = prefix;
this.client = client;
this.cacheType = cacheType;
this.authorTTL = authorTTL;
this.wikiTTL = wikiTTL;
this.filterCriteriaTTL = filterCriteriaTTL;
this.actionedEventsMax = actionedEventsMax;
this.authorTTL = authorTTL === true ? 0 : authorTTL;
this.submissionTTL = submissionTTL === true ? 0 : submissionTTL;
this.commentTTL = commentTTL === true ? 0 : commentTTL;
this.subredditTTL = subredditTTL === true ? 0 : subredditTTL;
this.wikiTTL = wikiTTL === true ? 0 : wikiTTL;
this.filterCriteriaTTL = filterCriteriaTTL === true ? 0 : filterCriteriaTTL;
this.subreddit = subreddit;
this.name = name;
if (logger === undefined) {
@@ -111,7 +150,11 @@ export class SubredditResources {
}
this.stats = {
cache: cacheStats()
cache: cacheStats(),
historical: {
allTime: createHistoricalDefaults(),
lastReload: createHistoricalDefaults()
}
};
const cacheUseCB = (miss: boolean) => {
@@ -122,7 +165,7 @@ export class SubredditResources {
this.userNotes = new UserNotes(userNotesTTL, this.subreddit, this.logger, this.cache, cacheUseCB)
if(this.cacheType === 'memory' && this.cacheSettingsHash !== 'default') {
const min = Math.min(...([wikiTTL, authorTTL, userNotesTTL].filter(x => x !== 0)));
const min = Math.min(...([this.wikiTTL, this.authorTTL, this.submissionTTL, this.commentTTL, this.filterCriteriaTTL].filter(x => typeof x === 'number' && x !== 0) as number[]));
if(min > 0) {
// set default prune interval
this.pruneInterval = setInterval(() => {
@@ -135,8 +178,98 @@ export class SubredditResources {
}
}
async initHistoricalStats() {
const at = await this.cache.wrap(`${this.name}-historical-allTime`, () => createHistoricalDefaults(), {ttl: 0}) as object;
const rehydratedAt: any = {};
for(const [k, v] of Object.entries(at)) {
if(Array.isArray(v)) {
rehydratedAt[k] = new Map(v);
} else {
rehydratedAt[k] = v;
}
}
this.stats.historical.allTime = rehydratedAt as HistoricalStats;
// const lr = await this.cache.wrap(`${this.name}-historical-lastReload`, () => createHistoricalDefaults(), {ttl: 0}) as object;
// const rehydratedLr: any = {};
// for(const [k, v] of Object.entries(lr)) {
// if(Array.isArray(v)) {
// rehydratedLr[k] = new Map(v);
// } else {
// rehydratedLr[k] = v;
// }
// }
// this.stats.historical.lastReload = rehydratedLr;
}
updateHistoricalStats(data: HistoricalStatUpdateData) {
for(const [k, v] of Object.entries(data)) {
if(this.stats.historical.lastReload[k] !== undefined) {
if(typeof v === 'number') {
this.stats.historical.lastReload[k] += v;
} else if(this.stats.historical.lastReload[k] instanceof Map) {
const keys = Array.isArray(v) ? v : [v];
for(const key of keys) {
this.stats.historical.lastReload[k].set(key, (this.stats.historical.lastReload[k].get(key) || 0) + 1);
}
}
}
if(this.stats.historical.allTime[k] !== undefined) {
if(typeof v === 'number') {
this.stats.historical.allTime[k] += v;
} else if(this.stats.historical.allTime[k] instanceof Map) {
const keys = Array.isArray(v) ? v : [v];
for(const key of keys) {
this.stats.historical.allTime[k].set(key, (this.stats.historical.allTime[k].get(key) || 0) + 1);
}
}
}
}
}
getHistoricalDisplayStats(): SubredditHistoricalStatsDisplay {
return {
allTime: createHistoricalStatsDisplay(this.stats.historical.allTime),
lastReload: createHistoricalStatsDisplay(this.stats.historical.lastReload)
}
}
async saveHistoricalStats() {
const atSerializable: any = {};
for(const [k, v] of Object.entries(this.stats.historical.allTime)) {
if(v instanceof Map) {
atSerializable[k] = Array.from(v.entries());
} else {
atSerializable[k] = v;
}
}
await this.cache.set(`${this.name}-historical-allTime`, atSerializable, {ttl: 0});
// const lrSerializable: any = {};
// for(const [k, v] of Object.entries(this.stats.historical.lastReload)) {
// if(v instanceof Map) {
// lrSerializable[k] = Array.from(v.entries());
// } else {
// lrSerializable[k] = v;
// }
// }
// await this.cache.set(`${this.name}-historical-lastReload`, lrSerializable, {ttl: 0});
}
setHistoricalSaveInterval() {
this.historicalSaveInterval = setInterval((function(self) {
return async () => {
await self.saveHistoricalStats();
}
})(this),10000);
}
async getCacheKeyCount() {
if (this.cache.store.keys !== undefined) {
if(this.cacheType === 'redis') {
const keys = await this.cache.store.keys(`${this.prefix}*`);
return keys.length;
}
return (await this.cache.store.keys()).length;
}
return 0;
@@ -200,16 +333,26 @@ export class SubredditResources {
this.logger = logger.child({labels: ['Resource Cache']}, mergeArr);
}
async getActionedEvents(): Promise<ActionedEvent[]> {
return await this.cache.wrap(`actionedEvents-${this.subreddit.display_name}`, () => []);
}
async addActionedEvent(ae: ActionedEvent) {
const events = await this.cache.wrap(`actionedEvents-${this.subreddit.display_name}`, () => []) as ActionedEvent[];
events.unshift(ae);
await this.cache.set(`actionedEvents-${this.subreddit.display_name}`, events.slice(0, this.actionedEventsMax), {ttl: 0});
}
async getActivity(item: Submission | Comment) {
try {
let hash = '';
if (item instanceof Submission && this.submissionTTL > 0) {
if (this.submissionTTL !== false && asSubmission(item)) {
hash = `sub-${item.name}`;
await this.stats.cache.submission.identifierRequestCount.set(hash, (await this.stats.cache.submission.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.submission.requestTimestamps.push(Date.now());
this.stats.cache.submission.requests++;
const cachedSubmission = await this.cache.get(hash);
if (cachedSubmission !== undefined) {
if (cachedSubmission !== undefined && cachedSubmission !== null) {
this.logger.debug(`Cache Hit: Submission ${item.name}`);
return cachedSubmission;
}
@@ -218,13 +361,13 @@ export class SubredditResources {
this.stats.cache.submission.miss++;
await this.cache.set(hash, submission, {ttl: this.submissionTTL});
return submission;
} else if (this.commentTTL > 0) {
} else if (this.commentTTL !== false) {
hash = `comm-${item.name}`;
await this.stats.cache.comment.identifierRequestCount.set(hash, (await this.stats.cache.comment.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.comment.requestTimestamps.push(Date.now());
this.stats.cache.comment.requests++;
const cachedComment = await this.cache.get(hash);
if (cachedComment !== undefined) {
if (cachedComment !== undefined && cachedComment !== null) {
this.logger.debug(`Cache Hit: Comment ${item.name}`);
return cachedComment;
}
@@ -243,30 +386,88 @@ export class SubredditResources {
}
}
async getAuthorActivities(user: RedditUser, options: AuthorTypedActivitiesOptions): Promise<Array<Submission | Comment>> {
if (this.authorTTL > 0) {
const userName = user.name;
const hashObj: any = {...options, userName};
if (this.useSubredditAuthorCache) {
hashObj.subreddit = this.name;
// @ts-ignore
async getSubreddit(item: Submission | Comment) {
try {
let hash = '';
const subName = getActivitySubredditName(item);
if (this.subredditTTL !== false) {
hash = `sub-${subName}`;
await this.stats.cache.subreddit.identifierRequestCount.set(hash, (await this.stats.cache.subreddit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.subreddit.requestTimestamps.push(Date.now());
this.stats.cache.subreddit.requests++;
const cachedSubreddit = await this.cache.get(hash);
if (cachedSubreddit !== undefined && cachedSubreddit !== null) {
this.logger.debug(`Cache Hit: Subreddit ${subName}`);
// @ts-ignore
return cachedSubreddit as Subreddit;
}
// @ts-ignore
const subreddit = await this.client.getSubreddit(subName).fetch() as Subreddit;
this.stats.cache.subreddit.miss++;
// @ts-ignore
await this.cache.set(hash, subreddit, {ttl: this.subredditTTL});
// @ts-ignore
return subreddit as Subreddit;
} else {
// @ts-ignore
let subreddit = await this.client.getSubreddit(subName);
return subreddit as Subreddit;
}
const hash = objectHash.sha1({...options, userName});
} catch (err) {
this.logger.error('Error while trying to fetch a cached activity', err);
throw err.logged;
}
}
async hasSubreddit(name: string) {
if (this.subredditTTL !== false) {
const hash = `sub-${name}`;
this.stats.cache.subreddit.requests++
this.stats.cache.subreddit.requestTimestamps.push(Date.now());
await this.stats.cache.subreddit.identifierRequestCount.set(hash, (await this.stats.cache.subreddit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
const val = await this.cache.get(hash);
if(val === undefined || val === null) {
this.stats.cache.subreddit.miss++;
}
return val !== undefined && val !== null;
}
return false;
}
async getAuthorActivities(user: RedditUser, options: AuthorTypedActivitiesOptions): Promise<Array<Submission | Comment>> {
const userName = getActivityAuthorName(user);
if (this.authorTTL !== false) {
const hashObj: any = options;
if (this.useSubredditAuthorCache) {
hashObj.subreddit = this.subreddit;
}
const hash = `authorActivities-${userName}-${options.type || 'overview'}-${objectHash.sha1(hashObj)}`;
this.stats.cache.author.requests++;
await this.stats.cache.author.identifierRequestCount.set(user.name, (await this.stats.cache.author.identifierRequestCount.wrap(user.name, () => 0) as number) + 1);
await this.stats.cache.author.identifierRequestCount.set(userName, (await this.stats.cache.author.identifierRequestCount.wrap(userName, () => 0) as number) + 1);
this.stats.cache.author.requestTimestamps.push(Date.now());
let miss = false;
const cacheVal = await this.cache.wrap(hash, async () => {
miss = true;
if(typeof user === 'string') {
// @ts-ignore
user = await this.client.getUser(userName);
}
return await getAuthorActivities(user, options);
}, {ttl: this.authorTTL});
if (!miss) {
this.logger.debug(`Cache Hit: ${userName} (${options.type || 'overview'})`);
this.logger.debug(`Cache Hit: ${userName} (Hash ${hash})`);
} else {
this.stats.cache.author.miss++;
}
return cacheVal as Array<Submission | Comment>;
}
if(typeof user === 'string') {
// @ts-ignore
user = await this.client.getUser(userName);
}
return await getAuthorActivities(user, options);
}
@@ -298,14 +499,14 @@ export class SubredditResources {
}
// try to get cached value first
let hash = `${subreddit.display_name}-${cacheKey}`;
if (this.wikiTTL > 0) {
let hash = `${subreddit.display_name}-content-${cacheKey}`;
if (this.wikiTTL !== false) {
await this.stats.cache.content.identifierRequestCount.set(cacheKey, (await this.stats.cache.content.identifierRequestCount.wrap(cacheKey, () => 0) as number) + 1);
this.stats.cache.content.requestTimestamps.push(Date.now());
this.stats.cache.content.requests++;
const cachedContent = await this.cache.get(hash);
if (cachedContent !== undefined) {
this.logger.debug(`Cache Hit: ${cacheKey}`);
if (cachedContent !== undefined && cachedContent !== null) {
this.logger.debug(`Content Cache Hit: ${cacheKey}`);
return cachedContent as string;
} else {
this.stats.cache.content.miss++;
@@ -349,17 +550,125 @@ export class SubredditResources {
}
}
if (this.wikiTTL > 0) {
if (this.wikiTTL !== false) {
this.cache.set(hash, wikiContent, {ttl: this.wikiTTL});
}
return wikiContent;
}
async cacheSubreddits(subs: (Subreddit | string)[]) {
const allSubs = subs.map(x => typeof x !== 'string' ? x.display_name : x);
const subNames = [...new Set(allSubs)];
const uncachedSubs = [];
for(const s of subNames) {
if(!(await this.hasSubreddit(s))) {
uncachedSubs.push(s);
}
}
if(uncachedSubs.length > 0) {
// cache all uncached subs batchly-like
const subResults = await this.client.getManySubreddits(uncachedSubs);
for(const s of subResults) {
// @ts-ignore
await this.cache.set(`sub-${s.display_name}`, s, {ttl: this.subredditTTL});
}
}
}
async batchTestSubredditCriteria(items: (Comment | Submission)[], states: (SubredditState | StrongSubredditState)[]): Promise<(Comment | Submission)[]> {
let passedItems: (Comment | Submission)[] = [];
let unpassedItems: (Comment | Submission)[] = [];
const {nameOnly = [], full = []} = states.reduce((acc: {nameOnly: (SubredditState | StrongSubredditState)[], full: (SubredditState | StrongSubredditState)[]}, curr) => {
if(subredditStateIsNameOnly(curr)) {
return {...acc, nameOnly: acc.nameOnly.concat(curr)};
}
return {...acc, full: acc.full.concat(curr)};
}, {nameOnly: [], full: []});
if(nameOnly.length === 0) {
unpassedItems = items;
} else {
for(const item of items) {
const subName = getActivitySubredditName(item);
for(const state of nameOnly) {
if(await this.isSubreddit({display_name: subName} as Subreddit, state, this.logger)) {
passedItems.push(item);
break;
}
}
unpassedItems.push(item);
}
}
if(unpassedItems.length > 0 && full.length > 0) {
await this.cacheSubreddits(unpassedItems.map(x => x.subreddit));
for(const item of unpassedItems) {
for(const state of full) {
if(await this.isSubreddit(await this.getSubreddit(item), state, this.logger)) {
passedItems.push(item);
break;
}
}
}
}
return passedItems;
}
async testSubredditCriteria(item: (Comment | Submission), state: SubredditState | StrongSubredditState) {
if(Object.keys(state).length === 0) {
return true;
}
// optimize for name-only criteria checks
// -- we don't need to store cache results for this since we know subreddit name is always available from item (no request required)
const critCount = Object.entries(state).filter(([key, val]) => {
return val !== undefined && !['name','stateDescription'].includes(key);
}).length;
if(critCount === 0) {
const subName = getActivitySubredditName(item);
return await this.isSubreddit({display_name: subName} as Subreddit, state, this.logger);
}
// see comments on shouldCacheSubredditStateCriteriaResult() for why this is needed
if (this.filterCriteriaTTL !== false && shouldCacheSubredditStateCriteriaResult(state)) {
try {
const hash = `subredditCrit-${getActivitySubredditName(item)}-${objectHash.sha1(state)}`;
await this.stats.cache.subredditCrit.identifierRequestCount.set(hash, (await this.stats.cache.subredditCrit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.subredditCrit.requestTimestamps.push(Date.now());
this.stats.cache.subredditCrit.requests++;
const cachedItem = await this.cache.get(hash);
if (cachedItem !== undefined && cachedItem !== null) {
this.logger.debug(`Cache Hit: Subreddit Check on ${getActivitySubredditName(item)} (Hash ${hash})`);
return cachedItem as boolean;
}
const itemResult = await this.isSubreddit(await this.getSubreddit(item), state, this.logger);
this.stats.cache.subredditCrit.miss++;
await this.cache.set(hash, itemResult, {ttl: this.filterCriteriaTTL});
return itemResult;
} catch (err) {
if (err.logged !== true) {
this.logger.error('Error occurred while testing subreddit criteria', err);
}
throw err;
}
}
return await this.isSubreddit(await this.getSubreddit(item), state, this.logger);
}
async testAuthorCriteria(item: (Comment | Submission), authorOpts: AuthorCriteria, include = true) {
if (this.filterCriteriaTTL > 0) {
const hashObj = {itemId: item.id, ...authorOpts, include};
const hash = `authorCrit-${objectHash.sha1(hashObj)}`;
if (this.filterCriteriaTTL !== false) {
// in the criteria check we only actually use the `item` to get the author flair
// which will be the same for the entire subreddit
//
// so we can create a hash only using subreddit-author-criteria
// and ignore the actual item
const hashObj = {...authorOpts, include};
const userName = getActivityAuthorName(item.author);
const hash = `authorCrit-${this.subreddit.display_name}-${userName}-${objectHash.sha1(hashObj)}`;
await this.stats.cache.authorCrit.identifierRequestCount.set(hash, (await this.stats.cache.authorCrit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.authorCrit.requestTimestamps.push(Date.now());
this.stats.cache.authorCrit.requests++;
@@ -367,9 +676,9 @@ export class SubredditResources {
const cachedAuthorTest = await this.cache.wrap(hash, async () => {
miss = true;
return await testAuthorCriteria(item, authorOpts, include, this.userNotes);
}, {ttl: this.authorTTL});
}, {ttl: this.filterCriteriaTTL});
if (!miss) {
this.logger.debug(`Cache Hit: Author Check on ${item.id}`);
this.logger.debug(`Cache Hit: Author Check on ${userName} (Hash ${hash})`);
} else {
this.stats.cache.authorCrit.miss++;
}
@@ -379,10 +688,14 @@ export class SubredditResources {
return await testAuthorCriteria(item, authorOpts, include, this.userNotes);
}
async testItemCriteria(i: (Comment | Submission), s: TypedActivityStates) {
if (this.filterCriteriaTTL > 0) {
async testItemCriteria(i: (Comment | Submission), activityStates: TypedActivityStates) {
// return early if nothing is being checked for so we don't store an empty cache result for this (duh)
if(activityStates.length === 0) {
return true;
}
if (this.filterCriteriaTTL !== false) {
let item = i;
let states = s;
let states = activityStates;
// optimize for submission only checks on comment item
if (item instanceof Comment && states.length === 1 && Object.keys(states[0]).length === 1 && (states[0] as CommentState).submissionState !== undefined) {
// @ts-ignore
@@ -392,19 +705,18 @@ export class SubredditResources {
states = (states[0] as CommentState).submissionState as SubmissionState[];
}
try {
const hashObj = {itemId: item.name, ...states};
const hash = `itemCrit-${objectHash.sha1(hashObj)}`;
const hash = `itemCrit-${item.name}-${objectHash.sha1(states)}`;
await this.stats.cache.itemCrit.identifierRequestCount.set(hash, (await this.stats.cache.itemCrit.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
this.stats.cache.itemCrit.requestTimestamps.push(Date.now());
this.stats.cache.itemCrit.requests++;
const cachedItem = await this.cache.get(hash);
if (cachedItem !== undefined) {
this.logger.debug(`Cache Hit: Item Check on ${item.name}`);
if (cachedItem !== undefined && cachedItem !== null) {
this.logger.debug(`Cache Hit: Item Check on ${item.name} (Hash ${hash})`);
return cachedItem as boolean;
}
const itemResult = await this.isItem(item, states, this.logger);
this.stats.cache.itemCrit.miss++;
const res = await this.cache.set(hash, itemResult, {ttl: this.filterCriteriaTTL});
await this.cache.set(hash, itemResult, {ttl: this.filterCriteriaTTL});
return itemResult;
} catch (err) {
if (err.logged !== true) {
@@ -414,7 +726,61 @@ export class SubredditResources {
}
}
return await this.isItem(i, s, this.logger);
return await this.isItem(i, activityStates, this.logger);
}
async isSubreddit (subreddit: Subreddit, stateCriteria: SubredditState | StrongSubredditState, logger: Logger) {
delete stateCriteria.stateDescription;
if (Object.keys(stateCriteria).length === 0) {
return true;
}
const crit = isStrongSubredditState(stateCriteria) ? stateCriteria : toStrongSubredditState(stateCriteria, {defaultFlags: 'i'});
const log = logger.child({leaf: 'Subreddit Check'}, mergeArr);
return await (async () => {
for (const k of Object.keys(crit)) {
// @ts-ignore
if (crit[k] !== undefined) {
switch (k) {
case 'name':
const nameReg = crit[k] as RegExp;
if(!nameReg.test(subreddit.display_name)) {
return false;
}
break;
case 'over18':
case 'over_18':
// handling an edge case where user may have confused Comment/Submission state "over_18" with SubredditState "over18"
// @ts-ignore
if (crit[k] !== subreddit.over18) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${subreddit.over18}`)
return false
}
break;
default:
// @ts-ignore
if (subreddit[k] !== undefined) {
// @ts-ignore
if (crit[k] !== subreddit[k]) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${subreddit[k]}`)
return false
}
} else {
log.warn(`Tried to test for Subreddit property '${k}' but it did not exist`);
}
break;
}
}
}
log.debug(`Passed: ${JSON.stringify(stateCriteria)}`);
return true;
})() as boolean;
}
async isItem (item: Submission | Comment, stateCriteria: TypedActivityStates, logger: Logger) {
@@ -446,6 +812,26 @@ export class SubredditResources {
return false;
}
break;
case 'score':
const scoreCompare = parseGenericValueComparison(crit[k] as string);
if(!comparisonTextOp(item.score, scoreCompare.operator, scoreCompare.value)) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${item.score}`)
return false
}
break;
case 'reports':
if (!item.can_mod_post) {
log.debug(`Cannot test for reports on Activity in a subreddit bot account is not a moderato Activist. Skipping criteria...`);
break;
}
const reportCompare = parseGenericValueComparison(crit[k] as string);
if(!comparisonTextOp(item.num_reports, reportCompare.operator, reportCompare.value)) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${item.num_reports}`)
return false
}
break;
case 'removed':
const removed = activityIsRemoved(item);
if (removed !== crit['removed']) {
@@ -463,6 +849,10 @@ export class SubredditResources {
}
break;
case 'filtered':
if (!item.can_mod_post) {
log.debug(`Cannot test for 'filtered' state on Activity in a subreddit bot account is not a moderator for. Skipping criteria...`);
break;
}
const filtered = activityIsFiltered(item);
if (filtered !== crit['filtered']) {
// @ts-ignore
@@ -470,6 +860,13 @@ export class SubredditResources {
return false
}
break;
case 'age':
const ageTest = compareDurationValue(parseDurationComparison(crit[k] as string), dayjs.unix(await item.created));
if (!ageTest) {
log.debug(`Failed: Activity did not pass age test "${crit[k] as string}"`);
return false;
}
break;
case 'title':
if((item instanceof Comment)) {
log.warn('`title` is not allowed in `itemIs` criteria when the main Activity is a Comment');
@@ -478,7 +875,7 @@ export class SubredditResources {
// @ts-ignore
const titleReg = crit[k] as string;
try {
if(null === item.title.match(titleReg)) {
if (null === item.title.match(titleReg)) {
// @ts-ignore
log.debug(`Failed to match title as regular expression: ${titleReg}`);
return false;
@@ -488,6 +885,19 @@ export class SubredditResources {
return false
}
break;
case 'approved':
case 'spam':
if(!item.can_mod_post) {
log.debug(`Cannot test for '${k}' state on Activity in a subreddit bot account is not a moderator for. Skipping criteria...`);
break;
}
// @ts-ignore
if (item[k] !== crit[k]) {
// @ts-ignore
log.debug(`Failed: Expected => ${k}:${crit[k]} | Found => ${k}:${item[k]}`)
return false
}
break;
default:
// @ts-ignore
if (item[k] !== undefined) {
@@ -498,7 +908,11 @@ export class SubredditResources {
return false
}
} else {
log.warn(`Tried to test for Item property '${k}' but it did not exist`);
if(!item.can_mod_post) {
log.warn(`Tried to test for Activity property '${k}' but it did not exist. This Activity is not in a subreddit the bot can mod so it may be that this property is only available to mods of that subreddit. Or the property may be misspelled.`);
} else {
log.warn(`Tried to test for Activity property '${k}' but it did not exist. Check the spelling of the property.`);
}
}
break;
}
@@ -514,36 +928,28 @@ export class SubredditResources {
return false
}
async getCommentCheckCacheResult(item: Comment, checkConfig: object): Promise<boolean | undefined> {
const criteria = {
author: item.author.name,
submission: item.link_id,
...checkConfig
}
const hash = objectHash.sha1(criteria);
async getCommentCheckCacheResult(item: Comment, checkConfig: object): Promise<UserResultCache | undefined> {
const userName = getActivityAuthorName(item.author);
const hash = `commentUserResult-${userName}-${item.link_id}-${objectHash.sha1(checkConfig)}`;
this.stats.cache.commentCheck.requests++;
const result = await this.cache.get(hash) as boolean | undefined;
this.stats.cache.commentCheck.requestTimestamps.push(Date.now());
await this.stats.cache.commentCheck.identifierRequestCount.set(hash, (await this.stats.cache.commentCheck.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
let result = await this.cache.get(hash) as UserResultCache | undefined | null;
if(result === null) {
result = undefined;
}
if(result === undefined) {
this.stats.cache.commentCheck.miss++;
}
this.logger.debug(`Cache Hit: Comment Check for ${item.author.name} in Submission ${item.link_id}`);
this.logger.debug(`Cache Hit: Comment Check for ${userName} in Submission ${item.link_id} (Hash ${hash})`);
return result;
}
async setCommentCheckCacheResult(item: Comment, checkConfig: object, result: boolean, ttl: number) {
const criteria = {
author: item.author.name,
submission: item.link_id,
...checkConfig
}
const hash = objectHash.sha1(criteria);
// don't set if result is already cached
if(undefined !== await this.cache.get(hash)) {
this.logger.debug(`Check result already cached for User ${item.author.name} on Submission ${item.link_id}`);
} else {
await this.cache.set(hash, result, { ttl });
this.logger.debug(`Cached check result '${result}' for User ${item.author.name} on Submission ${item.link_id} for ${ttl} seconds`);
}
async setCommentCheckCacheResult(item: Comment, checkConfig: object, result: UserResultCache, ttl: number) {
const userName = getActivityAuthorName(item.author);
const hash = `commentUserResult-${userName}-${item.link_id}-${objectHash.sha1(checkConfig)}`
await this.cache.set(hash, result, { ttl });
this.logger.debug(`Cached check result '${result.result}' for User ${userName} on Submission ${item.link_id} for ${ttl} seconds (Hash ${hash})`);
}
async generateFooter(item: Submission | Comment, actionFooter?: false | string) {
@@ -558,6 +964,33 @@ export class SubredditResources {
const footerRawContent = await this.getContent(footer, item.subreddit);
return he.decode(Mustache.render(footerRawContent, {subName, permaLink, modmailLink, botLink: BOT_LINK}));
}
async getImageHash(img: ImageData): Promise<string|undefined> {
const hash = `imgHash-${img.baseUrl}`;
const result = await this.cache.get(hash) as string | undefined | null;
this.stats.cache.imageHash.requests++
this.stats.cache.imageHash.requestTimestamps.push(Date.now());
await this.stats.cache.imageHash.identifierRequestCount.set(hash, (await this.stats.cache.imageHash.identifierRequestCount.wrap(hash, () => 0) as number) + 1);
if(result !== undefined && result !== null) {
return result;
}
this.stats.cache.commentCheck.miss++;
return undefined;
// const hash = await this.cache.wrap(img.baseUrl, async () => await img.hash(true), { ttl }) as string;
// if(img.hashResult === undefined) {
// img.hashResult = hash;
// }
// return hash;
}
async setImageHash(img: ImageData, hash: string, ttl: number): Promise<void> {
await this.cache.set(`imgHash-${img.baseUrl}`, hash, {ttl});
// const hash = await this.cache.wrap(img.baseUrl, async () => await img.hash(true), { ttl }) as string;
// if(img.hashResult === undefined) {
// img.hashResult = hash;
// }
// return hash;
}
}
export class BotResourcesManager {
@@ -566,9 +999,12 @@ export class BotResourcesManager {
enabled: boolean = true;
modStreams: Map<string, SPoll<Snoowrap.Submission | Snoowrap.Comment>> = new Map();
defaultCache: Cache;
defaultCacheConfig: StrongCache
cacheType: string = 'none';
cacheHash: string;
ttlDefaults: Required<TTLConfig>;
actionedEventsMaxDefault?: number;
actionedEventsDefault: number;
pruneInterval: any;
constructor(config: BotInstanceConfig) {
@@ -579,19 +1015,29 @@ export class BotResourcesManager {
wikiTTL,
commentTTL,
submissionTTL,
subredditTTL,
filterCriteriaTTL,
provider,
actionedEventsMax,
actionedEventsDefault,
},
name,
credentials,
caching,
} = config;
this.cacheHash = objectHash.sha1(caching);
this.ttlDefaults = {authorTTL, userNotesTTL, wikiTTL, commentTTL, submissionTTL, filterCriteriaTTL};
caching.provider.prefix = buildCachePrefix([caching.provider.prefix, 'SHARED']);
const {actionedEventsMax: eMax, actionedEventsDefault: eDef, ...relevantCacheSettings} = caching;
this.cacheHash = objectHash.sha1(relevantCacheSettings);
this.defaultCacheConfig = caching;
this.ttlDefaults = {authorTTL, userNotesTTL, wikiTTL, commentTTL, submissionTTL, filterCriteriaTTL, subredditTTL};
const options = provider;
this.cacheType = options.store;
this.actionedEventsMaxDefault = actionedEventsMax;
this.actionedEventsDefault = actionedEventsDefault;
this.defaultCache = createCacheManager(options);
if (this.cacheType === 'memory') {
const min = Math.min(...([this.ttlDefaults.wikiTTL, this.ttlDefaults.authorTTL, this.ttlDefaults.userNotesTTL].filter(x => x !== 0)));
const min = Math.min(...([this.ttlDefaults.wikiTTL, this.ttlDefaults.authorTTL, this.ttlDefaults.userNotesTTL].filter(x => typeof x === 'number' && x !== 0) as number[]));
if (min > 0) {
// set default prune interval
this.pruneInterval = setInterval(() => {
@@ -613,7 +1059,7 @@ export class BotResourcesManager {
return undefined;
}
set(subName: string, initOptions: SubredditResourceConfig): SubredditResources {
async set(subName: string, initOptions: SubredditResourceConfig): Promise<SubredditResources> {
let hash = 'default';
const { caching, ...init } = initOptions;
@@ -622,11 +1068,13 @@ export class BotResourcesManager {
cacheType: this.cacheType,
cacheSettingsHash: hash,
ttl: this.ttlDefaults,
prefix: this.defaultCacheConfig.provider.prefix,
actionedEventsMax: this.actionedEventsMaxDefault !== undefined ? Math.min(this.actionedEventsDefault, this.actionedEventsMaxDefault) : this.actionedEventsDefault,
...init,
};
if(caching !== undefined) {
const {provider = 'memory', ...rest} = caching;
const {provider = this.defaultCacheConfig.provider, actionedEventsMax = this.actionedEventsDefault, ...rest} = caching;
let cacheConfig = {
provider: buildCacheOptionsFromProvider(provider),
ttl: {
@@ -638,10 +1086,16 @@ export class BotResourcesManager {
// only need to create private if there settings are actually different than the default
if(hash !== this.cacheHash) {
const {provider: trueProvider, ...trueRest} = cacheConfig;
const defaultPrefix = trueProvider.prefix;
const subPrefix = defaultPrefix === this.defaultCacheConfig.provider.prefix ? buildCachePrefix([(defaultPrefix !== undefined ? defaultPrefix.replace('SHARED', '') : defaultPrefix), subName]) : trueProvider.prefix;
trueProvider.prefix = subPrefix;
const eventsMax = this.actionedEventsMaxDefault !== undefined ? Math.min(actionedEventsMax, this.actionedEventsMaxDefault) : actionedEventsMax;
opts = {
cache: createCacheManager(trueProvider),
actionedEventsMax: eventsMax,
cacheType: trueProvider.store,
cacheSettingsHash: hash,
prefix: subPrefix,
...init,
...trueRest,
};
@@ -655,6 +1109,8 @@ export class BotResourcesManager {
res.cache.reset();
}
resource = new SubredditResources(subName, opts);
await resource.initHistoricalStats();
resource.setHistoricalSaveInterval();
this.resources.set(subName, resource);
} else {
// just set non-cache related settings
@@ -665,6 +1121,7 @@ export class BotResourcesManager {
// reset cache stats when configuration is reloaded
resource.stats.cache = cacheStats();
}
resource.stats.historical.lastReload = createHistoricalDefaults();
return resource;
}

View File

@@ -1,6 +1,13 @@
import dayjs, {Dayjs} from "dayjs";
import {Comment, RedditUser, WikiPage} from "snoowrap";
import {COMMENT_URL_ID, deflateUserNotes, inflateUserNotes, parseLinkIdentifier, SUBMISSION_URL_ID} from "../util";
import {
COMMENT_URL_ID,
deflateUserNotes, getActivityAuthorName,
inflateUserNotes,
isScopeError,
parseLinkIdentifier,
SUBMISSION_URL_ID
} from "../util";
import Subreddit from "snoowrap/dist/objects/Subreddit";
import {Logger} from "winston";
import LoggedError from "../Utils/LoggedError";
@@ -48,7 +55,7 @@ export interface RawNote {
export type UserNotesConstants = Pick<any, "users" | "warnings">;
export class UserNotes {
notesTTL: number;
notesTTL: number | false;
subreddit: Subreddit;
wiki: WikiPage;
moderators?: RedditUser[];
@@ -63,8 +70,8 @@ export class UserNotes {
debounceCB: any;
batchCount: number = 0;
constructor(ttl: number, subreddit: Subreddit, logger: Logger, cache: Cache, cacheCB: Function) {
this.notesTTL = ttl;
constructor(ttl: number | boolean, subreddit: Subreddit, logger: Logger, cache: Cache, cacheCB: Function) {
this.notesTTL = ttl === true ? 0 : ttl;
this.subreddit = subreddit;
this.logger = logger;
this.wiki = subreddit.getWikiPage('usernotes');
@@ -74,10 +81,11 @@ export class UserNotes {
}
async getUserNotes(user: RedditUser): Promise<UserNote[]> {
const userName = getActivityAuthorName(user);
let notes: UserNote[] | undefined = [];
if (this.users !== undefined) {
notes = this.users.get(user.name);
notes = this.users.get(userName);
if (notes !== undefined) {
this.logger.debug('Returned cached notes');
return notes;
@@ -85,7 +93,7 @@ export class UserNotes {
}
const payload = await this.retrieveData();
const rawNotes = payload.blob[user.name];
const rawNotes = payload.blob[userName];
if (rawNotes !== undefined) {
if (this.moderators === undefined) {
this.moderators = await this.subreddit.getModerators();
@@ -94,7 +102,7 @@ export class UserNotes {
// sort in ascending order by time
notes.sort((a, b) => a.time.isBefore(b.time) ? -1 : 1);
if (this.notesTTL > 0 && this.cache !== undefined) {
this.users.set(user.name, notes);
this.users.set(userName, notes);
}
return notes;
} else {
@@ -105,6 +113,7 @@ export class UserNotes {
async addUserNote(item: (Submission|Comment), type: string | number, text: string = ''): Promise<UserNote>
{
const payload = await this.retrieveData();
const userName = getActivityAuthorName(item.author);
// idgaf
// @ts-ignore
@@ -120,16 +129,16 @@ export class UserNotes {
}
const newNote = new UserNote(dayjs(), text, mod, type, `https://reddit.com${item.permalink}`);
if(payload.blob[item.author.name] === undefined) {
payload.blob[item.author.name] = {ns: []};
if(payload.blob[userName] === undefined) {
payload.blob[userName] = {ns: []};
}
payload.blob[item.author.name].ns.push(newNote.toRaw(payload.constants));
payload.blob[userName].ns.push(newNote.toRaw(payload.constants));
await this.saveData(payload);
if(this.notesTTL > 0) {
const currNotes = this.users.get(item.author.name) || [];
const currNotes = this.users.get(userName) || [];
currNotes.push(newNote);
this.users.set(item.author.name, currNotes);
this.users.set(userName, currNotes);
}
return newNote;
}
@@ -144,7 +153,7 @@ export class UserNotes {
let cacheMiss;
if (this.notesTTL > 0) {
const cachedPayload = await this.cache.get(this.identifier);
if (cachedPayload !== undefined) {
if (cachedPayload !== undefined && cachedPayload !== null) {
this.cacheCB(false);
return cachedPayload as unknown as RawUserNotesPayload;
}
@@ -153,14 +162,15 @@ export class UserNotes {
}
try {
if(cacheMiss && this.debounceCB !== undefined) {
// timeout is still delayed. its our wiki data and we want it now! cm cacheworth 877 cache now
this.logger.debug(`Detected missed cache on usernotes retrieval while batch (${this.batchCount}) save is in progress, executing save immediately before retrieving new notes...`);
clearTimeout(this.saveDebounce);
await this.debounceCB();
this.debounceCB = undefined;
this.saveDebounce = undefined;
}
// DISABLED for now because I think its causing issues
// if(cacheMiss && this.debounceCB !== undefined) {
// // timeout is still delayed. its our wiki data and we want it now! cm cacheworth 877 cache now
// this.logger.debug(`Detected missed cache on usernotes retrieval while batch (${this.batchCount}) save is in progress, executing save immediately before retrieving new notes...`);
// clearTimeout(this.saveDebounce);
// await this.debounceCB();
// this.debounceCB = undefined;
// this.saveDebounce = undefined;
// }
// @ts-ignore
this.wiki = await this.subreddit.getWikiPage('usernotes').fetch();
const wikiContent = this.wiki.content_md;
@@ -169,7 +179,7 @@ export class UserNotes {
userNotes.blob = inflateUserNotes(userNotes.blob);
if (this.notesTTL > 0) {
if (this.notesTTL !== false) {
await this.cache.set(`${this.subreddit.display_name}-usernotes`, userNotes, {ttl: this.notesTTL});
this.users = new Map();
}
@@ -187,30 +197,36 @@ export class UserNotes {
const blob = deflateUserNotes(payload.blob);
const wikiPayload = {text: JSON.stringify({...payload, blob}), reason: 'ContextBot edited usernotes'};
try {
if (this.notesTTL > 0) {
if (this.notesTTL !== false) {
// DISABLED for now because if it fails throws an uncaught rejection
// and need to figured out how to handle this other than just logging (want to interrupt action flow too?)
//
// debounce usernote save by 5 seconds -- effectively batch usernote saves
//
// so that if we are processing a ton of checks that write user notes we aren't calling to save the wiki page on every call
// since we also have everything in cache (most likely...)
//
// TODO might want to increase timeout to 10 seconds
if(this.saveDebounce !== undefined) {
clearTimeout(this.saveDebounce);
}
this.debounceCB = (async function () {
const p = wikiPayload;
// @ts-ignore
const self = this as UserNotes;
// @ts-ignore
self.wiki = await self.subreddit.getWikiPage('usernotes').edit(p);
self.logger.debug(`Batch saved ${self.batchCount} usernotes`);
self.debounceCB = undefined;
self.saveDebounce = undefined;
self.batchCount = 0;
}).bind(this);
this.saveDebounce = setTimeout(this.debounceCB,5000);
this.batchCount++;
this.logger.debug(`Saving Usernotes has been debounced for 5 seconds (${this.batchCount} batched)`)
// if(this.saveDebounce !== undefined) {
// clearTimeout(this.saveDebounce);
// }
// this.debounceCB = (async function () {
// const p = wikiPayload;
// // @ts-ignore
// const self = this as UserNotes;
// // @ts-ignore
// self.wiki = await self.subreddit.getWikiPage('usernotes').edit(p);
// self.logger.debug(`Batch saved ${self.batchCount} usernotes`);
// self.debounceCB = undefined;
// self.saveDebounce = undefined;
// self.batchCount = 0;
// }).bind(this);
// this.saveDebounce = setTimeout(this.debounceCB,5000);
// this.batchCount++;
// this.logger.debug(`Saving Usernotes has been debounced for 5 seconds (${this.batchCount} batched)`)
// @ts-ignore
await this.subreddit.getWikiPage('usernotes').edit(wikiPayload);
await this.cache.set(this.identifier, payload, {ttl: this.notesTTL});
this.users = new Map();
} else {
@@ -220,7 +236,13 @@ export class UserNotes {
return payload as RawUserNotesPayload;
} catch (err) {
const msg = `Could not edit usernotes. Make sure at least one moderator has used toolbox and usernotes before and that this account has editing permissions`;
let msg = 'Could not edit usernotes.';
// Make sure at least one moderator has used toolbox and usernotes before and that this account has editing permissions`;
if(isScopeError(err)) {
msg = `${msg} The bot account did not have sufficient OAUTH scope to perform this action. You must re-authenticate the bot and ensure it has has 'wikiedit' permissions.`
} else {
msg = `${msg} Make sure at least one moderator has used toolbox, created a usernote, and that this account has editing permissions for the wiki page.`;
}
this.logger.error(msg, err);
throw new LoggedError(msg);
}

View File

@@ -1,4 +1,6 @@
import Snoowrap from "snoowrap";
import Snoowrap, {Listing} from "snoowrap";
import {Subreddit} from "snoowrap/dist/objects";
import {parseSubredditName} from "../util";
// const proxyFactory = (endpoint: string) => {
// return class ProxiedSnoowrap extends Snoowrap {
@@ -12,7 +14,28 @@ import Snoowrap from "snoowrap";
// }
// }
export class RequestTrackingSnoowrap extends Snoowrap {
export class ExtendedSnoowrap extends Snoowrap {
/**
* https://www.reddit.com/r/redditdev/comments/jfltfx/comment/g9le48w/?utm_source=reddit&utm_medium=web2x&context=3
* */
async getManySubreddits(subs: (Subreddit | string)[]): Promise<Listing<Subreddit>> {
// parse all names
const names = subs.map(x => {
if(typeof x !== 'string') {
return x.display_name;
}
try {
return parseSubredditName(x);
} catch (err) {
return x;
}
});
return await this.oauthRequest({uri: '/api/info', method: 'get', qs: { sr_name: names.join(',')}}) as Listing<Subreddit>;
}
}
export class RequestTrackingSnoowrap extends ExtendedSnoowrap {
requestCount: number = 0;
oauthRequest(...args: any) {

View File

@@ -13,10 +13,16 @@ import {
TypedActivityStates
} from "../Common/interfaces";
import {
compareDurationValue, comparisonTextOp,
isActivityWindowCriteria,
normalizeName, parseDuration,
parseDurationComparison, parseGenericValueComparison, parseGenericValueOrPercentComparison, parseSubredditName,
compareDurationValue,
comparisonTextOp, escapeRegex, getActivityAuthorName,
isActivityWindowCriteria, isStatusError,
normalizeName,
parseDuration,
parseDurationComparison,
parseGenericValueComparison,
parseGenericValueOrPercentComparison,
parseRuleResultsToMarkdownSummary, parseStringToRegex,
parseSubredditName,
truncateStringToLength
} from "../util";
import UserNotes from "../Subreddit/UserNotes";
@@ -119,17 +125,25 @@ export async function getAuthorActivities(user: RedditUser, options: AuthorTyped
let items: Array<Submission | Comment> = [];
//let count = 1;
let listing;
switch (options.type) {
case 'comment':
listing = await user.getComments({limit: chunkSize});
break;
case 'submission':
listing = await user.getSubmissions({limit: chunkSize});
break;
default:
listing = await user.getOverview({limit: chunkSize});
break;
let listing = [];
try {
switch (options.type) {
case 'comment':
listing = await user.getComments({limit: chunkSize});
break;
case 'submission':
listing = await user.getSubmissions({limit: chunkSize});
break;
default:
listing = await user.getOverview({limit: chunkSize});
break;
}
} catch (err) {
if(isStatusError(err) && err.statusCode === 404) {
throw new SimpleError('Reddit returned a 404 for user history. Likely this user is shadowbanned.');
} else {
throw err;
}
}
let hitEnd = false;
let offset = chunkSize;
@@ -305,185 +319,239 @@ export const renderContent = async (template: string, data: (Submission | Commen
};
}, {});
const view = {item: templateData, rules: normalizedRuleResults};
const view = {item: templateData, ruleSummary: parseRuleResultsToMarkdownSummary(ruleResults), rules: normalizedRuleResults};
const rendered = Mustache.render(template, view) as string;
return he.decode(rendered);
}
export const testAuthorCriteria = async (item: (Comment | Submission), authorOpts: AuthorCriteria, include = true, userNotes: UserNotes) => {
// @ts-ignore
const author: RedditUser = await item.author;
for (const k of Object.keys(authorOpts)) {
// @ts-ignore
if (authorOpts[k] !== undefined) {
switch (k) {
case 'name':
const authPass = () => {
// @ts-ignore
for (const n of authorOpts[k]) {
if (n.toLowerCase() === author.name.toLowerCase()) {
return true;
}
}
return false;
}
const authResult = authPass();
if ((include && !authResult) || (!include && authResult)) {
return false;
}
break;
case 'flairCssClass':
const css = await item.author_flair_css_class;
const cssPass = () => {
// @ts-ignore
for (const c of authorOpts[k]) {
if (c === css) {
return;
}
}
return false;
}
const cssResult = cssPass();
if ((include && !cssResult) || (!include && cssResult)) {
return false;
}
break;
case 'flairText':
const text = await item.author_flair_text;
const textPass = () => {
// @ts-ignore
for (const c of authorOpts[k]) {
if (c === text) {
return
}
}
return false;
};
const textResult = textPass();
if ((include && !textResult) || (!include && textResult)) {
return false;
}
break;
case 'isMod':
const mods: RedditUser[] = await item.subreddit.getModerators();
const isModerator = mods.some(x => x.name === item.author.name);
const modMatch = authorOpts.isMod === isModerator;
if ((include && !modMatch) || (!include && modMatch)) {
return false;
}
break;
case 'age':
const ageTest = compareDurationValue(parseDurationComparison(await authorOpts.age as string), dayjs.unix(await item.author.created));
if ((include && !ageTest) || (!include && ageTest)) {
return false;
}
break;
case 'linkKarma':
const lkCompare = parseGenericValueOrPercentComparison(await authorOpts.linkKarma as string);
let lkMatch;
if (lkCompare.isPercent) {
// @ts-ignore
const tk = author.total_karma as number;
lkMatch = comparisonTextOp(author.link_karma / tk, lkCompare.operator, lkCompare.value / 100);
} else {
lkMatch = comparisonTextOp(author.link_karma, lkCompare.operator, lkCompare.value);
}
if ((include && !lkMatch) || (!include && lkMatch)) {
return false;
}
break;
case 'commentKarma':
const ckCompare = parseGenericValueOrPercentComparison(await authorOpts.commentKarma as string);
let ckMatch;
if (ckCompare.isPercent) {
// @ts-ignore
const ck = author.total_karma as number;
ckMatch = comparisonTextOp(author.comment_karma / ck, ckCompare.operator, ckCompare.value / 100);
} else {
ckMatch = comparisonTextOp(author.comment_karma, ckCompare.operator, ckCompare.value);
}
if ((include && !ckMatch) || (!include && ckMatch)) {
return false;
}
break;
case 'totalKarma':
const tkCompare = parseGenericValueComparison(await authorOpts.totalKarma as string);
if (tkCompare.isPercent) {
throw new SimpleError(`'totalKarma' value on AuthorCriteria cannot be a percentage`);
}
// @ts-ignore
const totalKarma = author.total_karma as number;
const tkMatch = comparisonTextOp(totalKarma, tkCompare.operator, tkCompare.value);
if ((include && !tkMatch) || (!include && tkMatch)) {
return false;
}
break;
case 'verified':
const vMatch = await author.has_verified_mail === authorOpts.verified as boolean;
if ((include && !vMatch) || (!include && vMatch)) {
return false;
}
break;
case 'userNotes':
const notes = await userNotes.getUserNotes(item.author);
const notePass = () => {
for (const noteCriteria of authorOpts[k] as UserNoteCriteria[]) {
const {count = '>= 1', search = 'current', type} = noteCriteria;
const {
value,
operator,
isPercent,
extra = ''
} = parseGenericValueOrPercentComparison(count);
const order = extra.includes('asc') ? 'ascending' : 'descending';
switch (search) {
case 'current':
if (notes.length > 0 && notes[notes.length - 1].noteType === type) {
return true;
}
break;
case 'consecutive':
let orderedNotes = notes;
if (order === 'descending') {
orderedNotes = [...notes];
orderedNotes.reverse();
}
let currCount = 0;
for (const note of orderedNotes) {
if (note.noteType === type) {
currCount++;
} else {
currCount = 0;
}
if (isPercent) {
throw new SimpleError(`When comparing UserNotes with 'consecutive' search 'count' cannot be a percentage. Given: ${count}`);
}
if (comparisonTextOp(currCount, operator, value)) {
return true;
}
}
break;
case 'total':
if (isPercent) {
if (comparisonTextOp(notes.filter(x => x.noteType === type).length / notes.length, operator, value / 100)) {
return true;
}
} else if (comparisonTextOp(notes.filter(x => x.noteType === type).length, operator, value)) {
return true;
}
}
}
return false;
}
const noteResult = notePass();
if ((include && !noteResult) || (!include && noteResult)) {
return false;
}
break;
const {shadowBanned, ...rest} = authorOpts;
if(shadowBanned !== undefined) {
try {
// @ts-ignore
await item.author.fetch();
// user is not shadowbanned
// if criteria specifies they SHOULD be shadowbanned then return false now
if(shadowBanned) {
return false;
}
} catch (err) {
if(isStatusError(err) && err.statusCode === 404) {
// user is shadowbanned
// if criteria specifies they should not be shadowbanned then return false now
if(!shadowBanned) {
return false;
}
} else {
throw err;
}
}
}
return true;
try {
const authorName = getActivityAuthorName(item.author);
for (const k of Object.keys(rest)) {
// @ts-ignore
if (authorOpts[k] !== undefined) {
switch (k) {
case 'name':
const authPass = () => {
// @ts-ignore
for (const n of authorOpts[k]) {
if (n.toLowerCase() === authorName.toLowerCase()) {
return true;
}
}
return false;
}
const authResult = authPass();
if ((include && !authResult) || (!include && authResult)) {
return false;
}
break;
case 'flairCssClass':
const css = await item.author_flair_css_class;
const cssPass = () => {
// @ts-ignore
for (const c of authorOpts[k]) {
if (c === css) {
return true;
}
}
return false;
}
const cssResult = cssPass();
if ((include && !cssResult) || (!include && cssResult)) {
return false;
}
break;
case 'flairText':
const text = await item.author_flair_text;
const textPass = () => {
// @ts-ignore
for (const c of authorOpts[k]) {
if (c === text) {
return true;
}
}
return false;
};
const textResult = textPass();
if ((include && !textResult) || (!include && textResult)) {
return false;
}
break;
case 'isMod':
const mods: RedditUser[] = await item.subreddit.getModerators();
const isModerator = mods.some(x => x.name === authorName);
const modMatch = authorOpts.isMod === isModerator;
if ((include && !modMatch) || (!include && modMatch)) {
return false;
}
break;
case 'age':
const ageTest = compareDurationValue(parseDurationComparison(await authorOpts.age as string), dayjs.unix(await item.author.created));
if ((include && !ageTest) || (!include && ageTest)) {
return false;
}
break;
case 'linkKarma':
const lkCompare = parseGenericValueOrPercentComparison(await authorOpts.linkKarma as string);
let lkMatch;
if (lkCompare.isPercent) {
// @ts-ignore
const tk = await item.author.total_karma as number;
lkMatch = comparisonTextOp(item.author.link_karma / tk, lkCompare.operator, lkCompare.value / 100);
} else {
lkMatch = comparisonTextOp(item.author.link_karma, lkCompare.operator, lkCompare.value);
}
if ((include && !lkMatch) || (!include && lkMatch)) {
return false;
}
break;
case 'commentKarma':
const ckCompare = parseGenericValueOrPercentComparison(await authorOpts.commentKarma as string);
let ckMatch;
if (ckCompare.isPercent) {
// @ts-ignore
const ck = await item.author.total_karma as number;
ckMatch = comparisonTextOp(item.author.comment_karma / ck, ckCompare.operator, ckCompare.value / 100);
} else {
ckMatch = comparisonTextOp(item.author.comment_karma, ckCompare.operator, ckCompare.value);
}
if ((include && !ckMatch) || (!include && ckMatch)) {
return false;
}
break;
case 'totalKarma':
const tkCompare = parseGenericValueComparison(await authorOpts.totalKarma as string);
if (tkCompare.isPercent) {
throw new SimpleError(`'totalKarma' value on AuthorCriteria cannot be a percentage`);
}
// @ts-ignore
const totalKarma = await item.author.total_karma as number;
const tkMatch = comparisonTextOp(totalKarma, tkCompare.operator, tkCompare.value);
if ((include && !tkMatch) || (!include && tkMatch)) {
return false;
}
break;
case 'verified':
const vMatch = await item.author.has_verified_mail === authorOpts.verified as boolean;
if ((include && !vMatch) || (!include && vMatch)) {
return false;
}
break;
case 'description':
// @ts-ignore
const desc = await item.author.subreddit?.display_name.public_description;
const dVals = authorOpts[k] as string[];
let passed = false;
for(const val of dVals) {
let reg = parseStringToRegex(val, 'i');
if(reg === undefined) {
reg = parseStringToRegex(`/.*${escapeRegex(val.trim())}.*/`, 'i');
if(reg === undefined) {
throw new SimpleError(`Could not convert 'description' value to a valid regex: ${authorOpts[k] as string}`);
}
}
if(reg.test(desc)) {
passed = true;
break;
}
}
if(!passed) {
return false;
}
break;
case 'userNotes':
const notes = await userNotes.getUserNotes(item.author);
const notePass = () => {
for (const noteCriteria of authorOpts[k] as UserNoteCriteria[]) {
const {count = '>= 1', search = 'current', type} = noteCriteria;
const {
value,
operator,
isPercent,
extra = ''
} = parseGenericValueOrPercentComparison(count);
const order = extra.includes('asc') ? 'ascending' : 'descending';
switch (search) {
case 'current':
if (notes.length > 0 && notes[notes.length - 1].noteType === type) {
return true;
}
break;
case 'consecutive':
let orderedNotes = notes;
if (order === 'descending') {
orderedNotes = [...notes];
orderedNotes.reverse();
}
let currCount = 0;
for (const note of orderedNotes) {
if (note.noteType === type) {
currCount++;
} else {
currCount = 0;
}
if (isPercent) {
throw new SimpleError(`When comparing UserNotes with 'consecutive' search 'count' cannot be a percentage. Given: ${count}`);
}
if (comparisonTextOp(currCount, operator, value)) {
return true;
}
}
break;
case 'total':
if (isPercent) {
if (comparisonTextOp(notes.filter(x => x.noteType === type).length / notes.length, operator, value / 100)) {
return true;
}
} else if (comparisonTextOp(notes.filter(x => x.noteType === type).length, operator, value)) {
return true;
}
}
}
return false;
}
const noteResult = notePass();
if ((include && !noteResult) || (!include && noteResult)) {
return false;
}
break;
}
}
}
return true;
} catch (err) {
if(isStatusError(err) && err.statusCode === 404) {
throw new SimpleError('Reddit returned a 404 while trying to retrieve User profile. It is likely this user is shadowbanned.');
} else {
throw err;
}
}
}
export interface ItemContent {
@@ -504,7 +572,8 @@ export const itemContentPeek = async (item: (Comment | Submission), peekLength =
peek = `${truncatePeek(item.title)} by ${author} https://reddit.com${item.permalink}`;
} else if (item instanceof Comment) {
content = truncatePeek(item.body);
// replace newlines with spaces to make peek more compact
content = truncatePeek(item.body.replaceAll('\n', ' '));
peek = `${truncatePeek(content)} by ${author} in https://reddit.com${item.permalink}`;
}
@@ -606,18 +675,29 @@ export const getAttributionIdentifier = (sub: Submission, useParentMediaDomain =
if (displayDomain === '') {
displayDomain = domain;
}
if(domainIdents.length === 0 && domain !== '') {
domainIdents.push(domain);
}
return {display: displayDomain, domain, aliases: domainIdents, provider, mediaType};
}
export const activityIsRemoved = (item: Submission | Comment): boolean => {
if (item instanceof Submission) {
// when automod filters a post it gets this category
return item.banned_at_utc !== null && item.removed_by_category !== 'automod_filtered';
if(item.can_mod_post) {
if (item instanceof Submission) {
// when automod filters a post it gets this category
return item.banned_at_utc !== null && item.removed_by_category !== 'automod_filtered';
}
// when automod filters a comment item.removed === false
// so if we want to processing filtered comments we need to check for this
return item.banned_at_utc !== null && item.removed;
} else {
if (item instanceof Submission) {
return item.removed_by_category === 'moderator' || item.removed_by_category === 'deleted';
}
// in subreddits the bot does not mod it is not possible to tell the difference between a comment that was removed by the user and one that was removed by a mod
return item.body === '[removed]';
}
// when automod filters a comment item.removed === false
// so if we want to processing filtered comments we need to check for this
return item.banned_at_utc !== null && item.removed;
}
export const activityIsFiltered = (item: Submission | Comment): boolean => {

View File

@@ -8,12 +8,13 @@ import passport from 'passport';
import {Strategy as CustomStrategy} from 'passport-custom';
import {OperatorConfig, BotConnection, LogInfo} from "../../Common/interfaces";
import {
buildCachePrefix,
createCacheManager, filterLogBySubreddit,
formatLogLineToHtml,
intersect, isLogLineMinLevel,
LogEntry, parseFromJsonOrYamlToObject, parseInstanceLogInfoName, parseInstanceLogName,
parseSubredditLogName, permissions,
randomId, sleep
randomId, sleep, triggeredIndicator
} from "../../util";
import {Cache} from "cache-manager";
import session, {Session, SessionData} from "express-session";
@@ -42,6 +43,7 @@ import {booleanMiddle} from "../Common/middleware";
import {BotInstance, CMInstance} from "../interfaces";
import { URL } from "url";
import {MESSAGE} from "triple-beam";
import Autolinker from "autolinker";
const emitter = new EventEmitter();
@@ -119,9 +121,16 @@ const webClient = async (options: OperatorConfig) => {
},
web: {
port,
caching,
caching: {
prefix
},
invites: {
maxAge: invitesMaxAge,
},
session: {
provider,
secret,
maxAge: sessionMaxAge,
},
maxLogs,
clients,
@@ -134,8 +143,6 @@ const webClient = async (options: OperatorConfig) => {
},
} = options;
const connectedUsers: ConnectUserObj = {};
const webOps = operators.map(x => x.toLowerCase());
const logger = getLogger({defaultLabel: 'Web', ...options.logging}, 'Web');
@@ -159,11 +166,15 @@ const webClient = async (options: OperatorConfig) => {
throw new SimpleError(`Specified port for web interface (${port}) is in use or not available. Cannot start web server.`);
}
if (provider.store === 'none') {
logger.warn(`Cannot use 'none' for session store or else no one can use the interface...falling back to 'memory'`);
provider.store = 'memory';
if (caching.store === 'none') {
logger.warn(`Cannot use 'none' for web caching or else no one can use the interface...falling back to 'memory'`);
caching.store = 'memory';
}
//const webCache = createCacheManager(provider) as Cache;
//const webCachePrefix = buildCachePrefix([prefix, 'web']);
const webCache = createCacheManager({...caching, prefix: buildCachePrefix([prefix, 'web'])}) as Cache;
//const previousSessions = await webCache.get
const connectedUsers: ConnectUserObj = {};
//<editor-fold desc=Session and Auth>
/*
@@ -217,9 +228,9 @@ const webClient = async (options: OperatorConfig) => {
const sessionObj = session({
cookie: {
maxAge: provider.ttl,
maxAge: sessionMaxAge * 1000,
},
store: new CacheManagerStore(createCacheManager(provider) as Cache),
store: new CacheManagerStore(webCache, {prefix: 'sess:'}),
resave: false,
saveUninitialized: false,
secret,
@@ -279,7 +290,7 @@ const webClient = async (options: OperatorConfig) => {
return res.render('error', {error: errContent});
}
// @ts-ignore
const invite = invites.get(req.session.inviteId) as inviteData;
const invite = await webCache.get(`invite:${req.session.inviteId}`) as InviteData;
const client = await Snoowrap.fromAuthCode({
userAgent: `web:contextBot:web`,
clientId: invite.clientId,
@@ -290,7 +301,7 @@ const webClient = async (options: OperatorConfig) => {
// @ts-ignore
const user = await client.getMe();
// @ts-ignore
invites.delete(req.session.inviteId);
await webCache.del(`invite:${req.session.inviteId}`);
let data: any = {
accessToken: client.accessToken,
refreshToken: client.refreshToken,
@@ -346,7 +357,7 @@ const webClient = async (options: OperatorConfig) => {
});
let token = randomId();
interface inviteData {
interface InviteData {
permissions: string[],
subreddit?: string,
instance?: string,
@@ -355,7 +366,6 @@ const webClient = async (options: OperatorConfig) => {
redirectUri: string
creator: string
}
const invites: Map<string, inviteData> = new Map();
const helperAuthed = async (req: express.Request, res: express.Response, next: Function) => {
@@ -386,14 +396,14 @@ const webClient = async (options: OperatorConfig) => {
});
});
app.getAsync('/auth/invite', (req, res) => {
app.getAsync('/auth/invite', async (req, res) => {
const {invite: inviteId} = req.query;
if(inviteId === undefined) {
return res.render('error', {error: '`invite` param is missing from URL'});
}
const invite = invites.get(inviteId as string);
if(invite === undefined) {
const invite = await webCache.get(`invite:${inviteId}`) as InviteData | undefined | null;
if(invite === undefined || invite === null) {
return res.render('error', {error: 'Invite with the given id does not exist'});
}
@@ -429,7 +439,7 @@ const webClient = async (options: OperatorConfig) => {
}
const inviteId = code || randomId();
invites.set(inviteId, {
await webCache.set(`invite:${inviteId}`, {
permissions,
clientId: (ci || clientId).trim(),
clientSecret: (ce || clientSecret).trim(),
@@ -437,7 +447,7 @@ const webClient = async (options: OperatorConfig) => {
instance,
subreddit,
creator: (req.user as Express.User).name,
});
}, {ttl: invitesMaxAge * 1000});
return res.send(inviteId);
});
@@ -446,8 +456,8 @@ const webClient = async (options: OperatorConfig) => {
if(inviteId === undefined) {
return res.render('error', {error: '`invite` param is missing from URL'});
}
const invite = invites.get(inviteId as string);
if(invite === undefined) {
const invite = await webCache.get(`invite:${inviteId}`) as InviteData | undefined | null;
if(invite === undefined || invite === null) {
return res.render('error', {error: 'Invite with the given id does not exist'});
}
@@ -643,9 +653,9 @@ const webClient = async (options: OperatorConfig) => {
req.session.level = 'verbose';
req.session.sort = 'descending';
req.session.save();
// @ts-ignore
connectedUsers[req.session.id] = {};
}
// @ts-ignore
connectedUsers[req.session.id] = {};
next();
}
@@ -822,6 +832,73 @@ const webClient = async (options: OperatorConfig) => {
return res.send(resp);
});
app.getAsync('/events', [ensureAuthenticatedApi, defaultSession, instanceWithPermissions, botWithPermissions, createUserToken], async (req: express.Request, res: express.Response) => {
const {subreddit} = req.query as any;
const resp = await got.get(`${(req.instance as CMInstance).normalUrl}/events`, {
headers: {
'Authorization': `Bearer ${req.token}`,
},
searchParams: {
subreddit,
bot: req.bot?.botName
}
}).json() as [any];
return res.render('events', {
data: resp.map((x) => {
const {timestamp, activity: {peek, link}, ruleResults = [], actionResults = [], ...rest} = x;
const time = dayjs(timestamp).local().format('YY-MM-DD HH:mm:ss z');
const formattedPeek = Autolinker.link(peek, {
email: false,
phone: false,
mention: false,
hashtag: false,
stripPrefix: false,
sanitizeHtml: true,
});
const formattedRuleResults = ruleResults.map((y: any) => {
const {triggered, result, ...restY} = y;
let t = triggeredIndicator(false);
if(triggered === null) {
t = 'Skipped';
} else if(triggered === true) {
t = triggeredIndicator(true);
}
return {
...restY,
triggered: t,
result: result || '-'
};
});
const formattedActionResults = actionResults.map((y: any) => {
const {run, runReason, success, result, dryRun, ...restA} = y;
let res = '';
if(!run) {
res = `Not Run - ${runReason === undefined ? '(No Reason)' : runReason}`;
} else {
res = `${triggeredIndicator(success)}${result !== undefined ? ` - ${result}` : ''}`;
}
return {
...restA,
dryRun: dryRun ? ' (DRYRUN)' : '',
result: res
};
});
return {
...rest,
timestamp: time,
activity: {
link,
peek: formattedPeek,
},
ruleResults: formattedRuleResults,
actionResults: formattedActionResults
}
}),
title: `${subreddit !== undefined ? `${subreddit} ` : ''}Actioned Events`
});
});
app.getAsync('/logs/settings/update',[ensureAuthenticated], async (req: express.Request, res: express.Response) => {
const e = req.query;
for (const [setting, val] of Object.entries(req.query)) {

View File

@@ -1,13 +1,10 @@
import {BotStats, BotStatusResponse, SubredditDataResponse} from "./interfaces";
import {ManagerStats, RunningState} from "../../Subreddit/Manager";
import {Invokee, RunState} from "../../Common/interfaces";
import {cacheStats} from "../../util";
import {RunningState} from "../../Subreddit/Manager";
import {Invokee, ManagerStats, RunState} from "../../Common/interfaces";
import {cacheStats, createHistoricalStatsDisplay} from "../../util";
import {createHistoricalDefaults, historicalDefaults} from "../../Common/defaults";
const managerStats: ManagerStats = {
actionsRun: 0,
actionsRunSinceStart: 0,
actionsRunSinceStartTotal: 0,
actionsRunTotal: 0,
cache: {
currentKeyCount: 0,
isShared: false,
@@ -18,22 +15,12 @@ const managerStats: ManagerStats = {
totalRequests: 0,
types: cacheStats()
},
checksRunSinceStartTotal: 0,
checksRunTotal: 0,
checksTriggered: 0,
checksTriggeredSinceStart: 0,
checksTriggeredSinceStartTotal: 0,
checksTriggeredTotal: 0,
historical: {
lastReload: createHistoricalStatsDisplay(createHistoricalDefaults()),
allTime: createHistoricalStatsDisplay(createHistoricalDefaults()),
},
eventsAvg: 0,
eventsCheckedSinceStartTotal: 0,
eventsCheckedTotal: 0,
rulesAvg: 0,
rulesCachedSinceStartTotal: 0,
rulesCachedTotal: 0,
rulesRunSinceStartTotal: 0,
rulesRunTotal: 0,
rulesTriggeredSinceStartTotal: 0,
rulesTriggeredTotal: 0
};
const botStats: BotStats = {
apiAvg: '-',

View File

@@ -1,4 +1,5 @@
import {ManagerStats, RunningState} from "../../Subreddit/Manager";
import {RunningState} from "../../Subreddit/Manager";
import {ManagerStats} from "../../Common/interfaces";
export interface BotStats {
startedAtHuman: string,

View File

@@ -31,3 +31,26 @@ export const botRoute = (required = true) => async (req: Request, res: Response,
}
return next();
}
export const subredditRoute = (required = true) => async (req: Request, res: Response, next: Function) => {
const bot = req.serverBot;
const {subreddit} = req.query as any;
if(subreddit === undefined && required === false) {
next();
} else {
const {name: userName, realManagers = [], isOperator} = req.user as Express.User;
if (!isOperator && !realManagers.includes(subreddit)) {
return res.status(400).send('Cannot access route for subreddit you do not manage or is not run by the bot')
}
const manager = bot.subManagers.find(x => x.displayLabel === subreddit);
if (manager === undefined) {
return res.status(400).send('Cannot access route for subreddit you do not manage or is not run by the bot')
}
req.manager = manager;
next();
}
}

View File

@@ -67,7 +67,7 @@ const action = async (req: express.Request, res: express.Response) => {
if (type === 'unmoderated') {
const activities = await manager.subreddit.getUnmoderated({limit: 100});
for (const a of activities.reverse()) {
await manager.queue.push({
await manager.firehose.push({
checkType: a instanceof Submission ? 'Submission' : 'Comment',
activity: a,
});
@@ -75,7 +75,7 @@ const action = async (req: express.Request, res: express.Response) => {
} else {
const activities = await manager.subreddit.getModqueue({limit: 100});
for (const a of activities.reverse()) {
await manager.queue.push({
await manager.firehose.push({
checkType: a instanceof Submission ? 'Submission' : 'Comment',
activity: a,
});

View File

@@ -1,31 +1,51 @@
import {Request, Response} from 'express';
import {authUserCheck, botRoute} from "../../../middleware";
import {authUserCheck, botRoute, subredditRoute} from "../../../middleware";
import Submission from "snoowrap/dist/objects/Submission";
import winston from 'winston';
import {COMMENT_URL_ID, parseLinkIdentifier, SUBMISSION_URL_ID} from "../../../../../util";
import {booleanMiddle} from "../../../../Common/middleware";
import {Manager} from "../../../../../Subreddit/Manager";
import {ActionedEvent} from "../../../../../Common/interfaces";
const commentReg = parseLinkIdentifier([COMMENT_URL_ID]);
const submissionReg = parseLinkIdentifier([SUBMISSION_URL_ID]);
const config = async (req: Request, res: Response) => {
const bot = req.serverBot;
const {subreddit} = req.query as any;
const {name: userName, realManagers = [], isOperator} = req.user as Express.User;
if (!isOperator && !realManagers.includes(subreddit)) {
return res.status(400).send('Cannot retrieve config for subreddit you do not manage or is not run by the bot')
}
const manager = bot.subManagers.find(x => x.displayLabel === subreddit);
if (manager === undefined) {
return res.status(400).send('Cannot retrieve config for subreddit you do not manage or is not run by the bot')
}
const manager = req.manager as Manager;
// @ts-ignore
const wiki = await manager.subreddit.getWikiPage(manager.wikiLocation).fetch();
return res.send(wiki.content_md);
};
export const configRoute = [authUserCheck(), botRoute(), config];
export const configRoute = [authUserCheck(), botRoute(), subredditRoute(), config];
const actionedEvents = async (req: Request, res: Response) => {
let managers: Manager[] = [];
const manager = req.manager as Manager | undefined;
if(manager !== undefined) {
managers.push(manager);
} else {
for(const manager of req.serverBot.subManagers) {
if((req.user?.realManagers as string[]).includes(manager.displayLabel)) {
managers.push(manager);
}
}
}
let events: ActionedEvent[] = [];
for(const m of managers) {
if(m.resources !== undefined) {
events = events.concat(await m.resources.getActionedEvents());
}
}
events.sort((a, b) => b.timestamp - a.timestamp);
return res.json(events);
};
export const actionedEventsRoute = [authUserCheck(), botRoute(), subredditRoute(false), actionedEvents];
const action = async (req: Request, res: Response) => {
const bot = req.serverBot;

View File

@@ -160,13 +160,19 @@ const status = () => {
submissions: acc.checks.submissions + curr.checks.submissions,
comments: acc.checks.comments + curr.checks.comments,
},
eventsCheckedTotal: acc.eventsCheckedTotal + curr.stats.eventsCheckedTotal,
checksRunTotal: acc.checksRunTotal + curr.stats.checksRunTotal,
checksTriggeredTotal: acc.checksTriggeredTotal + curr.stats.checksTriggeredTotal,
rulesRunTotal: acc.rulesRunTotal + curr.stats.rulesRunTotal,
rulesCachedTotal: acc.rulesCachedTotal + curr.stats.rulesCachedTotal,
rulesTriggeredTotal: acc.rulesTriggeredTotal + curr.stats.rulesTriggeredTotal,
actionsRunTotal: acc.actionsRunTotal + curr.stats.actionsRunTotal,
historical: {
allTime: {
eventsCheckedTotal: acc.historical.allTime.eventsCheckedTotal + curr.stats.historical.allTime.eventsCheckedTotal,
eventsActionedTotal: acc.historical.allTime.eventsActionedTotal + curr.stats.historical.allTime.eventsActionedTotal,
checksRunTotal: acc.historical.allTime.checksRunTotal + curr.stats.historical.allTime.checksRunTotal,
checksFromCacheTotal: acc.historical.allTime.checksFromCacheTotal + curr.stats.historical.allTime.checksFromCacheTotal,
checksTriggeredTotal: acc.historical.allTime.checksTriggeredTotal + curr.stats.historical.allTime.checksTriggeredTotal,
rulesRunTotal: acc.historical.allTime.rulesRunTotal + curr.stats.historical.allTime.rulesRunTotal,
rulesCachedTotal: acc.historical.allTime.rulesCachedTotal + curr.stats.historical.allTime.rulesCachedTotal,
rulesTriggeredTotal: acc.historical.allTime.rulesTriggeredTotal + curr.stats.historical.allTime.rulesTriggeredTotal,
actionsRunTotal: acc.historical.allTime.actionsRunTotal + curr.stats.historical.allTime.actionsRunTotal,
}
},
maxWorkers: acc.maxWorkers + curr.maxWorkers,
subMaxWorkers: acc.subMaxWorkers + curr.subMaxWorkers,
globalMaxWorkers: acc.globalMaxWorkers + curr.globalMaxWorkers,
@@ -178,13 +184,19 @@ const status = () => {
submissions: 0,
comments: 0,
},
eventsCheckedTotal: 0,
checksRunTotal: 0,
checksTriggeredTotal: 0,
rulesRunTotal: 0,
rulesCachedTotal: 0,
rulesTriggeredTotal: 0,
actionsRunTotal: 0,
historical: {
allTime: {
eventsCheckedTotal: 0,
eventsActionedTotal: 0,
checksRunTotal: 0,
checksFromCacheTotal: 0,
checksTriggeredTotal: 0,
rulesRunTotal: 0,
rulesCachedTotal: 0,
rulesTriggeredTotal: 0,
actionsRunTotal: 0,
}
},
maxWorkers: 0,
subMaxWorkers: 0,
globalMaxWorkers: 0,
@@ -221,7 +233,8 @@ const status = () => {
}, cumRaw);
const cacheReq = subManagerData.reduce((acc, curr) => acc + curr.stats.cache.totalRequests, 0);
const cacheMiss = subManagerData.reduce((acc, curr) => acc + curr.stats.cache.totalMiss, 0);
const aManagerWithDefaultResources = bot.subManagers.find(x => x.resources !== undefined && x.resources.cacheSettingsHash === 'default');
const sharedSub = subManagerData.find(x => x.stats.cache.isShared);
const sharedCount = sharedSub !== undefined ? sharedSub.stats.cache.currentKeyCount : 0;
let allManagerData: any = {
name: 'All',
status: bot.running ? 'RUNNING' : 'NOT RUNNING',
@@ -243,7 +256,7 @@ const status = () => {
stats: {
...rest,
cache: {
currentKeyCount: aManagerWithDefaultResources !== undefined ? await aManagerWithDefaultResources.resources.getCacheKeyCount() : 'N/A',
currentKeyCount: sharedCount + subManagerData.reduce((acc, curr) => curr.stats.cache.isShared ? acc : acc + curr.stats.cache.currentKeyCount,0),
isShared: false,
totalRequests: cacheReq,
totalMiss: cacheMiss,

View File

@@ -22,7 +22,7 @@ import SimpleError from "../../Utils/SimpleError";
import {heartbeat} from "./routes/authenticated/applicationRoutes";
import logs from "./routes/authenticated/user/logs";
import status from './routes/authenticated/user/status';
import {actionRoute, configRoute} from "./routes/authenticated/user";
import {actionedEventsRoute, actionRoute, configRoute} from "./routes/authenticated/user";
import action from "./routes/authenticated/user/action";
import {authUserCheck, botRoute} from "./middleware";
import {opStats} from "../Common/util";
@@ -189,6 +189,8 @@ const rcbServer = async function (options: OperatorConfig) {
server.getAsync('/config', ...configRoute);
server.getAsync('/events', ...actionedEventsRoute);
server.getAsync('/action', ...action);
server.getAsync('/check', ...actionRoute);

View File

@@ -4,7 +4,7 @@ a {
.loading {
height: 35px;
fill: black;
fill: white;
display: none;
}
@@ -12,10 +12,6 @@ a {
display: inline;
}
.dark .loading {
fill: white;
}
.sub {
display: none;
}
@@ -91,3 +87,19 @@ a {
pointer-events: initial;
text-decoration: initial;
}
.blue {
color: rgb(97, 175, 239)
}
.red {
color: rgb(255 123 133);
}
.green {
color: rgb(170 255 109);
}
.purple {
color: rgb(224 124 253);
}
.yellow {
color: rgb(253 198 94);
}

View File

@@ -0,0 +1,10 @@
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<defs>
<symbol id="q" viewBox="0 0 24 24">
<path stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
</symbol>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 486 B

File diff suppressed because one or more lines are too long

View File

@@ -1,12 +1,11 @@
<html>
<html lang="en">
<%- include('partials/head', {title: 'CM OAuth Helper'}) %>
<body class="">
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
<body class="bg-gray-900 text-white font-sans">
<div class="min-w-screen min-h-screen">
<%- include('partials/title', {title: ' OAuth Helper'}) %>
<div class="container mx-auto">
<div class="grid">
<div class="bg-white dark:bg-gray-500 dark:text-white">
<div class="bg-gray-600">
<div class="p-6 md:px-10 md:py-6">
<div class="text-xl mb-4">Congrats! You did the thing.</div>
<div class="space-y-3">

View File

@@ -1,11 +1,6 @@
<html>
<html lang="en">
<head>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/tailwindcss/2.0.3/tailwind.min.css"
integrity="sha512-wl80ucxCRpLkfaCnbM88y4AxnutbGk327762eM9E/rRTvY/ZGAHWMZrYUq66VQBYMIYDFpDdJAOGSLyIPHZ2IQ=="
crossorigin="anonymous"/>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/tailwindcss/2.0.3/tailwind-dark.min.css"
integrity="sha512-WvyKyiVHgInX5UQt67447ExtRRZG/8GUijaq1MpqTNYp8wY4/EJOG5bI80sRp/5crDy4Z6bBUydZI2OFV3Vbtg=="
crossorigin="anonymous"/>
<link rel="stylesheet" href="/public/tailwind.min.css"/>
<script src="https://code.iconify.design/1/1.0.4/iconify.min.js"></script>
<link rel="stylesheet" href="/public/themeToggle.css">
<link rel="stylesheet" href="/public/app.css">
@@ -15,13 +10,13 @@
<meta name="viewport" content="width=device-width,initial-scale=1.0">
<!--icons from https://heroicons.com -->
</head>
<body style="user-select: none;" class="">
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
<body style="user-select: none;" class="bg-gray-900 text-white font-sans">
<div class="min-w-screen min-h-screen bg-gray-800">
<%- include('partials/title') %>
<div class="container mx-auto">
<div class="grid">
<div class="dark:text-white mb-3 pl-2">
<div class="my-3 pl-2">
Schema <a href="/config?schema=subreddit" id="subredditSchemaType">Subreddit</a> / <a href="/config?schema=operator" id="operatorSchemaType">Operator</a> |
<span class="has-tooltip">
<span style="z-index:999; margin-top: 30px;" class='tooltip rounded shadow-lg p-3 bg-gray-100 text-black space-y-2'>
<div>Copy + paste your configuration here to get:</div>
@@ -39,7 +34,6 @@
</ul>
<div>When done editing hit Ctrl+A (Command+A on macOS) to select all text, then copy + paste back into your wiki/file</div>
</span>
<span id="schemaType"></span> |
<span class="cursor-help">
How To Use
<span>
@@ -55,7 +49,7 @@
</span>
</span>
</span>
| <a id="schemaOpen" href="">Open With Operator Schema</a>
| <input id="configUrl" class="text-black placeholder-gray-500 rounded mx-2" style="min-width:400px;" placeholder="URL of a config to load"/> <a href="#" id="loadConfig">Load</a>
<div id="error" class="font-semibold"></div>
</div>
<div style="min-height: 80vh" id="editor"></div>
@@ -63,35 +57,6 @@
</div>
<%- include('partials/footer') %>
</div>
<script>
document.querySelectorAll('.theme').forEach(el => {
el.addEventListener('click', e => {
e.preventDefault();
if (e.target.id === 'dark') {
document.body.classList.add('dark');
localStorage.setItem('ms-dark', 'yes');
} else {
document.body.classList.remove('dark');
localStorage.setItem('ms-dark', 'no');
}
document.querySelectorAll('.theme').forEach(el => {
el.classList.remove('font-bold', 'no-underline', 'pointer-events-none');
});
e.target.classList.add('font-bold', 'no-underline', 'pointer-events-none');
})
})
document.querySelector("#themeToggle").checked = localStorage.getItem('ms-dark') !== 'no';
document.querySelector("#themeToggle").onchange = (e) => {
if (e.target.checked === true) {
document.body.classList.add('dark');
localStorage.setItem('ms-dark', 'yes');
} else {
document.body.classList.remove('dark');
localStorage.setItem('ms-dark', 'no');
}
}
</script>
<script src="/monaco/dev/vs/loader.js"></script>
<script>
require.config({ paths: { vs: 'monaco/dev/vs' } });
@@ -104,22 +69,22 @@
var searchParams = new URLSearchParams(window.location.search);
let schemaType;
let schemaFile;
if(searchParams.get('schema') === 'operator') {
schemaType = 'OperatorConfig.json';
schemaType = 'operator';
schemaFile = 'OperatorConfig.json';
preamble.push('// automatic validation of your OPERATOR configuration');
document.querySelector('#schemaTypeList').innerHTML = 'automatic validation of your OPERATOR configuration (yellow squiggly)';
document.querySelector('#schemaType').innerHTML = 'Operator Configuration';
document.querySelector('#schemaOpen').href = '/config?schema=subreddit';
document.querySelector('#schemaOpen').innerHTML = 'Open with Subreddit Schema';
document.querySelector('#operatorSchemaType').classList.add('font-bold', 'no-underline', 'pointer-events-none');
} else {
schemaType = 'App.json';
schemaType = 'subreddit';
schemaFile = 'App.json';
preamble.push('// automatic validation of your SUBREDDIT configuration');
document.querySelector('#schemaTypeList').innerHTML = 'automatic validation of your SUBREDDIT configuration (yellow squiggly)'
document.querySelector('#schemaType').innerHTML = 'Subreddit Configuration';
document.querySelector('#schemaOpen').href = '/config?schema=operator';
document.querySelector('#subredditSchemaType').classList.add('font-bold', 'no-underline', 'pointer-events-none');
}
const schemaUri = `${document.location.origin}/schemas/${schemaType}`;
const schemaUri = `${document.location.origin}/schemas/${schemaFile}`;
require(['vs/editor/editor.main'], function () {
const modelUri = monaco.Uri.parse("a://b/foo.json");
@@ -135,15 +100,44 @@
schema: schemaData
}]
});
if(searchParams.get('subreddit') !== null) {
fetch(`${document.location.origin}/config/content${document.location.search}`).then((resp) => {
var model = monaco.editor.createModel(preamble.join('\r\n'), "json", modelUri);
document.querySelector('#loadConfig').addEventListener('click', (e) => {
e.preventDefault();
const newUrl = document.querySelector('#configUrl').value;
fetch(newUrl).then((resp) => {
if(!resp.ok) {
resp.text().then(data => {
document.querySelector('#error').innerHTML = `Error occurred while fetching configuration => ${data}`
});
} else {
var sp = new URLSearchParams();
sp.append('schema', schemaType);
sp.append('url', newUrl);
history.pushState(null, '', `${window.location.pathname}?${sp.toString()}`);
resp.text().then(data => {
//model = monaco.editor.createModel(data, "json", modelUri);
model.setValue(data);
})
}
});
});
let dlUrl = searchParams.get('url');
if(dlUrl === null && searchParams.get('subreddit') !== null) {
dlUrl = `${document.location.origin}/config/content${document.location.search}`
}
if(dlUrl !== null) {
document.querySelector('#configUrl').value = dlUrl;
fetch(dlUrl).then((resp) => {
if(!resp.ok) {
resp.text().then(data => {
document.querySelector('#error').innerHTML = `Error occurred while fetching configuration => ${data}`
});
} else {
resp.text().then(data => {
var model = monaco.editor.createModel(data, "json", modelUri);
model.setValue(data);
//model = monaco.editor.createModel(data, "json", modelUri);
var editor = monaco.editor.create(document.getElementById('editor'), {
model,
theme: 'vs-dark',
@@ -154,9 +148,8 @@
editor;
})
}
})
});
} else {
var model = monaco.editor.createModel(preamble.join('\r\n'), "json", modelUri);
var editor = monaco.editor.create(document.getElementById('editor'), {
model,
theme: 'vs-dark',

View File

@@ -1,12 +1,11 @@
<html>
<html lang="en">
<%- include('partials/head', {title: 'CM'}) %>
<body class="">
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
<body class="bg-gray-900 text-white font-sans">
<div class="min-w-screen min-h-screen">
<%- include('partials/title', {title: 'Error'}) %>
<div class="container mx-auto">
<div class="grid">
<div class="bg-white dark:bg-gray-500 dark:text-white">
<div class="bg-gray-600">
<div class="p-6 md:px-10 md:py-6">
<div class="text-xl mb-4">Oops 😬</div>
<div class="space-y-3">

View File

@@ -0,0 +1,66 @@
<html lang="en">
<head>
<link rel="stylesheet" href="/public/tailwind.min.css"/>
<script src="https://code.iconify.design/1/1.0.4/iconify.min.js"></script>
<link rel="stylesheet" href="/public/themeToggle.css">
<link rel="stylesheet" href="/public/app.css">
<title><%= title %></title>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width,initial-scale=1.0">
<!--icons from https://heroicons.com -->
<style>
.peek a {
display: none;
}
</style>
</head>
<body class="bg-gray-900 text-white font-sans">
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen">
<%- include('partials/title') %>
<div class="container mx-auto">
<div class="grid">
<div class="px-3 py-6 space-y-3">
<% if(data.length === 0) { %>
No events have been actioned yet!
<% } %>
<% data.forEach(function (eRes){ %>
<div class="shadow-lg">
<div class="space-x-4 px-4 p-2 leading-2 font-semibold bg-gray-700">
<div class="flex items-center justify-between">
<div>
<span class="peek"><%- eRes.activity.peek %></span><a target="_blank" href="https://reddit.com<%= eRes.activity.link%>">(Link)</a>
</div>
<div class="flex items-center flex-end">
<%= eRes.subreddit %> @ <%= eRes.timestamp %>
</div>
</div>
</div>
<div class="p-4 pl-6 pt-3 space-y-2 bg-gray-500">
<div><span class="font-semibold">Check:</span> <%= eRes.check %><span class="px-3">&#10132;</span><%= eRes.ruleSummary %></div>
<div>
<span class="font-semibold">Rules:</span>
<ul class="list-inside list-disc">
<% eRes.ruleResults.forEach(function (ruleResult) { %>
<li><%= ruleResult.name %> - <%= ruleResult.triggered%> - <%= ruleResult.result %></li>
<% }) %>
</ul>
</div>
<div><span class="font-semibold">Actions</span>
<ul class="list-inside list-disc">
<% eRes.actionResults.forEach(function (aRes) { %>
<li><%= aRes.name %><%= aRes.dryRun %> - <%= aRes.result %></li>
<% }) %>
</ul>
</div>
</div>
</div>
<% }) %>
</div>
</div>
</div>
<%- include('partials/footer') %>
</div>
</body>
</html>

View File

@@ -1,12 +1,11 @@
<html>
<html lang="en">
<%- include('partials/head', {title: 'CM OAuth Helper'}) %>
<body class="">
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
<body class="bg-gray-900 text-white font-sans">
<div class="min-w-screen min-h-screen">
<%- include('partials/title', {title: ' OAuth Helper'}) %>
<div class="container mx-auto">
<div class="grid">
<div class="bg-white dark:bg-gray-500 dark:text-white">
<div class="bg-gray-600">
<div class="p-6 md:px-10 md:py-6">
<div class="text-xl mb-4">Hi! Looks like you're setting up your bot. To get running:</div>
<div class="text-lg text-semibold my-3">1. Set your redirect URL</div>

View File

@@ -1,12 +1,11 @@
<html>
<html lang="en">
<%- include('partials/head', {title: 'CM OAuth Helper'}) %>
<body class="">
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
<body class="bg-gray-900 text-white">
<div class="min-w-screen min-h-screen font-sans">
<%- include('partials/title', {title: ' OAuth Helper'}) %>
<div class="container mx-auto">
<div class="grid">
<div class="bg-white dark:bg-gray-500 dark:text-white">
<div class="bg-gray-600">
<div class="p-6 md:px-10 md:py-6">
<div class="text-xl mb-4">Hi! Looks like you're accepting an invite to authorize an account to run on this ContextMod instance:</div>
<div class="text-lg text-semibold my-3">1. Review permissions</div>

View File

@@ -1,12 +1,11 @@
<html>
<html lang="en">
<%- include('partials/head', {title: 'Access Denied'}) %>
<body class="">
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
<body class="bg-gray-900 text-white font-sans">
<div class="min-w-screen min-h-screen">
<%- include('partials/title', {title: ''}) %>
<div class="container mx-auto">
<div class="grid">
<div class="bg-white dark:bg-gray-500 dark:text-white">
<div class="bg-gray-600">
<div class="p-6 md:px-10 md:py-6">
<div class="text-xl mb-4">Sorry!</div>
<div class="space-y-3">
@@ -28,6 +27,5 @@
</div>
<%- include('partials/footer') %>
</div>
<%- include('partials/themeJs') %>
</body>
</html>

View File

@@ -1,14 +1,13 @@
<html>
<html lang="en">
<%- include('partials/head', {title: undefined}) %>
<body class="">
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
<body class="bg-gray-900 text-white font-sans">
<div class="min-w-screen min-h-screen">
<%- include('partials/header') %>
<%- include('partials/botsTab') %>
<div class="container mx-auto">
<%- include('partials/subredditsTab') %>
<div class="grid">
<div class="bg-white dark:bg-gray-500 dark:text-white">
<div class="bg-gray-700">
<div class="pb-6 md:px-7">
<div class="sub active" data-subreddit="All" data-bot="All">
Instance is currently <b>OFFLINE</b>
@@ -31,7 +30,6 @@
</div>
<%- include('partials/footer') %>
<%- include('partials/instanceTabJs') %>
<%- include('partials/themeJs') %>
<%- include('partials/logSettingsJs') %>
<script src="https://cdn.socket.io/4.1.2/socket.io.min.js" integrity="sha384-toS6mmwu70G0fw54EGlWWeA4z3dyJ+dlXBtSURSKN4vyRFOcxd3Bzjj/AoOwY+Rg" crossorigin="anonymous"></script>
<script>

View File

@@ -1,9 +1,9 @@
<div class="space-x-4 py-1 md:px-10 leading-6 font-semibold bg-gray-500 dark:bg-gray-700 text-white">
<div class="space-x-4 py-1 md:px-10 leading-6 font-semibold bg-gray-700">
<div class="container mx-auto">
<% if(locals.bots !== undefined) { %>
<ul id="botTabs" class="inline-flex flex-wrap">
<% bots.forEach(function (data){ %>
<li class="my-3 px-3 dark:text-white">
<li class="my-3 px-3">
<span data-bot="<%= data.system.name %>" class="rounded-md py-2 px-3 tabSelectWrapper">
<a class="tabSelect font-normal pointer hover:font-bold"
data-bot="<%= data.system.name %>">
@@ -16,7 +16,7 @@
</li>
<% }) %>
<% if(locals.isOperator === true && locals.instanceId !== undefined) { %>
<li class="my-3 px-3 dark:text-white">
<li class="my-3 px-3">
<span class="rounded-md py-2 px-3 border">
<a class="font-normal pointer hover:font-bold" href="/auth/helper">
Add Bot +

View File

@@ -1,4 +1,4 @@
<div class="py-3 flex items-center justify-around font-semibold text-white">
<div class="py-3 flex items-center justify-around font-semibold">
<div>
<a href="https://github.com/FoxxMD/context-mod">ContextMod Web</a> created by /u/FoxxMD
</div>

View File

@@ -1,10 +1,5 @@
<head>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/tailwindcss/2.0.3/tailwind.min.css"
integrity="sha512-wl80ucxCRpLkfaCnbM88y4AxnutbGk327762eM9E/rRTvY/ZGAHWMZrYUq66VQBYMIYDFpDdJAOGSLyIPHZ2IQ=="
crossorigin="anonymous"/>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/tailwindcss/2.0.3/tailwind-dark.min.css"
integrity="sha512-WvyKyiVHgInX5UQt67447ExtRRZG/8GUijaq1MpqTNYp8wY4/EJOG5bI80sRp/5crDy4Z6bBUydZI2OFV3Vbtg=="
crossorigin="anonymous"/>
<link rel="stylesheet" href="/public/tailwind.min.css"/>
<script src="https://code.iconify.design/1/1.0.4/iconify.min.js"></script>
<link rel="stylesheet" href="/public/themeToggle.css">
<link rel="stylesheet" href="/public/app.css">

View File

@@ -1,11 +1,11 @@
<div class="space-x-4 p-6 md:px-10 md:py-6 leading-6 font-semibold bg-gray-800 text-white">
<div class="space-x-4 p-6 md:px-10 md:py-6 leading-6 font-semibold bg-gray-500 text-white">
<div class="container mx-auto">
<div class="flex items-center justify-between">
<div class="flex items-center flex-grow pr-4">
<% if(locals.instances !== undefined) { %>
<ul class="inline-flex flex-wrap">
<% instances.forEach(function (data) { %>
<li class="my-3 px-3 dark:text-white">
<li class="my-3 px-3">
<span data-instance="<%= data.friendly %>" class="has-tooltip instanceSelectWrapper rounded-md py-3 px-3">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black' style="margin-top:3em">
<div class="stats">
@@ -34,20 +34,6 @@
<% } %>
</div>
<div class="flex items-center flex-end text-sm">
<span class="inline-block mr-4">
<label style="font-size:2.5px;">
<input class='toggle-checkbox' type='checkbox' id="themeToggle" checked></input>
<div class='toggle-slot'>
<div class='sun-icon-wrapper'>
<div class="iconify sun-icon" data-icon="feather-sun" data-inline="false"></div>
</div>
<div class='toggle-button'></div>
<div class='moon-icon-wrapper'>
<div class="iconify moon-icon" data-icon="feather-moon" data-inline="false"></div>
</div>
</div>
</label>
</span>
<a href="logout">Logout</a>
</div>
</div>

View File

@@ -1,10 +1,10 @@
<div class="space-x-4 pt-2 md:px-5 leading-6 font-semibold bg-gray-white dark:bg-gray-500 text-white">
<div class="space-x-4 pt-2 md:px-5 leading-6 font-semibold bg-gray-800">
<div class="container mx-auto">
<% if(locals.bots !== undefined) { %>
<% bots.forEach(function (botData){ %>
<ul data-bot="<%= botData.system.name %>" class="inline-flex flex-wrap subreddit nestedTabs">
<% botData.subreddits.forEach(function (data){ %>
<li class="my-3 pr-3 dark:text-white">
<li class="my-3 pr-3">
<span data-subreddit="<%= data.name %>" class="rounded-md py-2 px-3 tabSelectWrapper">
<a class="tabSelect font-normal pointer hover:font-bold"
data-subreddit="<%= data.name %>">

View File

@@ -1,32 +0,0 @@
<script>
document.querySelectorAll('.theme').forEach(el => {
el.addEventListener('click', e => {
e.preventDefault();
if (e.target.id === 'dark') {
document.body.classList.add('dark');
localStorage.setItem('ms-dark', 'yes');
} else {
document.body.classList.remove('dark');
localStorage.setItem('ms-dark', 'no');
}
document.querySelectorAll('.theme').forEach(el => {
el.classList.remove('font-bold', 'no-underline', 'pointer-events-none');
});
e.target.classList.add('font-bold', 'no-underline', 'pointer-events-none');
})
});
const themeToggle = document.querySelector("#themeToggle");
if(themeToggle !== null) {
themeToggle.checked = localStorage.getItem('ms-dark') !== 'no';
themeToggle.onchange = (e) => {
if (e.target.checked === true) {
document.body.classList.add('dark');
localStorage.setItem('ms-dark', 'yes');
} else {
document.body.classList.remove('dark');
localStorage.setItem('ms-dark', 'no');
}
}
}
</script>

View File

@@ -1,4 +1,4 @@
<div class="space-x-4 p-6 md:px-10 md:py-6 leading-6 font-semibold bg-gray-800 text-white">
<div class="space-x-4 p-6 md:px-10 md:py-6 leading-6 font-semibold bg-gray-500">
<div class="container mx-auto">
<div class="flex items-center justify-between">
<div class="flex items-center flex-grow pr-4">
@@ -7,20 +7,7 @@
<% } %>
</div>
<div class="flex items-center flex-end text-sm">
<span class="inline-block mr-4">
<label style="font-size:2.5px;">
<input class='toggle-checkbox' type='checkbox' id="themeToggle" checked></input>
<div class='toggle-slot'>
<div class='sun-icon-wrapper'>
<div class="iconify sun-icon" data-icon="feather-sun" data-inline="false"></div>
</div>
<div class='toggle-button'></div>
<div class='moon-icon-wrapper'>
<div class="iconify moon-icon" data-icon="feather-moon" data-inline="false"></div>
</div>
</div>
</label>
</span>
<a href="logout">Logout</a>
</div>
</div>

View File

@@ -1,21 +1,20 @@
<html>
<html lang="en">
<%- include('partials/head', {title: undefined}) %>
<body class="">
<script>localStorage.getItem('ms-dark') === 'no' ? document.body.classList.remove('dark') : document.body.classList.add('dark')</script>
<div class="min-w-screen min-h-screen bg-gray-100 bg-gray-100 dark:bg-gray-800 font-sans">
<body class="bg-gray-900 text-white">
<div class="min-w-screen min-h-screen font-sans">
<%- include('partials/header') %>
<%- include('partials/botsTab') %>
<%- include('partials/subredditsTab') %>
<div class="container mx-auto">
<%- include('partials/subredditsTab') %>
<div class="grid">
<div class="bg-white dark:bg-gray-500 dark:text-white">
<div class="">
<div class="pb-6 md:px-7">
<% bots.forEach(function (bot){ %>
<% bot.subreddits.forEach(function (data){ %>
<div class="sub <%= bot.system.running ? '' : 'offline' %>" data-subreddit="<%= data.name %>" data-bot="<%= bot.system.name %>">
<div class="grid grid-cols-1 lg:grid-cols-2 xl:grid-cols-3 2xl:grid-cols-3 gap-5">
<div class="bg-white shadow-md rounded my-3 dark:bg-gray-500 dark:text-white">
<div class="space-x-4 px-4 p-2 leading-2 font-semibold bg-gray-300 dark:bg-gray-700 dark:text-white">
<div class="bg-white shadow-md rounded my-3 bg-gray-600 ">
<div class="space-x-4 px-4 p-2 leading-2 font-semibold bg-gray-300 bg-gray-700 ">
<div class="flex items-center justify-between">
<h4>Overview</h4>
<% if (data.name === 'All') { %>
@@ -63,10 +62,7 @@
class="h-4 w-4 inline-block cursor-help"
fill="none"
viewBox="0 0 24 24" stroke="currentColor">
<path stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
<use xlink:href="public/questionsymbol.svg#q" />
</svg>
</span>
</span>
@@ -99,10 +95,7 @@
class="h-4 w-4 inline-block cursor-help"
fill="none"
viewBox="0 0 24 24" stroke="currentColor">
<path stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
<use xlink:href="public/questionsymbol.svg#q" />
</svg>
</span>
</span>
@@ -136,10 +129,7 @@
class="h-4 w-4 inline-block cursor-help"
fill="none"
viewBox="0 0 24 24" stroke="currentColor">
<path stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
<use xlink:href="public/questionsymbol.svg#q" />
</svg>
</span>
</span>
@@ -209,11 +199,8 @@
class="h-4 w-4 inline-block cursor-help"
fill="none"
viewBox="0 0 24 24" stroke="currentColor">
<path stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
</svg>
<use xlink:href="public/questionsymbol.svg#q" />
</svg>
</span>
</span>
</label>
@@ -235,8 +222,8 @@
</div>
</div>
<% if (data.name === 'All') { %>
<div class="bg-white shadow-md rounded my-3 dark:bg-gray-500 dark:text-white">
<div class="space-x-4 px-4 p-2 leading-2 font-semibold bg-gray-300 dark:bg-gray-700 dark:text-white">
<div class="bg-white shadow-md rounded my-3 bg-gray-600 ">
<div class="space-x-4 px-4 p-2 leading-2 font-semibold bg-gray-300 bg-gray-700 ">
<h4>API</h4>
</div>
<div class="p-4">
@@ -250,9 +237,40 @@
<label>Api Usage</label>
<span><span id="apiLimit"><%= data.apiLimit %></span>/600 (~<span
id="apiAvg"><%= data.apiAvg %></span>req/s)</span>
<label>Depleted</label>
<label>
<span class="has-tooltip">
<span style="margin-top:55px" class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black space-y-3 p-2 text-left'>
<p>Reddit restricts api usage, per account, to <b>600 requests every 10 minutes.</b></p>
<p><b>Depleted</b> is the estimated time until all 600 requests are used based on <b>Api Usage.</b></p>
<p>API usage is sustainable when <b>Depleted</b> is greater than <b>Limit Reset</b></p>
</span>
<span>
Depleted <svg xmlns="http://www.w3.org/2000/svg"
class="h-4 w-4 inline-block cursor-help"
fill="none"
viewBox="0 0 24 24" stroke="currentColor">
<use xlink:href="public/questionsymbol.svg#q" />
</svg>
</span>
</span>
</label>
<span>in ~<span id="apiDepletion"><%= data.apiDepletion %></span></span>
<label>Limit Reset</label>
<label>
<span class="has-tooltip">
<span style="margin-top:35px" class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black space-y-3 p-2 text-left'>
<p>Reddit restricts api usage, per account, to <b>600 requests every 10 minutes.</b></p>
<p><b>Limit Reset</b> is the amount of time remaining in the current 10 minute period until the limit is reset to 600.</p>
</span>
<span>
Limit Reset <svg xmlns="http://www.w3.org/2000/svg"
class="h-4 w-4 inline-block cursor-help"
fill="none"
viewBox="0 0 24 24" stroke="currentColor">
<use xlink:href="public/questionsymbol.svg#q" />
</svg>
</span>
</span>
</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span id="limitReset"><%= data.limitReset %></span>
@@ -264,8 +282,8 @@
</div>
<% } %>
<% if (data.name !== 'All') { %>
<div class="bg-white shadow-md rounded my-3 dark:bg-gray-500 dark:text-white">
<div class="space-x-4 px-4 p-2 leading-2 font-semibold bg-gray-300 dark:bg-gray-700 dark:text-white">
<div class="bg-white shadow-md rounded my-3 bg-gray-600 ">
<div class="space-x-4 px-4 p-2 leading-2 font-semibold bg-gray-700 ">
<h4>Config
<span>
<span class="has-tooltip">
@@ -316,8 +334,8 @@
</div>
</div>
<% } %>
<div class="bg-white shadow-md rounded my-3 dark:bg-gray-500 dark:text-white">
<div class="space-x-4 px-4 p-2 leading-2 font-semibold bg-gray-300 dark:bg-gray-700 dark:text-white">
<div class="bg-white shadow-md rounded my-3 bg-gray-600 ">
<div class="space-x-4 px-4 p-2 leading-2 font-semibold bg-gray-700 ">
<h4>Usage</h4>
</div>
<div class="p-4">
@@ -333,54 +351,63 @@
<% } %>
<% if (data.name !== 'All') { %>
<div data-subreddit="<%= data.name %>"
class="stats botStats reloadStats">
class="stats botStats reloadStats mb-2">
<label>Events</label>
<span><%= data.stats.eventsCheckedSinceStartTotal === undefined ? '-' : data.stats.eventsCheckedSinceStartTotal %></span>
<span>
<%= data.stats.historical.lastReload.eventsCheckedTotal === undefined ? '-' : data.stats.historical.lastReload.eventsCheckedTotal %>
</span>
<label>Checks</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.checksTriggeredSinceStartTotal %></span> Triggered / <span><%= data.stats.checksRunSinceStartTotal %></span> Run
<span><%= data.stats.historical.lastReload.checksTriggeredTotal %></span> Triggered / <span><%= data.stats.historical.lastReload.checksRunTotal %></span> Run / <span><%= data.stats.historical.lastReload.checksFromCacheTotal %></span> Cached
</span>
<span><%= data.stats.checksTriggeredSinceStartTotal %></span> T / <span><%= data.stats.checksRunSinceStartTotal %></span> R
<span class="cursor-help underline" style="text-decoration-style: dotted"><%= data.stats.historical.lastReload.checksTriggeredTotal %> Triggered</span>
</span>
<label>Rules</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.rulesTriggeredSinceStartTotal %></span> Triggered / <span><%= data.stats.rulesCachedSinceStartTotal %></span> Cached / <span><%= data.stats.rulesRunSinceStartTotal %></span> Run
<span><%= data.stats.historical.lastReload.rulesTriggeredTotal %></span> Triggered / <span><%= data.stats.historical.lastReload.rulesCachedTotal %></span> Cached / <span><%= data.stats.historical.lastReload.rulesRunTotal %></span> Run
</span>
<span class="cursor-help">
<span><%= data.stats.rulesTriggeredSinceStartTotal %></span> T / <span><%= data.stats.rulesCachedSinceStartTotal %></span> C / <span><%= data.stats.rulesRunSinceStartTotal %></span> R</span>
<span class="cursor-help cursor-help underline" style="text-decoration-style: dotted">
<span><%= data.stats.historical.lastReload.rulesTriggeredTotal %></span> Triggered</span>
</span>
<label>Actions</label>
<span class="cursor-help"><%= data.stats.actionsRunSinceStartTotal === undefined ? '-' : data.stats.actionsRunSinceStartTotal %></span>
<span><%= data.stats.historical.lastReload.actionsRunTotal === undefined ? '0' : data.stats.historical.lastReload.actionsRunTotal %> Run</span>
</div>
<% } %>
<div data-subreddit="<%= data.name %>" class="stats botStats allStats">
<div data-subreddit="<%= data.name %>" class="stats botStats allStats mb-2">
<label>Events</label>
<span><%= data.stats.eventsCheckedTotal %></span>
<span>
<%= data.stats.historical.allTime.eventsCheckedTotal %>
</span>
<label>Checks</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.checksTriggeredTotal %></span> Triggered / <span><%= data.stats.checksRunTotal %></span> Run
<span><%= data.stats.historical.allTime.checksTriggeredTotal %></span> Triggered / <span><%= data.stats.historical.allTime.checksRunTotal %></span> Run / <span><%= data.stats.historical.allTime.checksFromCacheTotal %></span> Cached
</span>
<span class="cursor-help"><span><%= data.stats.checksTriggeredTotal %></span> T / <span><%= data.stats.checksRunTotal %></span> R</span>
<span class="cursor-help underline" style="text-decoration-style: dotted"><%= data.stats.historical.allTime.checksTriggeredTotal %> Triggered</span>
</span>
<label>Rules</label>
<span class="has-tooltip">
<span class='tooltip rounded shadow-lg p-1 bg-gray-100 text-black -mt-2'>
<span><%= data.stats.rulesTriggeredTotal %></span> Triggered / <span><%= data.stats.rulesCachedTotal %></span> Cached / <span><%= data.stats.rulesRunTotal %></span> Run
<span><%= data.stats.historical.allTime.rulesTriggeredTotal %></span> Triggered / <span><%= data.stats.historical.allTime.rulesCachedTotal %></span> Cached / <span><%= data.stats.historical.allTime.rulesRunTotal %></span> Run
</span>
<span class="cursor-help"><span><%= data.stats.rulesTriggeredTotal %></span> T / <span><%= data.stats.rulesCachedTotal %></span> C / <span><%= data.stats.rulesRunTotal %></span> R</span>
<span class="cursor-help underline" style="text-decoration-style: dotted"><span><%= data.stats.historical.allTime.rulesTriggeredTotal %></span> Triggered</span>
</span>
<label>Actions</label>
<span><%= data.stats.actionsRunTotal %> Run</span>
<span>
<span><%= data.stats.historical.allTime.actionsRunTotal === undefined ? '0' : data.stats.historical.allTime.actionsRunTotal %> Run</span>
</span>
</div>
<% if (data.name !== 'All') { %>
<a target="_blank" href="/events?instance=<%= instanceId %>&bot=<%= bot.system.name %>&subreddit=<%= data.name %>" style="text-decoration-style: dotted">Actioned Events</a>
<% } else { %>
<a target="_blank" href="/events?instance=<%= instanceId %>&bot=<%= bot.system.name %>">Actioned Events</a>
<% } %>
</div>
<div>
<div class="text-left pb-2">
@@ -431,10 +458,7 @@
class="h-4 w-4 inline-block cursor-help"
fill="none"
viewBox="0 0 24 24" stroke="currentColor">
<path stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
<use xlink:href="public/questionsymbol.svg#q" />
</svg>
</span>
</span>
@@ -485,6 +509,16 @@
<span>
</span>
<label class="newRow">Subreddit Criteria</label>
<span class="newRow"><%= data.stats.cache.types.subredditCrit.requests %> | <%= data.stats.cache.types.subredditCrit.miss %> (<%= data.stats.cache.types.subredditCrit.missPercent %>) miss</span>
<span class="newRow">
- <span class="font-mono">SubredditState</span> results
</span>
<label>Avgs</label>
<span>Hits/Key <%= data.stats.cache.types.subredditCrit.identifierAverageHit %> | Hit Interval <%= data.stats.cache.types.subredditCrit.averageTimeBetweenHits %>s</span>
<span>
</span>
<label class="newRow">Comment Check</label>
<span class="newRow"><%= data.stats.cache.types.commentCheck.requests %> | <%= data.stats.cache.types.commentCheck.miss %> (<%= data.stats.cache.types.commentCheck.missPercent %>) miss</span>
<span class="newRow">
@@ -511,6 +545,14 @@
<span>
</span>
<label class="newRow">Subreddits</label>
<span class="newRow"><%= data.stats.cache.types.subreddit.requests %> | <%= data.stats.cache.types.subreddit.miss %> (<%= data.stats.cache.types.subreddit.missPercent %>) miss</span>
<span class="newRow"></span>
<label>Avgs</label>
<span>Hits/Key <%= data.stats.cache.types.subreddit.identifierAverageHit %> | Hit Interval <%= data.stats.cache.types.subreddit.averageTimeBetweenHits %>s</span>
<span>
</span>
<label class="newRow">Content</label>
<span class="newRow"><%= data.stats.cache.types.content.requests %> | <%= data.stats.cache.types.content.miss %> (<%= data.stats.cache.types.content.missPercent %>) miss</span>
<span class="newRow">
@@ -553,10 +595,7 @@
class="h-4 w-4 inline-block cursor-help"
fill="none"
viewBox="0 0 24 24" stroke="currentColor">
<path stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
<use xlink:href="public/questionsymbol.svg#q" />
</svg>
</span>
</span>
@@ -601,7 +640,6 @@
<%- include('partials/footer') %>
</div>
<%- include('partials/instanceTabJs') %>
<%- include('partials/themeJs') %>
<%- include('partials/logSettingsJs') %>
<script>
window.sort = 'desc';
@@ -632,7 +670,7 @@
const url = urlInput.value;
const dryRun = dryRunCheck.checked ? 1 : 0;
const fetchUrl = `/api/check?instanceId=<%= instanceId %>%bot=${bot}&url=${url}&dryRun=${dryRun}&subreddit=${subreddit}`;
const fetchUrl = `/api/check?instance=<%= instanceId %>&bot=${bot}&url=${url}&dryRun=${dryRun}&subreddit=${subreddit}`;
fetch(fetchUrl);
urlInput.value = '';

View File

@@ -1,6 +1,7 @@
import {App} from "../../../App";
import Bot from "../../../Bot";
import {BotInstance, CMInstance} from "../../interfaces";
import {Manager} from "../../../Subreddit/Manager";
declare global {
declare namespace Express {
@@ -10,6 +11,7 @@ declare global {
instance?: CMInstance,
bot?: BotInstance,
serverBot: Bot,
manager?: Manager,
}
interface User {
name: string

View File

@@ -2,6 +2,8 @@ import winston from 'winston';
import 'winston-daily-rotate-file';
import dayjs from 'dayjs';
import utc from 'dayjs/plugin/utc.js';
import advancedFormat from 'dayjs/plugin/advancedFormat';
import tz from 'dayjs/plugin/timezone';
import dduration from 'dayjs/plugin/duration.js';
import relTime from 'dayjs/plugin/relativeTime.js';
import sameafter from 'dayjs/plugin/isSameOrAfter.js';
@@ -20,7 +22,7 @@ import {App} from "./App";
import apiServer from './Web/Server/server';
import clientServer from './Web/Client';
import Submission from "snoowrap/dist/objects/Submission";
import {COMMENT_URL_ID, parseLinkIdentifier, SUBMISSION_URL_ID} from "./util";
import {COMMENT_URL_ID, isScopeError, parseLinkIdentifier, SUBMISSION_URL_ID} from "./util";
import LoggedError from "./Utils/LoggedError";
import {buildOperatorConfigWithDefaults, parseOperatorConfigFromSources} from "./ConfigBuilder";
import {getLogger} from "./Utils/loggerFactory";
@@ -31,6 +33,8 @@ dayjs.extend(dduration);
dayjs.extend(relTime);
dayjs.extend(sameafter);
dayjs.extend(samebefore);
dayjs.extend(tz);
dayjs.extend(advancedFormat);
const commentReg = parseLinkIdentifier([COMMENT_URL_ID]);
const submissionReg = parseLinkIdentifier([SUBMISSION_URL_ID]);
@@ -182,7 +186,7 @@ const program = new Command();
for(const manager of b.subManagers) {
const activities = await manager.subreddit.getUnmoderated();
for (const a of activities.reverse()) {
manager.queue.push({
manager.firehose.push({
checkType: a instanceof Submission ? 'Submission' : 'Comment',
activity: a,
options: {checkNames: checks}
@@ -197,11 +201,8 @@ const program = new Command();
} catch (err) {
if (!err.logged && !(err instanceof LoggedError)) {
const logger = winston.loggers.get('app');
if (err.name === 'StatusCodeError' && err.response !== undefined) {
const authHeader = err.response.headers['www-authenticate'];
if (authHeader !== undefined && authHeader.includes('insufficient_scope')) {
logger.error('Reddit responded with a 403 insufficient_scope, did you choose the correct scopes?');
}
if(isScopeError(err)) {
logger.error('Reddit responded with a 403 insufficient_scope which means the bot is lacking necessary OAUTH scopes to perform general actions.');
}
logger.error(err);
}

View File

@@ -1,7 +1,7 @@
import winston, {Logger} from "winston";
import jsonStringify from 'safe-stable-stringify';
import dayjs, {Dayjs, OpUnitType} from 'dayjs';
import {isRuleSetResult, RulePremise, RuleResult, RuleSetResult} from "./Rule";
import {FormattedRuleResult, isRuleSetResult, RulePremise, RuleResult, RuleSetResult} from "./Rule";
import deepEqual from "fast-deep-equal";
import {Duration} from 'dayjs/plugin/duration.js';
import Ajv from "ajv";
@@ -9,12 +9,30 @@ import {InvalidOptionArgumentError} from "commander";
import Submission from "snoowrap/dist/objects/Submission";
import {Comment} from "snoowrap";
import {inflateSync, deflateSync} from "zlib";
import pixelmatch from 'pixelmatch';
import os from 'os';
import {
ActivityWindowCriteria, CacheOptions, CacheProvider,
ActivityWindowCriteria,
CacheOptions,
CacheProvider,
DurationComparison,
GenericComparison, LogInfo, NamedGroup,
PollingOptionsStrong, RegExResult, ResourceStats,
StringOperator
GenericComparison,
HistoricalStats,
HistoricalStatsDisplay, ImageComparisonResult,
//ImageData,
ImageDetection,
//ImageDownloadOptions,
LogInfo,
NamedGroup,
PollingOptionsStrong,
RedditEntity,
RedditEntityType,
RegExResult,
ResourceStats,
StatusCodeError,
StringOperator,
StrongSubredditState,
SubredditState
} from "./Common/interfaces";
import JSON5 from "json5";
import yaml, {JSON_SCHEMA} from "js-yaml";
@@ -28,6 +46,25 @@ import crypto from "crypto";
import Autolinker from 'autolinker';
import {create as createMemoryStore} from './Utils/memoryStore';
import {MESSAGE} from "triple-beam";
import {RedditUser} from "snoowrap/dist/objects";
import reRegExp from '@stdlib/regexp-regexp';
import fetch, {Response} from "node-fetch";
import { URL } from "url";
import ImageData from "./Common/ImageData";
import {Sharp, SharpOptions} from "sharp";
// @ts-ignore
import {blockhashData, hammingDistance} from 'blockhash';
//import {ResembleSingleCallbackComparisonResult} from "resemblejs";
// want to guess how many concurrent image comparisons we should be doing
// assuming, conservatively and based on real-world results, that comparing 30 images takes about ~30MB memory...
// and we really want to use less than a fourth of available ram (should be low-footprint!)...
// and base-line operation of RCB is usually around 40MB (liberal)
const availMemory = (os.freemem() / (1024 * 1024)) / 4 + 40;
export const imageCompareMaxConcurrencyGuess = Math.min(3, Math.max(Math.floor(availMemory/30), 1));
const ReReg = reRegExp();
const {format} = winston;
const {combine, printf, timestamp, label, splat, errors} = format;
@@ -87,6 +124,7 @@ export const defaultFormat = (defaultLabel = 'App') => printf(({
leaf,
itemId,
timestamp,
durationMs,
// @ts-ignore
[SPLAT]: splatObj,
stack,
@@ -116,7 +154,7 @@ export const defaultFormat = (defaultLabel = 'App') => printf(({
}
const labelContent = `${nodes.map((x: string) => `[${x}]`).join(' ')}`;
return `${timestamp} ${level.padEnd(7)}: ${instance !== undefined ? `|${instance}| ` : ''}${bot !== undefined ? `~${bot}~ ` : ''}${subreddit !== undefined ? `{${subreddit}} ` : ''}${labelContent} ${msg}${stringifyValue !== '' ? ` ${stringifyValue}` : ''}${stackMsg}`;
return `${timestamp} ${level.padEnd(7)}: ${instance !== undefined ? `|${instance}| ` : ''}${bot !== undefined ? `~${bot}~ ` : ''}${subreddit !== undefined ? `{${subreddit}} ` : ''}${labelContent} ${msg}${durationMs !== undefined ? ` Elapsed: ${durationMs}ms (${formatNumber(durationMs/1000)}s) ` : ''}${stringifyValue !== '' ? ` ${stringifyValue}` : ''}${stackMsg}`;
});
@@ -569,6 +607,24 @@ export const parseSubredditName = (val:string): string => {
return matches[1] as string;
}
export const REDDIT_ENTITY_REGEX: RegExp = /^\s*(?<entityType>\/[ru]\/|[ru]\/)*(?<name>\w+)*\s*$/;
export const REDDIT_ENTITY_REGEX_URL = 'https://regexr.com/65r9b';
export const parseRedditEntity = (val:string): RedditEntity => {
const matches = val.match(REDDIT_ENTITY_REGEX);
if (matches === null) {
throw new InvalidRegexError(REDDIT_ENTITY_REGEX, val, REDDIT_ENTITY_REGEX_URL)
}
const groups = matches.groups as any;
let eType: RedditEntityType = 'user';
if(groups.entityType !== undefined && typeof groups.entityType === 'string' && groups.entityType.includes('r')) {
eType = 'subreddit';
}
return {
name: groups.name,
type: eType,
}
}
const WIKI_REGEX: RegExp = /^\s*wiki:(?<url>[^|]+)\|*(?<subreddit>[^\s]*)\s*$/;
const WIKI_REGEX_URL = 'https://regexr.com/61bq1';
const URL_REGEX: RegExp = /^\s*url:(?<url>[^\s]+)\s*$/;
@@ -700,11 +756,11 @@ export const formatLogLineToHtml = (log: string | LogInfo) => {
stripPrefix: false,
sanitizeHtml: true,
})
.replace(/(\s*debug\s*):/i, '<span class="debug text-pink-400">$1</span>:')
.replace(/(\s*warn\s*):/i, '<span class="warn text-yellow-400">$1</span>:')
.replace(/(\s*info\s*):/i, '<span class="info text-blue-300">$1</span>:')
.replace(/(\s*error\s*):/i, '<span class="error text-red-400">$1</span>:')
.replace(/(\s*verbose\s*):/i, '<span class="error text-purple-400">$1</span>:')
.replace(/(\s*debug\s*):/i, '<span class="debug blue">$1</span>:')
.replace(/(\s*warn\s*):/i, '<span class="warn yellow">$1</span>:')
.replace(/(\s*info\s*):/i, '<span class="info green">$1</span>:')
.replace(/(\s*error\s*):/i, '<span class="error red">$1</span>:')
.replace(/(\s*verbose\s*):/i, '<span class="error purple">$1</span>:')
.replaceAll('\n', '<br />');
//.replace(HYPERLINK_REGEX, '<a target="_blank" href="$&">$&</a>');
return `<div class="logLine">${logContent}</div>`
@@ -819,17 +875,27 @@ export const boolToString = (val: boolean): string => {
return val ? 'Yes' : 'No';
}
export const isRedditMedia = (act: Submission): boolean => {
return act.is_reddit_media_domain || act.is_video || ['v.redd.it','i.redd.it'].includes(act.domain);
export const isRedditMedia = (act: Comment | Submission): boolean => {
return asSubmission(act) && (act.is_reddit_media_domain || act.is_video || ['v.redd.it','i.redd.it'].includes(act.domain));
}
export const isExternalUrlSubmission = (act: Comment | Submission): boolean => {
return act instanceof Submission && !act.is_self && !isRedditMedia(act);
return asSubmission(act) && !act.is_self && !isRedditMedia(act);
}
export const parseRegex = (r: string | RegExp, val: string, flags?: string): RegExResult => {
export const parseStringToRegex = (val: string, defaultFlags?: string): RegExp | undefined => {
const result = ReReg.exec(val);
if (result === null) {
return undefined;
}
// index 0 => full string
// index 1 => regex without flags and forward slashes
// index 2 => flags
const flags = result[2] === '' ? (defaultFlags || '') : result[2];
return new RegExp(result[1], flags);
}
const reg = r instanceof RegExp ? r : new RegExp(r, flags);
export const parseRegex = (reg: RegExp, val: string): RegExResult => {
if(reg.global) {
const g = Array.from(val.matchAll(reg));
@@ -855,12 +921,74 @@ export const parseRegex = (r: string | RegExp, val: string, flags?: string): Reg
}
}
export async function readJson(path: string, opts: any) {
export const isStrongSubredditState = (value: SubredditState | StrongSubredditState) => {
return value.name === undefined || value.name instanceof RegExp;
}
export const asStrongSubredditState = (value: any): value is StrongSubredditState => {
return isStrongSubredditState(value);
}
export interface StrongSubredditStateOptions {
defaultFlags?: string
generateDescription?: boolean
}
export const toStrongSubredditState = (s: SubredditState, opts?: StrongSubredditStateOptions): StrongSubredditState => {
const {defaultFlags, generateDescription = false} = opts || {};
const {name: nameValRaw, stateDescription} = s;
let nameReg: RegExp | undefined;
if (nameValRaw !== undefined) {
if (!(nameValRaw instanceof RegExp)) {
let nameVal = nameValRaw.trim();
nameReg = parseStringToRegex(nameVal, defaultFlags);
if (nameReg === undefined) {
try {
const parsedVal = parseSubredditName(nameVal);
nameVal = parsedVal;
} catch (err) {
// oh well
const f = 1;
}
nameReg = parseStringToRegex(`/^${nameVal}$/`, defaultFlags);
}
} else {
nameReg = nameValRaw;
}
}
const strongState = {
...s,
name: nameReg
};
if (generateDescription && stateDescription === undefined) {
strongState.stateDescription = objectToStringSummary(strongState);
}
return strongState;
}
export const convertSubredditsRawToStrong = (x: (SubredditState | string), opts: StrongSubredditStateOptions): StrongSubredditState => {
if (typeof x === 'string') {
return toStrongSubredditState({name: x, stateDescription: x}, opts);
}
return toStrongSubredditState(x, opts);
}
export async function readConfigFile(path: string, opts: any) {
const {log, throwOnNotFound = true} = opts;
try {
await promises.access(path, constants.R_OK);
const data = await promises.readFile(path);
return JSON.parse(data as unknown as string);
const [configObj, jsonErr, yamlErr] = parseFromJsonOrYamlToObject(data as unknown as string);
if(configObj !== undefined) {
return configObj as object;
}
log.error(`Could not parse wiki page contents as JSON or YAML:`);
log.error(jsonErr);
log.error(yamlErr);
throw new SimpleError('Could not parse wiki page contents as JSON or YAML');
} catch (e) {
const {code} = e;
if (code === 'ENOENT') {
@@ -924,11 +1052,14 @@ export const cacheStats = (): ResourceStats => {
author: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
authorCrit: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
itemCrit: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
subredditCrit: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
content: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
userNotes: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
submission: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
comment: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
commentCheck: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0}
subreddit: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
commentCheck: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0},
imageHash: {requests: 0, miss: 0, identifierRequestCount: statMetricCache(), requestTimestamps: timestampArr(), averageTimeBetweenHits: 'N/A', identifierAverageHit: 0}
};
}
@@ -947,7 +1078,7 @@ export const buildCacheOptionsFromProvider = (provider: CacheProvider | any): Ca
}
export const createCacheManager = (options: CacheOptions): Cache => {
const {store, max, ttl = 60, host = 'localhost', port, auth_pass, db} = options;
const {store, max, ttl = 60, host = 'localhost', port, auth_pass, db, ...rest} = options;
switch (store) {
case 'none':
return cacheManager.caching({store: 'none', max, ttl});
@@ -958,7 +1089,8 @@ export const createCacheManager = (options: CacheOptions): Cache => {
port,
auth_pass,
db,
ttl
ttl,
...rest,
});
case 'memory':
default:
@@ -984,3 +1116,311 @@ export const snooLogWrapper = (logger: Logger) => {
trace: (...args: any[]) => logger.debug(args.slice(0, 2).join(' '), [args.slice(2)]),
}
}
/**
 * Returns true when the error is a snoowrap StatusCodeError whose WWW-Authenticate
 * header indicates the oauth token is missing a required scope.
 * */
export const isScopeError = (err: any): boolean => {
    // NOTE: typeof null === 'object', so null must be excluded explicitly before reading properties
    if (err !== null && typeof err === 'object' && err.name === 'StatusCodeError' && err.response !== undefined) {
        // optional chaining guards against a response object that has no headers at all
        const authHeader = err.response.headers?.['www-authenticate'];
        return authHeader !== undefined && authHeader.includes('insufficient_scope');
    }
    return false;
}
/**
 * Type guard: true when the error looks like a snoowrap StatusCodeError (has a name and an attached response).
 * */
export const isStatusError = (err: any): err is StatusCodeError => {
    // NOTE: typeof null === 'object', so null must be excluded explicitly before reading properties
    return err !== null && typeof err === 'object' && err.name === 'StatusCodeError' && err.response !== undefined;
}
/**
 * Cached activities lose type information when deserialized so need to check properties as well to see if the object is the shape of a Submission
 * */
export const isSubmission = (value: any) => {
    // guard nullish input -- the original read .domain unconditionally and threw on null/undefined
    if (value === null || value === undefined) {
        return false;
    }
    return value instanceof Submission || value.domain !== undefined;
}
/**
 * Type-guard wrapper around isSubmission so callers get the narrowed Submission type.
 * */
export const asSubmission = (value: any): value is Submission => isSubmission(value);
/**
 * Serialized activities store subreddit and user properties as their string representations (instead of proxy)
 * */
export const getActivitySubredditName = (activity: any): string => {
    const sub = activity.subreddit;
    return typeof sub === 'string' ? sub : sub.display_name;
}
/**
 * Serialized activities store subreddit and user properties as their string representations (instead of proxy)
 * */
export const getActivityAuthorName = (author: RedditUser | string): string => {
    return typeof author === 'string' ? author : author.name;
}
/**
 * Build a normalized, colon-delimited cache prefix from arbitrary parts.
 * Non-string and empty parts are dropped; embedded colons split into segments.
 * Result is either '' or ends with a trailing ':' (e.g. 'web:robot:').
 * */
export const buildCachePrefix = (parts: any[]): string => {
    const segments = parts
        .filter(p => typeof p === 'string' && p !== '')
        .map(p => p.trim())
        .flatMap(p => p.split(':'))
        .filter(p => p !== '');
    return segments.length === 0 ? '' : `${segments.join(':')}:`;
}
/**
 * Render an object's own enumerable key/value pairs as a single pipe-delimited line, e.g. "a: 1 | b: 2"
 * */
export const objectToStringSummary = (obj: object): string => {
    return Object.entries(obj)
        .map(([key, val]) => `${key}: ${val}`)
        .join(' | ');
}
/**
 * Returns the index of the last element in the array where predicate is true, and -1
 * otherwise.
 * @param array The source array to search in
 * @param predicate called once per element, from the last index down to 0, until it
 * returns true; the matching index is returned immediately. If no element matches,
 * findLastIndex returns -1.
 *
 * @see https://stackoverflow.com/a/53187807/1469797
 */
export function findLastIndex<T>(array: Array<T>, predicate: (value: T, index: number, obj: T[]) => boolean): number {
    for (let i = array.length - 1; i >= 0; i--) {
        if (predicate(array[i], i, array)) {
            return i;
        }
    }
    return -1;
}
/**
 * Render rule results as a markdown bullet list, one line per rule:
 * "* <name> - <triggered indicator|Skipped> - <result text or '-'>"
 * Lines are joined with CRLF (reddit markdown friendly).
 * */
export const parseRuleResultsToMarkdownSummary = (ruleResults: RuleResult[]): string => {
    const results = ruleResults.map((y: any) => {
        // removed unused rest destructure (restY) -- only these three fields are used
        const {triggered, result, name} = y;
        // triggered is tri-state: null => rule was skipped, true/false => ran with that outcome
        let t = triggeredIndicator(false);
        if (triggered === null) {
            t = 'Skipped';
        } else if (triggered === true) {
            t = triggeredIndicator(true);
        }
        return `* ${name} - ${t} - ${result || '-'}`;
    });
    return results.join('\r\n');
}
/**
 * True when the string ends with a recognized image file extension (case-insensitive)
 * */
export const isValidImageURL = (str: string): boolean => {
    return /\w+\.(jpg|jpeg|gif|png|tiff|bmp|webp)$/i.test(str);
}
// Lazily-imported resemblejs compare function, cached after first successful import (see getCIFunc)
let resembleCIFunc: Function;
// Shape of the sharp module's default export: a factory that produces a Sharp pipeline
// from a buffer/typed array/file path
type SharpCreate = (input?:
| Buffer
| Uint8Array
| Uint8ClampedArray
| Int8Array
| Uint16Array
| Int16Array
| Uint32Array
| Int32Array
| Float32Array
| Float64Array
| string, options?: SharpOptions) => Sharp;
// Lazily-imported sharp factory, cached after first successful import (see getSharpAsync)
let sharpImg: SharpCreate;
/**
 * Lazily import resemblejs' compareImages function, caching it after the first successful import.
 * Throws when resemblejs is not installed.
 * */
const getCIFunc = async () => {
    if (resembleCIFunc !== undefined) {
        return resembleCIFunc;
    }
    // @ts-ignore -- resemblejs ships no type declarations for this deep import
    const resembleModule = await import('resemblejs/compareImages');
    if (resembleModule === undefined) {
        throw new Error('Could not import resemblejs');
    }
    resembleCIFunc = resembleModule.default;
    return resembleCIFunc;
}
/**
 * Lazily import the sharp module, caching its factory after the first successful import.
 * Throws when sharp is not installed.
 * */
export const getSharpAsync = async (): Promise<SharpCreate> => {
    if (sharpImg !== undefined) {
        return sharpImg;
    }
    const sharpModule = await import('sharp');
    if (sharpModule === undefined) {
        throw new Error('Could not import sharp');
    }
    // @ts-ignore
    sharpImg = sharpModule.default;
    return sharpImg;
}
/**
 * Compare two images and report [comparison result, whether mismatch is below threshold, accumulated errors].
 *
 * pixelmatch (via pixelImageCompare) is currently the only comparison strategy.
 * A resemble-based fallback was removed because sharp is much faster, lighter on memory,
 * and easier to install on hosts than node-canvas; it may return in the future
 * (see the commented-out resembleImageCompare below) which is why variantDimensionDiff
 * is still accepted.
 * */
export const compareImages = async (data1: ImageData, data2: ImageData, threshold: number, variantDimensionDiff = 0): Promise<[ImageComparisonResult, boolean, string[]]> => {
    const errors: string[] = [];
    const results = await pixelImageCompare(data1, data2);
    return [results, results.misMatchPercentage < threshold, errors];
}
/**
 * Compare two images pixel-by-pixel using pixelmatch.
 *
 * Images are normalized to a common size first (via ImageData.normalizeImagesForComparison),
 * so the result always reports identical dimensions / zero dimension difference.
 *
 * NOTE(review): misMatchPercentage is computed as (mismatched pixels / total pixels), a 0-1
 * fraction, while the resemble implementation this replaced reported rawMisMatchPercentage on
 * a 0-100 scale -- confirm callers' thresholds expect this fraction scale.
 *
 * @throws when sharp cannot be imported (it is an optional dependency)
 * */
export const pixelImageCompare = async (data1: ImageData, data2: ImageData): Promise<ImageComparisonResult> => {
let pixelDiff: number | undefined = undefined;
let sharpFunc: SharpCreate;
try {
sharpFunc = await getSharpAsync();
} catch (err) {
// augment the import failure with install guidance before rethrowing
err.message = `Unable to do image comparison due to an issue importing the comparison library. It is likely sharp is not installed (see ContextMod docs). Error Message: ${err.message}`;
throw err;
}
// both images resized/cropped to the same width x height so the raw buffers line up pixel-for-pixel
const [refImg, compareImg, width, height] = await data1.normalizeImagesForComparison('pixel', data2);
const time = Date.now();
// ensureAlpha() is imperative here because pixelmatch expects an alpha layer
pixelDiff = pixelmatch(await refImg.ensureAlpha().raw().toBuffer(), await compareImg.ensureAlpha().raw().toBuffer(), null, width, height);
return {
isSameDimensions: true,
dimensionDifference: {
height: 0,
width: 0,
},
misMatchPercentage: pixelDiff / (width * height),
analysisTime: Date.now() - time,
}
}
// see comments in compareImages
//
// export const resembleImageCompare = async (data1: ImageData, data2: ImageData, threshold?: number, variantDimensionDiff = 0): Promise<ImageComparisonResult> => {
// let ci: Function;
//
// try {
// ci = await getCIFunc();
// } catch (err) {
// err.message = `Unable to do image comparison due to an issue importing the comparison library. It is likely 'node-canvas' is not installed (see ContextMod docs). Error Message: ${err.message}`;
// throw err;
// }
//
// let results: ImageComparisonResult | undefined = undefined;
// // @ts-ignore
// let resResult: ResembleSingleCallbackComparisonResult = undefined;
//
// //const [minWidth, minHeight] = getMinimumDimensions(data1, data2);
// const compareOptions = {
// // "ignore": [
// // 'colors' // ~100% than nothing because resemble computes brightness information from rgb for each pixel
// // ],
// // boundingBox is ~30% slower than no restrictions
// // because resemble has to check that each pixel is within the box
// //
// // output: {
// // // compare at most 800x800 section to increase performance
// // // -- potentially allow this to be user-configurable in the future if not sufficient for dup detection
// // boundingBox: {
// // left: 0,
// // top: 0,
// // right: Math.min(minWidth, 800),
// // bottom: Math.min(minHeight, 800)
// // },
// // },
// returnEarlyThreshold: threshold !== undefined ? Math.min(threshold + 5, 100) : undefined,
// };
//
// if(data1.preferredResolution !== undefined) {
// const [prefWidth, prefHeight] = data1.preferredResolution;
// const prefImgData = data2.getSimilarResolutionVariant(prefWidth, prefHeight, variantDimensionDiff);
// if(prefImgData !== undefined) {
// let refThumbnail;
// try {
// refThumbnail = data1.getSimilarResolutionVariant(prefWidth, prefHeight) as ImageData;
// resResult = await ci(await (await refThumbnail.sharp()).clone().resize(400, null, {fit: 'outside'}).jpeg().toBuffer()
// , await (await prefImgData.sharp()).clone().resize(400, null, {fit: 'outside'}).jpeg().toBuffer()
// , compareOptions) as ResembleSingleCallbackComparisonResult;
// } catch(err) {
// throw err;
// }
// }
// }
// if(resResult === undefined) {
// resResult = await ci(await (await data1.sharp()).clone().resize(400, null, {fit: 'outside'}).jpeg().toBuffer(),
// await (await data2.sharp()).clone().resize(400, null, {fit: 'outside'}).jpeg().toBuffer(), compareOptions) as ResembleSingleCallbackComparisonResult;
// }
//
//
// return {
// isSameDimensions: resResult.isSameDimensions,
// dimensionDifference: resResult.dimensionDifference,
// // @ts-ignore
// misMatchPercentage: resResult.rawMisMatchPercentage,
// analysisTime: resResult.analysisTime
// };
// }
/**
 * Convert historical stats into a display-friendly shape: every property is copied through,
 * and each Map-valued property additionally gets a `<key>Total` property summing its values.
 * */
export const createHistoricalStatsDisplay = (data: HistoricalStats): HistoricalStatsDisplay => {
    const display: any = {};
    for (const [key, value] of Object.entries(data)) {
        display[key] = value;
        if (value instanceof Map) {
            // sum the per-identifier counts into an aggregate total for this stat
            let total = 0;
            for (const count of value.values()) {
                total += count;
            }
            display[`${key}Total`] = total;
        }
    }
    return display as HistoricalStatsDisplay;
}
/**
 * Determine if the state criteria being checked are
 * 1 ) expensive to compute or
 * 2 ) require additional api requests
 *
 * If neither then do not cache results as the number of unique keys (sub-state) increases AT LEAST linearly taking up space (especially in memory cache)
 * when they are probably not necessary to begin with
 *
 * @param state criteria being evaluated (currently unused -- see body)
 * @returns always false for now; kept as a function so call sites don't change when cacheable criteria are added
 * */
export const shouldCacheSubredditStateCriteriaResult = (state: SubredditState | StrongSubredditState): boolean => {
// currently there are no scenarios where we need to cache results
// since only things computed from state are comparisons for properties already cached on subreddit object
// and regexes for name which aren't that costly
// -- so just return false
return false;
}
/**
 * True when the only defined criteria on the subreddit state are `name` and/or `stateDescription`
 * (i.e. nothing that would require inspecting subreddit properties)
 * */
export const subredditStateIsNameOnly = (state: SubredditState | StrongSubredditState): boolean => {
    return Object.entries(state).every(([key, val]) => val === undefined || ['name', 'stateDescription'].includes(key));
}
/**
 * Absolute percent difference between two numbers, relative to the first (num1 is the reference value)
 * */
export const absPercentDifference = (num1: number, num2: number) => {
    const relativeDiff = (num1 - num2) / num1;
    return Math.abs(relativeDiff) * 100;
}
/**
 * Hex-string length of an image hash built from a bits x bits grid:
 * bits^2 binary digits = bits^2 / 4 hex characters
 * */
export const bitsToHexLength = (bits: number): number => {
    return (bits * bits) / 4;
}
/**
 * Escape every regex metacharacter in a string so it can be embedded in a pattern as a literal
 * */
export const escapeRegex = (val: string) => {
    return val.replace(/[\\^$.*+?()[\]{}|\/-]/g, '\\$&');
}