all: sync with master; upd chlog

This commit is contained in:
Ainar Garipov
2023-07-26 13:18:44 +03:00
parent ec83d0eb86
commit 48ee2f8a42
99 changed files with 3202 additions and 1886 deletions

View File

@@ -11,6 +11,7 @@ import (
"time"
"github.com/AdguardTeam/AdGuardHome/internal/aghalg"
"github.com/AdguardTeam/AdGuardHome/internal/aghrenameio"
"github.com/AdguardTeam/AdGuardHome/internal/filtering/rulelist"
"github.com/AdguardTeam/golibs/errors"
"github.com/AdguardTeam/golibs/log"
@@ -83,53 +84,53 @@ func (d *DNSFilter) filterSetProperties(
filters = d.WhitelistFilters
}
i := slices.IndexFunc(filters, func(filt FilterYAML) bool { return filt.URL == listURL })
i := slices.IndexFunc(filters, func(flt FilterYAML) bool { return flt.URL == listURL })
if i == -1 {
return false, errFilterNotExist
}
filt := &filters[i]
flt := &filters[i]
log.Debug(
"filtering: set name to %q, url to %s, enabled to %t for filter %s",
newList.Name,
newList.URL,
newList.Enabled,
filt.URL,
flt.URL,
)
defer func(oldURL, oldName string, oldEnabled bool, oldUpdated time.Time, oldRulesCount int) {
if err != nil {
filt.URL = oldURL
filt.Name = oldName
filt.Enabled = oldEnabled
filt.LastUpdated = oldUpdated
filt.RulesCount = oldRulesCount
flt.URL = oldURL
flt.Name = oldName
flt.Enabled = oldEnabled
flt.LastUpdated = oldUpdated
flt.RulesCount = oldRulesCount
}
}(filt.URL, filt.Name, filt.Enabled, filt.LastUpdated, filt.RulesCount)
}(flt.URL, flt.Name, flt.Enabled, flt.LastUpdated, flt.RulesCount)
filt.Name = newList.Name
flt.Name = newList.Name
if filt.URL != newList.URL {
if flt.URL != newList.URL {
if d.filterExistsLocked(newList.URL) {
return false, errFilterExists
}
shouldRestart = true
filt.URL = newList.URL
filt.LastUpdated = time.Time{}
filt.unload()
flt.URL = newList.URL
flt.LastUpdated = time.Time{}
flt.unload()
}
if filt.Enabled != newList.Enabled {
filt.Enabled = newList.Enabled
if flt.Enabled != newList.Enabled {
flt.Enabled = newList.Enabled
shouldRestart = true
}
if filt.Enabled {
if flt.Enabled {
if shouldRestart {
// Download the filter contents.
shouldRestart, err = d.update(filt)
shouldRestart, err = d.update(flt)
}
} else {
// TODO(e.burkov): The validation of the contents of the new URL is
@@ -137,7 +138,7 @@ func (d *DNSFilter) filterSetProperties(
// possible to set a bad rules source, but the validation should still
// kick in when the filter is enabled. Consider changing this behavior
// to be stricter.
filt.unload()
flt.unload()
}
return shouldRestart, err
@@ -250,24 +251,24 @@ func assignUniqueFilterID() int64 {
// Sets up a timer that will be checking for filters updates periodically
func (d *DNSFilter) periodicallyRefreshFilters() {
const maxInterval = 1 * 60 * 60
intval := 5 // use a dynamically increasing time interval
ivl := 5 // use a dynamically increasing time interval
for {
isNetErr, ok := false, false
if d.FiltersUpdateIntervalHours != 0 {
_, isNetErr, ok = d.tryRefreshFilters(true, true, false)
if ok && !isNetErr {
intval = maxInterval
ivl = maxInterval
}
}
if isNetErr {
intval *= 2
if intval > maxInterval {
intval = maxInterval
ivl *= 2
if ivl > maxInterval {
ivl = maxInterval
}
}
time.Sleep(time.Duration(intval) * time.Second)
time.Sleep(time.Duration(ivl) * time.Second)
}
}
@@ -329,20 +330,20 @@ func (d *DNSFilter) refreshFiltersArray(filters *[]FilterYAML, force bool) (int,
return 0, nil, nil, false
}
nfail := 0
failNum := 0
for i := range updateFilters {
uf := &updateFilters[i]
updated, err := d.update(uf)
updateFlags = append(updateFlags, updated)
if err != nil {
nfail++
log.Info("filtering: updating filter from url %q: %s\n", uf.URL, err)
failNum++
log.Error("filtering: updating filter from url %q: %s\n", uf.URL, err)
continue
}
}
if nfail == len(updateFilters) {
if failNum == len(updateFilters) {
return 0, nil, nil, true
}
@@ -464,48 +465,6 @@ func (d *DNSFilter) update(filter *FilterYAML) (b bool, err error) {
return b, err
}
// finalizeUpdate closes the temporary file and either removes it, when
// updated is false, or moves it into place as the filter's contents file.
// On success it also updates flt's name, rules count, and checksum from res.
func (d *DNSFilter) finalizeUpdate(
	file *os.File,
	flt *FilterYAML,
	updated bool,
	res *rulelist.ParseResult,
) (err error) {
	tmpFileName := file.Name()

	// Close the file before renaming it because it's required on Windows.
	//
	// See https://github.com/adguardTeam/adGuardHome/issues/1553.
	err = file.Close()
	if err != nil {
		return fmt.Errorf("closing temporary file: %w", err)
	}

	if !updated {
		// No changes in the downloaded list, so the temporary copy is not
		// needed.
		log.Debug("filtering: filter %d from url %q has no changes, skipping", flt.ID, flt.URL)

		return os.Remove(tmpFileName)
	}

	fltPath := flt.Path(d.DataDir)
	log.Info("filtering: saving contents of filter %d into %q", flt.ID, fltPath)

	// Don't use renameio or maybe packages, since those will require loading
	// the whole filter content to the memory on Windows.
	err = os.Rename(tmpFileName, fltPath)
	if err != nil {
		// Remove the leftover temporary file; report both errors if the
		// removal fails as well.
		return errors.WithDeferred(err, os.Remove(tmpFileName))
	}

	// Prefer the name already set by the user; fall back to the title parsed
	// from the list itself.
	flt.Name = aghalg.Coalesce(flt.Name, res.Title)
	flt.checksum, flt.RulesCount = res.Checksum, res.RulesCount

	return nil
}
// updateIntl updates the flt rewriting it's actual file. It returns true if
// the actual update has been performed.
func (d *DNSFilter) updateIntl(flt *FilterYAML) (ok bool, err error) {
@@ -513,63 +472,22 @@ func (d *DNSFilter) updateIntl(flt *FilterYAML) (ok bool, err error) {
var res *rulelist.ParseResult
var tmpFile *os.File
tmpFile, err = os.CreateTemp(filepath.Join(d.DataDir, filterDir), "")
if err != nil {
return false, err
}
defer func() {
finErr := d.finalizeUpdate(tmpFile, flt, ok, res)
if ok && finErr == nil {
log.Info(
"filtering: updated filter %d: %d bytes, %d rules",
flt.ID,
res.BytesWritten,
res.RulesCount,
)
return
}
err = errors.WithDeferred(err, finErr)
}()
// Change the default 0o600 permission to something more acceptable by end
// users.
//
// See https://github.com/AdguardTeam/AdGuardHome/issues/3198.
if err = tmpFile.Chmod(0o644); err != nil {
return false, fmt.Errorf("changing file mode: %w", err)
tmpFile, err := aghrenameio.NewPendingFile(flt.Path(d.DataDir), 0o644)
if err != nil {
return false, err
}
defer func() { err = d.finalizeUpdate(tmpFile, flt, res, err, ok) }()
var r io.Reader
if !filepath.IsAbs(flt.URL) {
var resp *http.Response
resp, err = d.HTTPClient.Get(flt.URL)
if err != nil {
log.Info("filtering: requesting filter from %q: %s, skipping", flt.URL, err)
return false, err
}
defer func() { err = errors.WithDeferred(err, resp.Body.Close()) }()
if resp.StatusCode != http.StatusOK {
log.Info("filtering got status code %d from %q, skipping", resp.StatusCode, flt.URL)
return false, fmt.Errorf("got status code %d, want %d", resp.StatusCode, http.StatusOK)
}
r = resp.Body
} else {
var f *os.File
f, err = os.Open(flt.URL)
if err != nil {
return false, fmt.Errorf("open file: %w", err)
}
defer func() { err = errors.WithDeferred(err, f.Close()) }()
r = f
r, err := d.reader(flt.URL)
if err != nil {
// Don't wrap the error since it's informative enough as is.
return false, err
}
defer func() { err = errors.WithDeferred(err, r.Close()) }()
bufPtr := d.bufPool.Get().(*[]byte)
defer d.bufPool.Put(bufPtr)
@@ -580,6 +498,78 @@ func (d *DNSFilter) updateIntl(flt *FilterYAML) (ok bool, err error) {
return res.Checksum != flt.checksum && err == nil, err
}
// finalizeUpdate either commits the pending file as the filter's new contents,
// when updated is true, or cleans it up.  returned is the error, if any, from
// the update operation itself; it is combined with the cleanup error.  On a
// successful commit it also updates flt's name, rules count, and checksum
// from res.
func (d *DNSFilter) finalizeUpdate(
	file aghrenameio.PendingFile,
	flt *FilterYAML,
	res *rulelist.ParseResult,
	returned error,
	updated bool,
) (err error) {
	id := flt.ID
	if !updated {
		if returned == nil {
			// Only log the skip when the update itself succeeded; otherwise
			// returned already explains what went wrong.
			log.Debug("filtering: filter %d from url %q has no changes, skipping", id, flt.URL)
		}

		return errors.WithDeferred(returned, file.Cleanup())
	}

	log.Info("filtering: saving contents of filter %d into %q", id, flt.Path(d.DataDir))

	// Commit the pending file into the filter's path.
	err = file.CloseReplace()
	if err != nil {
		return fmt.Errorf("finalizing update: %w", err)
	}

	rulesCount := res.RulesCount
	log.Info("filtering: updated filter %d: %d bytes, %d rules", id, res.BytesWritten, rulesCount)

	// Prefer the name already set by the user; fall back to the title parsed
	// from the list itself.
	flt.Name = aghalg.Coalesce(flt.Name, res.Title)
	flt.checksum = res.Checksum
	flt.RulesCount = rulesCount

	return nil
}
// reader returns an io.ReadCloser reading filtering-rule list data from
// either a file on the filesystem or the filter's HTTP URL.  The caller is
// responsible for closing r.
func (d *DNSFilter) reader(fltURL string) (r io.ReadCloser, err error) {
	// An absolute path means the "URL" is actually a local file.
	if filepath.IsAbs(fltURL) {
		r, err = os.Open(fltURL)
		if err != nil {
			return nil, fmt.Errorf("opening file: %w", err)
		}

		return r, nil
	}

	r, err = d.readerFromURL(fltURL)
	if err != nil {
		return nil, fmt.Errorf("reading from url: %w", err)
	}

	return r, nil
}
// readerFromURL returns an io.ReadCloser reading filtering-rule list data
// from the filter's URL.  The caller is responsible for closing r.
func (d *DNSFilter) readerFromURL(fltURL string) (r io.ReadCloser, err error) {
	resp, err := d.HTTPClient.Get(fltURL)
	if err != nil {
		// Don't wrap the error since it's informative enough as is.
		return nil, err
	}

	if resp.StatusCode != http.StatusOK {
		// The body isn't returned to the caller on this path, so close it
		// here to avoid leaking the connection.
		err = fmt.Errorf("got status code %d, want %d", resp.StatusCode, http.StatusOK)
		if cerr := resp.Body.Close(); cerr != nil {
			err = fmt.Errorf("%w; closing response body: %w", err, cerr)
		}

		return nil, err
	}

	return resp.Body, nil
}
// loads filter contents from the file in dataDir
func (d *DNSFilter) load(flt *FilterYAML) (err error) {
fileName := flt.Path(d.DataDir)

View File

@@ -943,7 +943,7 @@ func New(c *Config, blockFilters []Filter) (d *DNSFilter, err error) {
d = &DNSFilter{
bufPool: &sync.Pool{
New: func() (buf any) {
bufVal := make([]byte, rulelist.MaxRuleLen)
bufVal := make([]byte, rulelist.DefaultRuleBufSize)
return &bufVal
},

View File

@@ -6,9 +6,9 @@ import (
"fmt"
"hash/crc32"
"io"
"unicode"
"github.com/AdguardTeam/golibs/errors"
"golang.org/x/exp/slices"
)
// Parser is a filtering-rule parser that collects data, such as the checksum
@@ -48,19 +48,29 @@ type ParseResult struct {
// nil.
func (p *Parser) Parse(dst io.Writer, src io.Reader, buf []byte) (r *ParseResult, err error) {
s := bufio.NewScanner(src)
s.Buffer(buf, MaxRuleLen)
lineIdx := 0
// Don't use [DefaultRuleBufSize] as the maximum size, since some
// filtering-rule lists compressed by e.g. HostlistsCompiler can have very
// large lines. The buffer optimization still works for the more common
// case of reasonably-sized lines.
//
// See https://github.com/AdguardTeam/AdGuardHome/issues/6003.
s.Buffer(buf, bufio.MaxScanTokenSize)
// Use a one-based index for lines and columns, since these errors end up in
// the frontend, and users are more familiar with one-based line and column
// indexes.
lineNum := 1
for s.Scan() {
var n int
n, err = p.processLine(dst, s.Bytes(), lineIdx)
n, err = p.processLine(dst, s.Bytes(), lineNum)
p.written += n
if err != nil {
// Don't wrap the error, because it's informative enough as is.
return p.result(), err
}
lineIdx++
lineNum++
}
r = p.result()
@@ -81,7 +91,7 @@ func (p *Parser) result() (r *ParseResult) {
// processLine processes a single line. It may write to dst, and if it does, n
// is the number of bytes written.
func (p *Parser) processLine(dst io.Writer, line []byte, lineIdx int) (n int, err error) {
func (p *Parser) processLine(dst io.Writer, line []byte, lineNum int) (n int, err error) {
trimmed := bytes.TrimSpace(line)
if p.written == 0 && isHTMLLine(trimmed) {
return 0, ErrHTML
@@ -95,9 +105,10 @@ func (p *Parser) processLine(dst io.Writer, line []byte, lineIdx int) (n int, er
}
if badIdx != -1 {
return 0, fmt.Errorf(
"line at index %d: character at index %d: non-printable character",
lineIdx,
badIdx+bytes.Index(line, trimmed),
"line %d: character %d: likely binary character %q",
lineNum,
badIdx+bytes.Index(line, trimmed)+1,
trimmed[badIdx],
)
}
@@ -130,41 +141,37 @@ func hasPrefixFold(b, prefix []byte) (ok bool) {
}
// parseLine returns true if the parsed line is a filtering rule. line is
// assumed to be trimmed of whitespace characters. nonPrintIdx is the index of
// the first non-printable character, if any; if there are none, nonPrintIdx is
// -1.
// assumed to be trimmed of whitespace characters. badIdx is the index of the
// first character that may indicate that this is a binary file, or -1 if none.
//
// A line is considered a rule if it's not empty, not a comment, and contains
// only printable characters.
func parseLine(line []byte) (nonPrintIdx int, isRule bool) {
func parseLine(line []byte) (badIdx int, isRule bool) {
if len(line) == 0 || line[0] == '#' || line[0] == '!' {
return -1, false
}
nonPrintIdx = bytes.IndexFunc(line, isNotPrintable)
badIdx = slices.IndexFunc(line, likelyBinary)
return nonPrintIdx, nonPrintIdx == -1
return badIdx, badIdx == -1
}
// isNotPrintable returns true if r is not a printable character that can be
// contained in a filtering rule.
func isNotPrintable(r rune) (ok bool) {
// Tab isn't included into Unicode's graphic symbols, so include it here
// explicitly.
return r != '\t' && !unicode.IsGraphic(r)
// likelyBinary reports whether b is a byte that suggests the data comes from
// a binary file rather than a text-based filtering-rule list.  Textual
// control bytes (LF, CR, and TAB) are not considered binary.
func likelyBinary(b byte) (ok bool) {
	switch b {
	case '\n', '\r', '\t':
		return false
	default:
		return b < 0x20 || b == 0x7F
	}
}
// parseLineTitle is like [parseLine] but additionally looks for a title. line
// is assumed to be trimmed of whitespace characters.
func (p *Parser) parseLineTitle(line []byte) (nonPrintIdx int, isRule bool) {
func (p *Parser) parseLineTitle(line []byte) (badIdx int, isRule bool) {
if len(line) == 0 || line[0] == '#' {
return -1, false
}
if line[0] != '!' {
nonPrintIdx = bytes.IndexFunc(line, isNotPrintable)
badIdx = slices.IndexFunc(line, likelyBinary)
return nonPrintIdx, nonPrintIdx == -1
return badIdx, badIdx == -1
}
const titlePattern = "! Title: "

View File

@@ -6,10 +6,10 @@ import (
"strings"
"testing"
"github.com/AdguardTeam/AdGuardHome/internal/aghtest"
"github.com/AdguardTeam/AdGuardHome/internal/filtering/rulelist"
"github.com/AdguardTeam/golibs/errors"
"github.com/AdguardTeam/golibs/testutil"
"github.com/AdguardTeam/golibs/testutil/fakeio"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -17,6 +17,9 @@ import (
func TestParser_Parse(t *testing.T) {
t.Parallel()
longRule := strings.Repeat("a", rulelist.DefaultRuleBufSize+1) + "\n"
tooLongRule := strings.Repeat("a", bufio.MaxScanTokenSize+1) + "\n"
testCases := []struct {
name string
in string
@@ -74,26 +77,42 @@ func TestParser_Parse(t *testing.T) {
wantTitle: "Test Title",
wantRulesNum: 1,
wantWritten: len(testRuleTextBlocked),
}, {
name: "cosmetic_with_zwnj",
in: testRuleTextCosmetic,
wantDst: testRuleTextCosmetic,
wantErrMsg: "",
wantTitle: "",
wantRulesNum: 1,
wantWritten: len(testRuleTextCosmetic),
}, {
name: "bad_char",
in: "! Title: Test Title \n" +
testRuleTextBlocked +
">>>\x7F<<<",
wantDst: testRuleTextBlocked,
wantErrMsg: "line at index 2: " +
"character at index 3: " +
"non-printable character",
wantErrMsg: "line 3: " +
"character 4: " +
"likely binary character '\\x7f'",
wantTitle: "Test Title",
wantRulesNum: 1,
wantWritten: len(testRuleTextBlocked),
}, {
name: "too_long",
in: strings.Repeat("a", rulelist.MaxRuleLen+1),
in: tooLongRule,
wantDst: "",
wantErrMsg: "scanning filter contents: " + bufio.ErrTooLong.Error(),
wantErrMsg: "scanning filter contents: bufio.Scanner: token too long",
wantTitle: "",
wantRulesNum: 0,
wantWritten: 0,
}, {
name: "longer_than_default",
in: longRule,
wantDst: longRule,
wantErrMsg: "",
wantTitle: "",
wantRulesNum: 1,
wantWritten: len(longRule),
}, {
name: "bad_tab_and_comment",
in: testRuleTextBadTab,
@@ -118,7 +137,7 @@ func TestParser_Parse(t *testing.T) {
t.Parallel()
dst := &bytes.Buffer{}
buf := make([]byte, rulelist.MaxRuleLen)
buf := make([]byte, rulelist.DefaultRuleBufSize)
p := rulelist.NewParser()
r, err := p.Parse(dst, strings.NewReader(tc.in), buf)
@@ -140,12 +159,12 @@ func TestParser_Parse(t *testing.T) {
func TestParser_Parse_writeError(t *testing.T) {
t.Parallel()
dst := &aghtest.Writer{
dst := &fakeio.Writer{
OnWrite: func(b []byte) (n int, err error) {
return 1, errors.Error("test error")
},
}
buf := make([]byte, rulelist.MaxRuleLen)
buf := make([]byte, rulelist.DefaultRuleBufSize)
p := rulelist.NewParser()
r, err := p.Parse(dst, strings.NewReader(testRuleTextBlocked), buf)
@@ -165,7 +184,7 @@ func TestParser_Parse_checksums(t *testing.T) {
"# Another comment.\n"
)
buf := make([]byte, rulelist.MaxRuleLen)
buf := make([]byte, rulelist.DefaultRuleBufSize)
p := rulelist.NewParser()
r, err := p.Parse(&bytes.Buffer{}, strings.NewReader(withoutComments), buf)
@@ -192,7 +211,7 @@ var (
func BenchmarkParser_Parse(b *testing.B) {
dst := &bytes.Buffer{}
src := strings.NewReader(strings.Repeat(testRuleTextBlocked, 1000))
buf := make([]byte, rulelist.MaxRuleLen)
buf := make([]byte, rulelist.DefaultRuleBufSize)
p := rulelist.NewParser()
b.ReportAllocs()
@@ -204,6 +223,14 @@ func BenchmarkParser_Parse(b *testing.B) {
require.NoError(b, errSink)
require.NotNil(b, resSink)
// Most recent result, on a ThinkPad X13 with a Ryzen Pro 7 CPU:
//
// goos: linux
// goarch: amd64
// pkg: github.com/AdguardTeam/AdGuardHome/internal/filtering/rulelist
// cpu: AMD Ryzen 7 PRO 4750U with Radeon Graphics
// BenchmarkParser_Parse-16 100000000 128.0 ns/op 48 B/op 1 allocs/op
}
func FuzzParser_Parse(f *testing.F) {
@@ -215,15 +242,17 @@ func FuzzParser_Parse(f *testing.F) {
"! Comment",
"! Title ",
"! Title XXX",
testRuleTextBadTab,
testRuleTextBlocked,
testRuleTextCosmetic,
testRuleTextEtcHostsTab,
testRuleTextHTML,
testRuleTextBlocked,
testRuleTextBadTab,
"1.2.3.4",
"1.2.3.4 etc-hosts.example",
">>>\x00<<<",
">>>\x7F<<<",
strings.Repeat("a", n+1),
strings.Repeat("a", rulelist.DefaultRuleBufSize+1),
strings.Repeat("a", bufio.MaxScanTokenSize+1),
}
for _, tc := range testCases {

View File

@@ -4,8 +4,6 @@
// TODO(a.garipov): Expand.
package rulelist
// MaxRuleLen is the maximum length of a line with a filtering rule, in bytes.
//
// TODO(a.garipov): Consider changing this to a rune length, like AdGuardDNS
// does.
const MaxRuleLen = 1024
// DefaultRuleBufSize is the default length of a buffer used to read a line with
// a filtering rule, in bytes.
const DefaultRuleBufSize = 1024

View File

@@ -7,8 +7,13 @@ const testTimeout = 1 * time.Second
// Common texts for tests.
const (
testRuleTextHTML = "<!DOCTYPE html>\n"
testRuleTextBlocked = "||blocked.example^\n"
testRuleTextBadTab = "||bad-tab-and-comment.example^\t# A comment.\n"
testRuleTextBlocked = "||blocked.example^\n"
testRuleTextEtcHostsTab = "0.0.0.0 tab..example^\t# A comment.\n"
testRuleTextHTML = "<!DOCTYPE html>\n"
// testRuleTextCosmetic is a cosmetic rule with a zero-width non-joiner.
//
// See https://github.com/AdguardTeam/AdGuardHome/issues/6003.
testRuleTextCosmetic = "||cosmetic.example## :has-text(/\u200c/i)\n"
)

View File

@@ -89,37 +89,34 @@ func TestSafeSearchCacheGoogle(t *testing.T) {
assert.False(t, res.IsFiltered)
assert.Empty(t, res.Rules)
resolver := &aghtest.TestResolver{}
resolver := &aghtest.Resolver{
OnLookupIP: func(_ context.Context, _, host string) (ips []net.IP, err error) {
ip4, ip6 := aghtest.HostToIPs(host)
return []net.IP{ip4, ip6}, nil
},
}
ss = newForTest(t, defaultSafeSearchConf)
ss.resolver = resolver
// Lookup for safesearch domain.
rewrite := ss.searchHost(domain, testQType)
ips, err := resolver.LookupIP(context.Background(), "ip", rewrite.NewCNAME)
require.NoError(t, err)
var foundIP net.IP
for _, ip := range ips {
if ip.To4() != nil {
foundIP = ip
break
}
}
wantIP, _ := aghtest.HostToIPs(rewrite.NewCNAME)
res, err = ss.CheckHost(domain, testQType)
require.NoError(t, err)
require.Len(t, res.Rules, 1)
assert.True(t, res.Rules[0].IP.Equal(foundIP))
assert.True(t, res.Rules[0].IP.Equal(wantIP))
// Check cache.
cachedValue, isFound := ss.getCachedResult(domain, testQType)
require.True(t, isFound)
require.Len(t, cachedValue.Rules, 1)
assert.True(t, cachedValue.Rules[0].IP.Equal(foundIP))
assert.True(t, cachedValue.Rules[0].IP.Equal(wantIP))
}
const googleHost = "www.google.com"

View File

@@ -92,8 +92,15 @@ func TestDefault_CheckHost_yandexAAAA(t *testing.T) {
}
func TestDefault_CheckHost_google(t *testing.T) {
resolver := &aghtest.TestResolver{}
ip, _ := resolver.HostToIPs("forcesafesearch.google.com")
resolver := &aghtest.Resolver{
OnLookupIP: func(_ context.Context, _, host string) (ips []net.IP, err error) {
ip4, ip6 := aghtest.HostToIPs(host)
return []net.IP{ip4, ip6}, nil
},
}
wantIP, _ := aghtest.HostToIPs("forcesafesearch.google.com")
conf := testConf
conf.CustomResolver = resolver
@@ -119,7 +126,7 @@ func TestDefault_CheckHost_google(t *testing.T) {
require.Len(t, res.Rules, 1)
assert.Equal(t, ip, res.Rules[0].IP)
assert.Equal(t, wantIP, res.Rules[0].IP)
assert.EqualValues(t, filtering.SafeSearchListID, res.Rules[0].FilterListID)
})
}

View File

@@ -1505,7 +1505,6 @@ var blockedServices = []blockedService{{
"||aus.social^",
"||awscommunity.social^",
"||climatejustice.social^",
"||cupoftea.social^",
"||cyberplace.social^",
"||defcon.social^",
"||det.social^",
@@ -1589,6 +1588,7 @@ var blockedServices = []blockedService{{
"||techhub.social^",
"||theblower.au^",
"||tkz.one^",
"||todon.eu^",
"||toot.aquilenet.fr^",
"||toot.community^",
"||toot.funami.tech^",
@@ -1661,6 +1661,7 @@ var blockedServices = []blockedService{{
"||nintendo.jp^",
"||nintendo.net^",
"||nintendo.nl^",
"||nintendo.pt^",
"||nintendoswitch.cn^",
"||nintendowifi.net^",
},
@@ -2160,6 +2161,20 @@ var blockedServices = []blockedService{{
Rules: []string{
"||voot.com^",
},
}, {
ID: "wargaming",
Name: "Wargaming",
IconSVG: []byte("<svg xmlns=\"http://www.w3.org/2000/svg\" fill=\"currentColor\" viewBox=\"0 0 24 24\"><path d=\"M12 1.998c-5.52 0-10 4.481-10 9.988 0 5.52 4.48 9.996 10 9.996s10-4.476 10-9.996c0-5.507-4.48-9.988-10-9.988zm0 2c4.413 0 8 3.588 8 7.988 0 3.246-1.944 6.04-4.727 7.293.54-1.861.831-3.988.807-6.226l1.414.414a23.648 23.648 0 0 0-2-4.041c-.627 1.347-1.48 2.56-2.52 3.68l1.68-.133c-1.507 2.92-3.134 3.906-5.547 4.013-.386-4.213.12-7.014 2.827-9.04l.386 1.493c.653-.974 1.36-2.12 2.373-2.947-1.506-.6-2.999-.627-4.492-.334.386.16.76.588 1.014.828-3.485 1.662-5.643 4.202-6.744 7.68A7.95 7.95 0 0 1 4 11.986c0-4.4 3.587-7.988 8-7.988z\"/></svg>"),
Rules: []string{
"||wargaming.com^",
"||wargaming.net^",
"||wgcdn.co^",
"||wgcrowd.io^",
"||worldoftanks.com^",
"||worldofwarplanes.com^",
"||worldofwarships.eu^",
"||wotblitz.com^",
},
}, {
ID: "wechat",
Name: "WeChat",