add unit tests for some core functions/configs
Showing 13 changed files with 385 additions and 62 deletions.
@@ -0,0 +1,7 @@
{
  "env": {
    "test": {
      "plugins": ["@babel/plugin-transform-modules-commonjs"]
    }
  }
}
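The `env.test` block is only applied when Babel runs with `BABEL_ENV`/`NODE_ENV` set to `test`, which Jest sets by default, so `@babel/plugin-transform-modules-commonjs` rewrites the ES-module syntax in the `.mjs` sources into CommonJS that Jest's runtime can require. A rough sketch of the effect (illustrative, not part of the commit):

// ES-module source, e.g. an export like the ones tested below:
export const DEFAULT_PROXY_PROTOCOL = 'HTTPS';

// Under env.test, babel-jest compiles the export into CommonJS roughly like:
//   "use strict";
//   Object.defineProperty(exports, "__esModule", { value: true });
//   exports.DEFAULT_PROXY_PROTOCOL = 'HTTPS';
// (the exact output varies by Babel version)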
@@ -1,3 +1,9 @@
 module.exports = {
-  preset: 'jest-puppeteer',
+  'preset': 'jest-puppeteer',
+  'testRegex': '(/__tests__/.*|(\\.|/)(test|spec))\\.(mjs?|jsx?|js?|tsx?|ts?)$',
+  'transform': {
+    '^.+\\.jsx?$': 'babel-jest',
+    '^.+\\.mjs$': 'babel-jest',
+  },
+  'moduleFileExtensions': ['js', 'jsx', 'mjs'],
 };
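The new `testRegex` collects files under a `__tests__` directory as well as `*.test.*` / `*.spec.*` files with a js, jsx, mjs, ts, or tsx extension, and the `transform` map sends both `.js`/`.jsx` and `.mjs` files through `babel-jest`. A quick illustration of what the pattern accepts (a sketch, not part of the commit):

// Check the testRegex against some candidate paths (illustrative only).
const testRegex = new RegExp(
    '(/__tests__/.*|(\\.|/)(test|spec))\\.(mjs?|jsx?|js?|tsx?|ts?)$');

console.log(testRegex.test('src/servers.test.mjs'));   // true  -> collected by Jest
console.log(testRegex.test('src/__tests__/urls.js'));  // true  -> collected by Jest
console.log(testRegex.test('src/servers.mjs'));        // false -> ignored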
@@ -0,0 +1,15 @@
import {
  DEFAULT_PROXY_ADDRESS, BACKUP_PROXY_ADDRESS,
  DEFAULT_PROXY_PROTOCOL, BACKUP_PROXY_PROTOCOL} from './servers.mjs';

test('The proxy addresses must not be localhost', () => {
  expect(DEFAULT_PROXY_ADDRESS).not.toMatch(/localhost/i);
  expect(DEFAULT_PROXY_ADDRESS).not.toMatch(/127\.0\.0\.1/i);
  expect(BACKUP_PROXY_ADDRESS).not.toMatch(/localhost/i);
  expect(BACKUP_PROXY_ADDRESS).not.toMatch(/127\.0\.0\.1/i);
});

test('Production must use HTTPS proxy', () => {
  expect(DEFAULT_PROXY_PROTOCOL).toBe('HTTPS');
  expect(BACKUP_PROXY_PROTOCOL).toBe('HTTPS');
});
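These assertions only constrain the shape of the exported values; a hypothetical `servers.mjs` that would satisfy them looks roughly like this (placeholder values, not the project's real endpoints):

// Hypothetical servers.mjs (illustrative only).
export const DEFAULT_PROXY_PROTOCOL = 'HTTPS';
export const DEFAULT_PROXY_ADDRESS = 'proxy.example.com';
export const BACKUP_PROXY_PROTOCOL = 'HTTPS';
export const BACKUP_PROXY_ADDRESS = 'backup.example.net';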
@@ -0,0 +1,39 @@
import {HEADER_URLS, PROXY_BYPASS_URLS, PROXY_URLS} from './urls.mjs';


test('The proxy URL list must contain some domains', () => {
  expect(PROXY_URLS.filter((url) => url.includes('.qq.com'))).not.toHaveLength(0);
  expect(PROXY_URLS.filter(
      (url) => url.includes('flask-test-iwauxcyxjb.cn-hangzhou.fcapp.run/'))).not.toHaveLength(0);
});

test('Must not contain the all-url rules', () => {
  [HEADER_URLS, PROXY_BYPASS_URLS, PROXY_URLS].forEach((urlList) => {
    expect(urlList.filter((url) => url.startsWith('http://*/'))).toHaveLength(0);
    expect(urlList.filter((url) => url.startsWith('https://*/'))).toHaveLength(0);
  });
});

test('All URLs must start with http:// or https://', () => {
  const regex = /^(http|https):\/\//i;
  [HEADER_URLS, PROXY_BYPASS_URLS, PROXY_URLS].forEach((urlList) => {
    expect(urlList.filter((url) => !regex.test(url))).toHaveLength(0);
  });
});

test('All https URLs must have path that is * or empty', () => {
  /*
   * For example:
   * - Wrong: https://example.com/abc/*
   * - Right: https://example.com/*
   * - Right: https://example.com/
   */
  [HEADER_URLS, PROXY_BYPASS_URLS, PROXY_URLS].forEach((urlList) => {
    for (const url of urlList) {
      if (url.startsWith('https://')) {
        const domainRemoved = url.slice('https://'.length).split('/')[1];
        expect(['*', '']).toContain(domainRemoved);
      }
    }
  });
});
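The last test's path check slices off the `https://` prefix and splits on `/`; index 1 is then the first path segment, which must be `*` or the empty string. A small sketch (not part of the commit) of how that expression behaves for the examples in the test's own comment:

// How the domainRemoved expression evaluates (illustrative only).
const firstPathSegment = (url) => url.slice('https://'.length).split('/')[1];

console.log(firstPathSegment('https://example.com/*'));      // '*'   -> accepted
console.log(firstPathSegment('https://example.com/'));       // ''    -> accepted
console.log(firstPathSegment('https://example.com/abc/*'));  // 'abc' -> rejected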
@@ -0,0 +1,132 @@
import {urls2pac} from './_url_utils.mjs';

// urlWhitelist, urlList,
// proxyProtocol1, proxyAddress1,
// proxyProtocol2, proxyAddress2) {

const TEST_PROXY_PROTOCOL_1 = 'HTTPS';
const TEST_PROXY_ADDRESS_1 = 'proxy.example.com';
const TEST_PROXY_PROTOCOL_2 = 'https';
const TEST_PROXY_ADDRESS_2 = '1.2.3.4';


const TEST_BYPASS_URL_LIST = [
  'http://bangumi.bilibili.com/index/ding-count.json',
];

const TEST_URL_LIST = [
  'http://*/*',
  'https://*/*',
  'http://*.video.qq.com/*',
  'https://*.video.qq.com/*',
  'http://vd.l.qq.com/*',
  'https://vd.l.qq.com/*',
  'http://example.com',
  'https://example.com',
  'http://example.com/',
  'https://example.com/',
  'http://*.example.com/*',
  'https://*.example.com/*',
  'http://*.example.com/path.json?aaa=bbb',
  'https://*.example.com/path.json?aaa=bbb',
  'http://*.example.com/path.json?aaa=bbb*',
  'https://*.example.com/path.json?aaa=bbb*',
  'http://122.72.82.31/*',
];

const EXPECTED_PAC_CONTENT = [
  'var _http_map = {',
  ' \'white\': {',
  ' \'any\': [],',
  ' \'bangumi.bilibili.com\': [',
  ' /^\\/index\\/ding\\-count\\.json$/i',
  ' ]',
  ' },',
  ' \'proxy\': {',
  ' \'any\': [',
  ' /^[^/]*\\//i,',
  ' /^[^/]*\\.video\\.qq\\.com\\//i,',
  ' /^[^/]*\\.example\\.com\\//i,',
  ' /^[^/]*\\.example\\.com\\/path\\.json\\?aaa=bbb$/i,',
  ' /^[^/]*\\.example\\.com\\/path\\.json\\?aaa=bbb/i',
  ' ],',
  ' \'vd.l.qq.com\': [',
  ' /^\\//i',
  ' ],',
  ' \'example.com\': [',
  ' /^\\/$/i,',
  ' /^\\/$/i',
  ' ],',
  ' \'122.72.82.31\': [',
  ' /^\\//i',
  ' ]',
  ' }',
  '};',
  'var _https_map = {',
  ' \'white\': {',
  ' \'any\': []',
  ' },',
  ' \'proxy\': {',
  ' \'any\': [',
  ' /^[^/]*\\//i,',
  ' /^[^/]*\\.video\\.qq\\.com\\//i,',
  ' /^[^/]*\\.example\\.com\\//i,',
  ' /^[^/]*\\.example\\.com\\/path\\.json\\?aaa=bbb$/i,',
  ' /^[^/]*\\.example\\.com\\/path\\.json\\?aaa=bbb/i',
  ' ],',
  ' \'vd.l.qq.com\': [',
  ' /^\\//i',
  ' ],',
  ' \'example.com\': [',
  ' /^\\/$/i,',
  ' /^\\/$/i',
  ' ]',
  ' }',
  '};',
  'var _proxy_str = \'HTTPS proxy.example.com; HTTPS 1.2.3.4; DIRECT;\';',
  '',
  'function _check_regex_list(regex_list, str) {',
  ' if (str.slice(0, 4) === \':80/\')',
  ' str = str.slice(3);',
  ' for (var i = 0; i < regex_list.length; i++)',
  ' if (regex_list[i].test(str))',
  ' return true;',
  ' return false;',
  '}',
  '',
  'function _check_patterns(patterns, hostname, full_url, prot_len) {',
  ' if (patterns.hasOwnProperty(hostname))',
  ' if (_check_regex_list(patterns[hostname],',
  ' full_url.slice(prot_len + hostname.length)))',
  ' return true;',
  ' if (_check_regex_list(patterns.any,',
  ' full_url.slice(prot_len)))',
  ' return true;',
  ' return false;',
  '}',
  '',
  'function _find_proxy(url_map, host, url, prot_len) {',
  ' if (_check_patterns(url_map.white, host, url, prot_len))',
  ' return \'DIRECT\';',
  ' if (_check_patterns(url_map.proxy, host, url, prot_len))',
  ' return _proxy_str;',
  ' return \'DIRECT\';',
  '}',
  '',
  'function FindProxyForURL(url, host) {',
  ' var prot = url.slice(0, 6);',
  ' if (prot === \'http:/\')',
  ' return _find_proxy(_http_map, host, url, 7);',
  ' else if (prot === \'https:\')',
  ' return _find_proxy(_https_map, host, url, 8);',
  ' return \'DIRECT\';',
  '}',
].join('\n') + '\n';


test('Should produce the expected PAC content', () => {
  expect(urls2pac(
      TEST_BYPASS_URL_LIST, TEST_URL_LIST,
      TEST_PROXY_PROTOCOL_1, TEST_PROXY_ADDRESS_1,
      TEST_PROXY_PROTOCOL_2, TEST_PROXY_ADDRESS_2)).toMatch(EXPECTED_PAC_CONTENT);
});
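Beyond comparing strings, the generated PAC script can also be evaluated and exercised directly. A hedged sketch (not part of the commit) that could be appended to this test file; the expected results follow the white/proxy maps in EXPECTED_PAC_CONTENT above:

test('FindProxyForURL should route sample URLs per the maps above', () => {
  const pacSource = urls2pac(
      TEST_BYPASS_URL_LIST, TEST_URL_LIST,
      TEST_PROXY_PROTOCOL_1, TEST_PROXY_ADDRESS_1,
      TEST_PROXY_PROTOCOL_2, TEST_PROXY_ADDRESS_2);
  // Evaluate the PAC text and pull the FindProxyForURL function out of it.
  const findProxyForUrl = new Function(pacSource + '\nreturn FindProxyForURL;')();

  // The whitelisted (bypass) URL should go DIRECT.
  expect(findProxyForUrl(
      'http://bangumi.bilibili.com/index/ding-count.json',
      'bangumi.bilibili.com')).toBe('DIRECT');

  // A proxied host should get the full proxy fallback chain.
  expect(findProxyForUrl('https://vd.l.qq.com/anything', 'vd.l.qq.com'))
      .toBe('HTTPS proxy.example.com; HTTPS 1.2.3.4; DIRECT;');
});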
@@ -0,0 +1,41 @@
function urls2regexs(urlList) {
  const regexList = [];

  for (let str of urlList) {
    // Escape all possibly problematic symbols
    // http://stackoverflow.com/a/6969486/1766096
    str = str.replace(/[\-\[\]\/\{\}\(\)\+\?\.\\\^\$\|]/g, '\\$&');
    str = str.replace(/\*/g, '.*');

    // make the first * matches only domain names or ip addresses
    // just as http://developer.chrome.com/extensions/match_patterns.html
    str = str.replace(/^http:\\\/\\\/\.\*/i, 'http:\\/\\/[^\/]*');
    str = str.replace(/^https:\\\/\\\/\.\*/i, 'https:\\/\\/[^\/]*');

    regexList.push(new RegExp('^' + str + '$', 'i'));
  }

  // console.log(regex_list);
  return regexList;
}


export function produceSquidRegexList(urlList) {
  const regexList = urls2regexs(urlList);
  const regexToExtractHttpsDomain = /^\^https:\\\/\\\/([^:]+)\\\//i;

  let str;
  const result = [];
  for (const regex of regexList) {
    str = regex.toString();
    str = str.substring(1, str.length - 2);

    if (str.match(regexToExtractHttpsDomain)) {
      str = '^' + str.match(regexToExtractHttpsDomain)[1] + ':443';
    }

    result.push(str);
  }

  return result;
}
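A hedged usage sketch of `produceSquidRegexList` (not part of the commit; the import path is hypothetical and the output strings were traced by hand from the code above): an `https` pattern collapses to a `host:443` regex, presumably because a proxy like Squid only sees `CONNECT host:443` for HTTPS traffic, while other patterns stay full-URL regexes.

// Illustrative only; './_url_utils_squid.mjs' is a hypothetical module path.
import {produceSquidRegexList} from './_url_utils_squid.mjs';

console.log(produceSquidRegexList([
  'https://*.video.qq.com/*',
  'http://example.com/',
]));
// Roughly:
// [
//   '^[^/]*\\.video\\.qq\\.com:443',    // https pattern reduced to host:443
//   '^http:\\/\\/example\\.com\\/$',    // http pattern kept as a full-URL regex
// ]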