New proxy logic
1. Create the proxy file if it does not exist. 2. Build the in-memory proxy array from the file if the array is empty. 3. If the array is still empty, download a fresh proxy file. 4. Shift and continue unless the proxy's country is US. 5. Shift and continue if the proxy is too slow. 6. Shift and continue if the proxy is blocked by Google. 7. Print the successful proxy. 8. Save the remaining proxies back to the file.
This commit is contained in:
parent
062883c89f
commit
ed6092a005
63
apt-cyg
63
apt-cyg
|
@ -307,36 +307,51 @@ apt-search () {
|
||||||
}
|
}
|
||||||
|
|
||||||
# Fetch URL through a working US proxy, caching the proxy pool on disk.
# Arguments:
#   $1 - msg: short progress label, echoed as "request MSG... "
#   $2 - url: URL to fetch; the response body is written to web.json
# Side effects:
#   changes directory to /tmp; reads/writes proxy.txt (the cached pool);
#   writes web.json; prints the proxy that succeeded to stdout.
proxy () {
  local msg url dt pool px cn
  msg=$1
  url=$2
  set --                          # positional params hold the remaining proxy pool
  dt=proxy.txt
  cd /tmp || return               # don't scribble in the wrong directory
  printf 'request %s... ' "$msg"
  while :
  do
    # (Re)load the pool from the cache file when the in-shell pool is empty.
    if (( ! $# ))
    then
      touch "$dt"
      mapfile -t pool < "$dt"
      set -- "${pool[@]}"
    fi
    # Cache file was empty too: download a fresh list, then restart the
    # loop so the new file is actually loaded into $@ (the original fell
    # through to parse an empty "$1" and wasted one iteration).
    if (( ! $# ))
    then
      wget -q -O "$dt" txt.proxyspy.net/$dt
      continue
    fi
    # Each pool line looks like "host:port COUNTRY ..." — keep US proxies only.
    # -r so backslashes in the line are taken literally; quote "$1" so it is
    # not glob-expanded before the here-string.
    read -r px cn <<< "$1"
    if [[ ! $cn =~ US ]]
    then
      shift
      continue
    fi
    # Drop proxies that cannot fetch the URL within 1 second (1 try).
    if ! wget -q -T 1 -t 1 -O web.json -e http_proxy="$px" "$url"
    then
      shift
      continue
    fi
    # Inspect the Google API responseStatus field extracted from web.json.
    case $(awk '/responseStatus/,$0=$2' RS='(, |})' web.json) in
      200) # OK
        break
        ;;
      400) # out of range start
        break
        ;;
      403) # suspected terms of service abuse — proxy is blocked by Google
        shift
        continue
        ;;
      *)   # unrecognized status: skip this proxy rather than retrying it
        # forever (the original had no default arm and would loop on the
        # same proxy indefinitely)
        shift
        continue
        ;;
    esac
  done
  # Report the proxy that worked and persist the remaining pool for next time.
  printf '%s\n' "$px"
  printf '%s\n' "$@" > "$dt"
}
|
||||||
|
|
||||||
apt-searchall () {
|
apt-searchall () {
|
||||||
|
@ -347,14 +362,12 @@ apt-searchall () {
|
||||||
do
|
do
|
||||||
printf -v qs 'v=1.0&rsz=8&q="%s"+-"index of"+site:%s/%s' $pkg $ste $ARCH
|
printf -v qs 'v=1.0&rsz=8&q="%s"+-"index of"+site:%s/%s' $pkg $ste $ARCH
|
||||||
(( nof++ )) && echo
|
(( nof++ )) && echo
|
||||||
echo getting pages . . .
|
proxy pages "$api?$qs"
|
||||||
proxy "$api?$qs"
|
|
||||||
grep -q pages web.json || continue
|
grep -q pages web.json || continue
|
||||||
awk '$2 == "start" {print $4}' RS='[{,]' FS='"' web.json |
|
awk '$2 == "start" {print $4}' RS='[{,]' FS='"' web.json |
|
||||||
while read start
|
while read start
|
||||||
do
|
do
|
||||||
echo getting start $start . . . >&2
|
proxy "start $start" "$api?$qs&start=$start" >&2
|
||||||
proxy "$api?$qs&start=$start"
|
|
||||||
awk '$2 == "url" {print $4}' RS=, FS='"' web.json
|
awk '$2 == "url" {print $4}' RS=, FS='"' web.json
|
||||||
done > urls.txt
|
done > urls.txt
|
||||||
awk '!s[$7]++ {print $7}' FS=/ urls.txt
|
awk '!s[$7]++ {print $7}' FS=/ urls.txt
|
||||||
|
|
Loading…
Reference in New Issue