
Commit

implement socks5 proxy pool
akkuman committed Oct 18, 2021
1 parent a4d09b0 commit 6c32588
Showing 7 changed files with 73 additions and 44 deletions.
3 changes: 2 additions & 1 deletion .gitignore
@@ -1,3 +1,4 @@
 *.exe
 .history
-.vscode
+.vscode
+*.zip
32 changes: 15 additions & 17 deletions check.go
@@ -10,8 +10,6 @@ import (
     "time"
 )
 
-var maxRetry = 3
-
 type IPInfo struct {
     Status  string `json:"status"`
     Country string `json:"country"`
@@ -70,19 +68,19 @@ func StartCheckProxyAlive() {
 }
 
 func checkAlive() {
-    ProxyMap.Range(func(key, value interface{}) bool {
-        // check if proxy is valid, if check failed 3 times, it will not be checked again.
-        if failedCount, ok := value.(int); ok && failedCount < maxRetry {
-            go func() {
-                if CheckProxyAlive(fmt.Sprintf("socks5://%v", key)) {
-                    fmt.Printf("%v 可用\n", key)
-                    ProxyMap.Store(key, 0)
-                    // return true
-                }
-                ProxyMap.Store(key, failedCount+1)
-            }()
-            return true
-        }
-        return true
-    })
+    proxies, err := QueryProxyURL()
+    if err != nil {
+        fmt.Printf("[!] query db error: %v\n", err)
+    }
+    for i := range proxies {
+        proxy := proxies[i]
+        go func() {
+            if CheckProxyAlive(proxy.URL) {
+                fmt.Printf("%v 可用\n", proxy.URL)
+                SetProxyURLAvail(proxy.URL)
+            } else {
+                AddProxyURLRetry(proxy.URL)
+            }
+        }()
+    }
 }
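Note on the new checkAlive: each iteration copies proxies[i] into a local proxy before starting the goroutine, so every goroutine captures its own URL rather than the shared loop variable (this matters on Go versions before 1.22, where the loop variable is reused across iterations). A minimal standalone sketch of the same capture pattern; checkOne is a placeholder, not the project's CheckProxyAlive, and the WaitGroup is only here to make the example deterministic (checkAlive itself returns without waiting):

package main

import (
    "fmt"
    "sync"
)

// checkOne stands in for a real liveness probe; it is a hypothetical helper.
func checkOne(url string) bool { return len(url) > 0 }

func main() {
    proxies := []string{"socks5://1.2.3.4:1080", "socks5://5.6.7.8:1080"}
    var wg sync.WaitGroup
    for i := range proxies {
        proxy := proxies[i] // copy before capture, as checkAlive does
        wg.Add(1)
        go func() {
            defer wg.Done()
            fmt.Println(proxy, checkOne(proxy))
        }()
    }
    wg.Wait()
}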
13 changes: 3 additions & 10 deletions crawler.go
@@ -5,12 +5,9 @@ import (
     "encoding/json"
     "fmt"
     "net/http"
-    "sync"
     "time"
 )
 
-var ProxyMap sync.Map
-var mutex sync.Mutex
 var crawlDone = make(chan struct{})
 
 type fofaAPIResponse struct {
@@ -22,12 +19,8 @@ type fofaAPIResponse struct {
     Size int `json:"size"`
 }
 
-func addProxyMap(addr interface{}) {
-    mutex.Lock()
-    defer mutex.Unlock()
-    if _, ok := ProxyMap.Load(addr); !ok {
-        ProxyMap.Store(addr, maxRetry-1)
-    }
+func addProxyURL(url string) {
+    CreateProxyURL(url)
 }
 
 func RunCrawler(fofaApiKey, fofaEmail, rule string) (err error) {
@@ -58,7 +51,7 @@ func RunCrawler(fofaApiKey, fofaEmail, rule string) (err error) {
     fmt.Printf("get %d host\n", len(res.Results))
     for _, value := range res.Results {
         host := value[0]
-        addProxyMap(host)
+        addProxyURL(fmt.Sprintf("socks5://%s", host))
     }
     crawlDone <- struct{}{}
     return
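addProxyURL now writes straight to the database, whereas the old addProxyMap skipped hosts already present in the sync.Map, so repeated crawls can insert duplicate rows for the same host. One way to keep the dedup behaviour with GORM is FirstOrCreate keyed on the URL; this is a sketch of an alternative that assumes the DB handle and ProxyURL model from db.go, not part of this commit:

// addProxyURLOnce is a hypothetical variant of addProxyURL that only inserts
// a row when no record with the same url column exists yet.
func addProxyURLOnce(url string) error {
    var proxy ProxyURL
    tx := DB.Where(ProxyURL{URL: url}).FirstOrCreate(&proxy)
    return tx.Error
}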
34 changes: 31 additions & 3 deletions db.go
@@ -1,8 +1,11 @@
 package rotateproxy
 
 import (
+    "fmt"
+
     "gorm.io/driver/sqlite"
     "gorm.io/gorm"
+    "gorm.io/gorm/logger"
 )
 
 var DB *gorm.DB
@@ -14,6 +17,10 @@ type ProxyURL struct {
     Available bool `gorm:"column:available"`
 }
 
+func (ProxyURL) TableName() string {
+    return "proxy_urls"
+}
+
 func checkErr(err error) {
     if err != nil {
         panic(err)
@@ -22,17 +29,20 @@ func checkErr(err error) {
 
 func init() {
     var err error
-    DB, err = gorm.Open(sqlite.Open("db.db"), &gorm.Config{})
+    DB, err = gorm.Open(sqlite.Open("db.db"), &gorm.Config{
+        Logger: logger.Discard,
+    })
     checkErr(err)
     DB.AutoMigrate(&ProxyURL{})
 }
 
-func CreateProxyURL(url string) {
-    DB.Create(&ProxyURL{
+func CreateProxyURL(url string) error {
+    tx := DB.Create(&ProxyURL{
         URL:       url,
         Retry:     0,
         Available: false,
     })
+    return tx.Error
 }
 
 func QueryAvailProxyURL() (proxyURLs []ProxyURL, err error) {
@@ -41,7 +51,25 @@ func QueryAvailProxyURL() (proxyURLs []ProxyURL, err error) {
     return
 }
 
+func QueryProxyURL() (proxyURLs []ProxyURL, err error) {
+    tx := DB.Find(&proxyURLs)
+    err = tx.Error
+    return
+}
+
+func SetProxyURLAvail(url string) error {
+    tx := DB.Model(&ProxyURL{}).Where("url = ?", url).Updates(ProxyURL{Retry: 0, Available: true})
+    return tx.Error
+}
+
+func AddProxyURLRetry(url string) error {
+    tx := DB.Model(&ProxyURL{}).Where("url = ?", url).Update("retry", gorm.Expr("retry + 1"))
+    return tx.Error
+}
+
+func RandomProxyURL() (string, error) {
+    var proxyURL ProxyURL
+    tx := DB.Raw(fmt.Sprintf("SELECT * FROM %s WHERE available = 1 ORDER BY RANDOM() LIMIT 1;", proxyURL.TableName())).Scan(&proxyURL)
+    fmt.Println(proxyURL)
+    return proxyURL.URL, tx.Error
+}
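The new helpers form a small lifecycle: CreateProxyURL on crawl, SetProxyURLAvail or AddProxyURLRetry after a check, RandomProxyURL when relaying. One caveat worth flagging: GORM's Updates with a struct argument skips zero-value fields, so the Retry: 0 in SetProxyURLAvail will not actually reset the counter. A map-based variant avoids that; this is a sketch of an alternative, not part of this commit, and it assumes the retry column is named "retry":

// SetProxyURLAvailStrict is a hypothetical variant that also resets retry to 0.
// Map values are not subject to GORM's zero-value skipping for struct updates.
func SetProxyURLAvailStrict(url string) error {
    tx := DB.Model(&ProxyURL{}).
        Where("url = ?", url).
        Updates(map[string]interface{}{"retry": 0, "available": true})
    return tx.Error
}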
2 changes: 2 additions & 0 deletions makefile
@@ -0,0 +1,2 @@
+build:
+    cd cmd/rotateproxy && go build -trimpath -ldflags="-s -w"
15 changes: 12 additions & 3 deletions traffic_redirect.go
@@ -4,6 +4,7 @@ import (
     "fmt"
     "io"
     "net"
+    "strings"
     "time"
 )
 
@@ -40,7 +41,7 @@ func (c *RedirectClient) Serve() error {
     if err != nil {
         return err
     }
-    for SyncMapIsBlank(ProxyMap) {
+    for IsProxyURLBlank() {
         fmt.Println("[*] waiting for crawl proxy...")
         time.Sleep(3 * time.Second)
     }
@@ -55,8 +56,16 @@ func (c *RedirectClient) Serve() error {
 }
 
 func (c *RedirectClient) HandleConn(conn net.Conn) {
-    key, _ := RandomSyncMap(ProxyMap)
-    cc, err := net.DialTimeout("tcp", key.(string), 20*time.Second)
+    key, err := RandomProxyURL()
+    if err != nil {
+        errConn := closeConn(conn)
+        if errConn != nil {
+            fmt.Printf("[!] close connect error: %v\n", errConn)
+        }
+        return
+    }
+    key = strings.TrimPrefix(key, "socks5://")
+    cc, err := net.DialTimeout("tcp", key, 20*time.Second)
     if err != nil {
         fmt.Printf("[!] cannot connect to %v\n", key)
     }
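HandleConn now pulls a random available proxy from the database, strips the socks5:// prefix, and dials it as the upstream; the byte relay itself sits below this hunk and is not shown. For orientation, a typical two-way relay between the client connection and the upstream looks like the sketch below. It is a generic pattern assumed for illustration, not the implementation hidden by the truncated diff:

// relay pipes bytes in both directions and closes both ends when either side finishes.
// Generic sketch only; net and io are already imported in this file.
func relay(client, upstream net.Conn) {
    done := make(chan struct{}, 2)
    copyFn := func(dst, src net.Conn) {
        io.Copy(dst, src) // a closed peer is the normal exit, so the error is ignored
        done <- struct{}{}
    }
    go copyFn(upstream, client)
    go copyFn(client, upstream)
    <-done
    client.Close()
    upstream.Close()
}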
18 changes: 8 additions & 10 deletions util.go
@@ -1,6 +1,7 @@
 package rotateproxy
 
 import (
+    "fmt"
     "math/rand"
     "sync"
     "time"
@@ -22,14 +23,11 @@ func RandomSyncMap(sMap sync.Map) (key, value interface{}) {
     return element[0], element[1]
 }
 
-func SyncMapIsBlank(sMap sync.Map) bool {
-    isBlank := true
-    sMap.Range(func(key, value interface{}) bool {
-        if value.(int) == 0 {
-            isBlank = false
-            return false
-        }
-        return true
-    })
-    return isBlank
+func IsProxyURLBlank() bool {
+    proxies, err := QueryAvailProxyURL()
+    if err != nil {
+        fmt.Printf("[!] Error: %v\n", err)
+        return false
+    }
+    return len(proxies) == 0
 }

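IsProxyURLBlank returns false when the database query fails, so the wait loop in Serve stops waiting even though no proxy has been verified yet. An alternative that keeps waiting on a query error is sketched below; this is a suggestion assuming the same QueryAvailProxyURL helper, not part of the commit:

// isProxyURLBlankSafe is a hypothetical variant: on a query error it reports
// "blank" so callers keep waiting instead of proceeding without proxies.
func isProxyURLBlankSafe() bool {
    proxies, err := QueryAvailProxyURL()
    if err != nil {
        fmt.Printf("[!] Error: %v\n", err)
        return true
    }
    return len(proxies) == 0
}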