2025-09-20 09:35:50 +02:00
|
|
|
|
// Package service provides business logic services for the 3x-ui web panel,
|
|
|
|
|
|
// including inbound/outbound management, user administration, settings, and Xray integration.
|
2023-02-09 22:48:06 +03:30
|
|
|
|
package service
|
|
|
|
|
|
|
|
|
|
|
|
import (
|
2026-05-09 17:38:48 +02:00
|
|
|
|
"context"
|
2023-02-18 16:07:32 +03:30
|
|
|
|
"encoding/json"
|
2023-02-09 22:48:06 +03:30
|
|
|
|
"fmt"
|
2025-05-06 19:57:17 +03:30
|
|
|
|
"sort"
|
2024-04-05 12:24:18 +03:00
|
|
|
|
"strconv"
|
2023-04-25 18:43:37 +03:30
|
|
|
|
"strings"
|
2023-02-09 22:48:06 +03:30
|
|
|
|
"time"
|
2024-03-11 01:01:24 +03:30
|
|
|
|
|
2026-04-23 16:19:07 +03:00
|
|
|
|
"github.com/google/uuid"
|
2026-05-10 02:13:42 +02:00
|
|
|
|
"github.com/mhsanaei/3x-ui/v3/database"
|
|
|
|
|
|
"github.com/mhsanaei/3x-ui/v3/database/model"
|
|
|
|
|
|
"github.com/mhsanaei/3x-ui/v3/logger"
|
|
|
|
|
|
"github.com/mhsanaei/3x-ui/v3/util/common"
|
|
|
|
|
|
"github.com/mhsanaei/3x-ui/v3/web/runtime"
|
|
|
|
|
|
"github.com/mhsanaei/3x-ui/v3/xray"
|
2023-02-09 22:48:06 +03:30
|
|
|
|
|
|
|
|
|
|
"gorm.io/gorm"
|
2026-05-09 17:38:48 +02:00
|
|
|
|
"gorm.io/gorm/clause"
|
2023-02-09 22:48:06 +03:30
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
// InboundService implements the business logic for inbound management:
// CRUD over inbound rows, client/email validation, traffic-stat enrichment,
// and keeping the running Xray instance(s) in sync with the database.
// The zero value is usable; state lives in the database and runtime manager.
type InboundService struct {
	// xrayApi is the handle to the local Xray gRPC API used by methods of
	// this service (not visible in this chunk) to push live config changes.
	xrayApi xray.XrayAPI
}
|
|
|
|
|
|
|
2026-05-09 17:38:48 +02:00
|
|
|
|
func (s *InboundService) runtimeFor(ib *model.Inbound) (runtime.Runtime, error) {
|
|
|
|
|
|
mgr := runtime.GetManager()
|
|
|
|
|
|
if mgr == nil {
|
|
|
|
|
|
return nil, fmt.Errorf("runtime manager not initialised")
|
|
|
|
|
|
}
|
|
|
|
|
|
return mgr.RuntimeFor(ib.NodeID)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-04-23 16:19:07 +03:00
|
|
|
|
// CopyClientsResult summarizes the outcome of a bulk client-copy operation,
// bucketing client identifiers by what happened to each one. It is returned
// to the web UI as JSON.
type CopyClientsResult struct {
	// Added lists clients that were copied successfully.
	Added []string `json:"added"`
	// Skipped lists clients that were left untouched (e.g. already present).
	Skipped []string `json:"skipped"`
	// Errors lists clients whose copy attempt failed.
	Errors []string `json:"errors"`
}
|
|
|
|
|
|
|
2026-05-09 17:38:48 +02:00
|
|
|
|
// enrichClientStats parses each inbound's clients once, fills in the
// UUID/SubId fields on the preloaded ClientStats, and tops up rows owned by
// a sibling inbound (shared-email mode — the row is keyed on email so it
// only preloads on its owning inbound).
//
// The function mutates the inbounds in place and never fails: parse and
// query errors are logged (or ignored for per-inbound client parsing) so a
// partially-enriched listing is still returned to the caller.
func (s *InboundService) enrichClientStats(db *gorm.DB, inbounds []*model.Inbound) {
	if len(inbounds) == 0 {
		return
	}
	// Pass 1: parse every inbound's client list once and record which stat
	// emails are already preloaded, collecting emails that have no stat row
	// on this inbound ("missing") for a single batched lookup.
	clientsByInbound := make([][]model.Client, len(inbounds))
	seenByInbound := make([]map[string]struct{}, len(inbounds))
	missing := make(map[string]struct{})
	for i, inbound := range inbounds {
		// Parse errors are deliberately ignored; a nil client list simply
		// yields no enrichment for that inbound.
		clients, _ := s.GetClients(inbound)
		clientsByInbound[i] = clients
		seen := make(map[string]struct{}, len(inbound.ClientStats))
		for _, st := range inbound.ClientStats {
			if st.Email != "" {
				// Emails are matched case-insensitively throughout.
				seen[strings.ToLower(st.Email)] = struct{}{}
			}
		}
		seenByInbound[i] = seen
		for _, c := range clients {
			if c.Email == "" {
				continue
			}
			if _, ok := seen[strings.ToLower(c.Email)]; !ok {
				// NOTE: keyed on the original-case email here; the DB query
				// below matches it verbatim via IN (?).
				missing[c.Email] = struct{}{}
			}
		}
	}
	// Pass 2: fetch all missing stat rows in one query and attach each to
	// every inbound that lists the email but didn't preload the row.
	if len(missing) > 0 {
		emails := make([]string, 0, len(missing))
		for e := range missing {
			emails = append(emails, e)
		}
		var extra []xray.ClientTraffic
		if err := db.Model(xray.ClientTraffic{}).Where("email IN ?", emails).Find(&extra).Error; err != nil {
			logger.Warning("enrichClientStats:", err)
		} else {
			byEmail := make(map[string]xray.ClientTraffic, len(extra))
			for _, st := range extra {
				byEmail[strings.ToLower(st.Email)] = st
			}
			for i, inbound := range inbounds {
				for _, c := range clientsByInbound[i] {
					if c.Email == "" {
						continue
					}
					key := strings.ToLower(c.Email)
					if _, ok := seenByInbound[i][key]; ok {
						continue
					}
					if st, ok := byEmail[key]; ok {
						inbound.ClientStats = append(inbound.ClientStats, st)
						// Mark as seen so the same row isn't appended twice.
						seenByInbound[i][key] = struct{}{}
					}
				}
			}
		}
	}
	// Pass 3: copy UUID/SubId from the parsed client onto each stat row so
	// the UI can link stats back to the client identity.
	for i, inbound := range inbounds {
		clients := clientsByInbound[i]
		if len(clients) == 0 || len(inbound.ClientStats) == 0 {
			continue
		}
		cMap := make(map[string]model.Client, len(clients))
		for _, c := range clients {
			cMap[strings.ToLower(c.Email)] = c
		}
		for j := range inbound.ClientStats {
			email := strings.ToLower(inbound.ClientStats[j].Email)
			if c, ok := cMap[email]; ok {
				inbound.ClientStats[j].UUID = c.ID
				inbound.ClientStats[j].SubId = c.SubID
			}
		}
	}
}
|
|
|
|
|
|
|
|
|
|
|
|
// GetInbounds retrieves all inbounds for a specific user with client stats.
|
|
|
|
|
|
func (s *InboundService) GetInbounds(userId int) ([]*model.Inbound, error) {
|
|
|
|
|
|
db := database.GetDB()
|
|
|
|
|
|
var inbounds []*model.Inbound
|
|
|
|
|
|
err := db.Model(model.Inbound{}).Preload("ClientStats").Where("user_id = ?", userId).Find(&inbounds).Error
|
|
|
|
|
|
if err != nil && err != gorm.ErrRecordNotFound {
|
|
|
|
|
|
return nil, err
|
|
|
|
|
|
}
|
|
|
|
|
|
s.enrichClientStats(db, inbounds)
|
2023-02-09 22:48:06 +03:30
|
|
|
|
return inbounds, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-05-09 17:38:48 +02:00
|
|
|
|
// GetAllInbounds retrieves all inbounds with client stats.
|
2023-02-09 22:48:06 +03:30
|
|
|
|
func (s *InboundService) GetAllInbounds() ([]*model.Inbound, error) {
|
|
|
|
|
|
db := database.GetDB()
|
|
|
|
|
|
var inbounds []*model.Inbound
|
|
|
|
|
|
err := db.Model(model.Inbound{}).Preload("ClientStats").Find(&inbounds).Error
|
|
|
|
|
|
if err != nil && err != gorm.ErrRecordNotFound {
|
|
|
|
|
|
return nil, err
|
|
|
|
|
|
}
|
2026-05-09 17:38:48 +02:00
|
|
|
|
s.enrichClientStats(db, inbounds)
|
2023-02-09 22:48:06 +03:30
|
|
|
|
return inbounds, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2025-09-16 09:24:32 +02:00
|
|
|
|
func (s *InboundService) GetInboundsByTrafficReset(period string) ([]*model.Inbound, error) {
|
|
|
|
|
|
db := database.GetDB()
|
|
|
|
|
|
var inbounds []*model.Inbound
|
|
|
|
|
|
err := db.Model(model.Inbound{}).Where("traffic_reset = ?", period).Find(&inbounds).Error
|
|
|
|
|
|
if err != nil && err != gorm.ErrRecordNotFound {
|
|
|
|
|
|
return nil, err
|
|
|
|
|
|
}
|
|
|
|
|
|
return inbounds, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-05-22 18:06:34 +03:30
|
|
|
|
func (s *InboundService) GetClients(inbound *model.Inbound) ([]model.Client, error) {
|
2023-02-09 22:48:06 +03:30
|
|
|
|
settings := map[string][]model.Client{}
|
|
|
|
|
|
json.Unmarshal([]byte(inbound.Settings), &settings)
|
|
|
|
|
|
if settings == nil {
|
2023-03-17 19:37:49 +03:30
|
|
|
|
return nil, fmt.Errorf("setting is null")
|
2023-02-09 22:48:06 +03:30
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
clients := settings["clients"]
|
|
|
|
|
|
if clients == nil {
|
|
|
|
|
|
return nil, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
return clients, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-04-18 21:34:06 +03:30
|
|
|
|
func (s *InboundService) getAllEmails() ([]string, error) {
|
2023-02-09 22:48:06 +03:30
|
|
|
|
db := database.GetDB()
|
2023-04-18 21:34:06 +03:30
|
|
|
|
var emails []string
|
|
|
|
|
|
err := db.Raw(`
|
2026-05-09 17:38:48 +02:00
|
|
|
|
SELECT DISTINCT JSON_EXTRACT(client.value, '$.email')
|
2023-04-18 21:34:06 +03:30
|
|
|
|
FROM inbounds,
|
|
|
|
|
|
JSON_EACH(JSON_EXTRACT(inbounds.settings, '$.clients')) AS client
|
|
|
|
|
|
`).Scan(&emails).Error
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return nil, err
|
2023-02-09 22:48:06 +03:30
|
|
|
|
}
|
2023-04-18 21:34:06 +03:30
|
|
|
|
return emails, nil
|
|
|
|
|
|
}
|
2023-02-09 22:48:06 +03:30
|
|
|
|
|
2026-05-09 17:38:48 +02:00
|
|
|
|
// getAllEmailSubIDs returns email→subId. An email seen with two different
|
|
|
|
|
|
// non-empty subIds is locked (mapped to "") so neither identity can claim it.
|
|
|
|
|
|
func (s *InboundService) getAllEmailSubIDs() (map[string]string, error) {
|
|
|
|
|
|
db := database.GetDB()
|
|
|
|
|
|
var rows []struct {
|
|
|
|
|
|
Email string
|
|
|
|
|
|
SubID string
|
2023-04-18 21:34:06 +03:30
|
|
|
|
}
|
2026-05-09 17:38:48 +02:00
|
|
|
|
err := db.Raw(`
|
|
|
|
|
|
SELECT JSON_EXTRACT(client.value, '$.email') AS email,
|
|
|
|
|
|
JSON_EXTRACT(client.value, '$.subId') AS sub_id
|
|
|
|
|
|
FROM inbounds,
|
|
|
|
|
|
JSON_EACH(JSON_EXTRACT(inbounds.settings, '$.clients')) AS client
|
|
|
|
|
|
`).Scan(&rows).Error
|
2023-04-18 21:34:06 +03:30
|
|
|
|
if err != nil {
|
2026-05-09 17:38:48 +02:00
|
|
|
|
return nil, err
|
2023-04-18 21:34:06 +03:30
|
|
|
|
}
|
2026-05-09 17:38:48 +02:00
|
|
|
|
result := make(map[string]string, len(rows))
|
|
|
|
|
|
for _, r := range rows {
|
|
|
|
|
|
email := strings.ToLower(strings.Trim(r.Email, "\""))
|
|
|
|
|
|
if email == "" {
|
|
|
|
|
|
continue
|
|
|
|
|
|
}
|
|
|
|
|
|
subID := strings.Trim(r.SubID, "\"")
|
|
|
|
|
|
if existing, ok := result[email]; ok {
|
|
|
|
|
|
if existing != subID {
|
|
|
|
|
|
result[email] = ""
|
2023-02-09 22:48:06 +03:30
|
|
|
|
}
|
2026-05-09 17:38:48 +02:00
|
|
|
|
continue
|
2023-02-09 22:48:06 +03:30
|
|
|
|
}
|
2026-05-09 17:38:48 +02:00
|
|
|
|
result[email] = subID
|
2023-02-09 22:48:06 +03:30
|
|
|
|
}
|
2026-05-09 17:38:48 +02:00
|
|
|
|
return result, nil
|
2023-02-09 22:48:06 +03:30
|
|
|
|
}
|
|
|
|
|
|
|
2026-05-09 17:38:48 +02:00
|
|
|
|
// lowerAll returns a new slice containing every element of in lower-cased.
func lowerAll(in []string) []string {
	lowered := make([]string, 0, len(in))
	for _, s := range in {
		lowered = append(lowered, strings.ToLower(s))
	}
	return lowered
}
|
|
|
|
|
|
|
|
|
|
|
|
// emailUsedByOtherInbounds reports whether email lives in any inbound other
|
|
|
|
|
|
// than exceptInboundId. Empty email returns false.
|
|
|
|
|
|
func (s *InboundService) emailUsedByOtherInbounds(email string, exceptInboundId int) (bool, error) {
|
|
|
|
|
|
if email == "" {
|
|
|
|
|
|
return false, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
db := database.GetDB()
|
|
|
|
|
|
var count int64
|
|
|
|
|
|
err := db.Raw(`
|
|
|
|
|
|
SELECT COUNT(*)
|
|
|
|
|
|
FROM inbounds,
|
|
|
|
|
|
JSON_EACH(JSON_EXTRACT(inbounds.settings, '$.clients')) AS client
|
|
|
|
|
|
WHERE inbounds.id != ?
|
|
|
|
|
|
AND LOWER(JSON_EXTRACT(client.value, '$.email')) = LOWER(?)
|
|
|
|
|
|
`, exceptInboundId, email).Scan(&count).Error
|
2023-02-09 22:48:06 +03:30
|
|
|
|
if err != nil {
|
2026-05-09 17:38:48 +02:00
|
|
|
|
return false, err
|
2023-02-09 22:48:06 +03:30
|
|
|
|
}
|
2026-05-09 17:38:48 +02:00
|
|
|
|
return count > 0, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// checkEmailsExistForClients validates a batch of incoming clients. An email
|
|
|
|
|
|
// collides only when the existing holder has a different (or empty) subId —
|
|
|
|
|
|
// matching non-empty subIds let multiple inbounds share one identity.
|
|
|
|
|
|
func (s *InboundService) checkEmailsExistForClients(clients []model.Client) (string, error) {
|
|
|
|
|
|
emailSubIDs, err := s.getAllEmailSubIDs()
|
2023-04-18 21:34:06 +03:30
|
|
|
|
if err != nil {
|
|
|
|
|
|
return "", err
|
|
|
|
|
|
}
|
2026-05-09 17:38:48 +02:00
|
|
|
|
seen := make(map[string]string, len(clients))
|
2023-02-09 22:48:06 +03:30
|
|
|
|
for _, client := range clients {
|
2026-05-09 17:38:48 +02:00
|
|
|
|
if client.Email == "" {
|
|
|
|
|
|
continue
|
|
|
|
|
|
}
|
|
|
|
|
|
key := strings.ToLower(client.Email)
|
|
|
|
|
|
// Within the same payload, the same email must carry the same subId;
|
|
|
|
|
|
// otherwise we would silently merge two distinct identities.
|
|
|
|
|
|
if prev, ok := seen[key]; ok {
|
|
|
|
|
|
if prev != client.SubID || client.SubID == "" {
|
2023-04-18 21:34:06 +03:30
|
|
|
|
return client.Email, nil
|
|
|
|
|
|
}
|
2026-05-09 17:38:48 +02:00
|
|
|
|
continue
|
|
|
|
|
|
}
|
|
|
|
|
|
seen[key] = client.SubID
|
|
|
|
|
|
if existingSub, ok := emailSubIDs[key]; ok {
|
|
|
|
|
|
if client.SubID == "" || existingSub == "" || existingSub != client.SubID {
|
2023-02-09 22:48:06 +03:30
|
|
|
|
return client.Email, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
2023-04-18 21:34:06 +03:30
|
|
|
|
return "", nil
|
2023-02-09 22:48:06 +03:30
|
|
|
|
}
|
|
|
|
|
|
|
2025-09-20 09:35:50 +02:00
|
|
|
|
// AddInbound creates a new inbound configuration.
// It validates port uniqueness, client email uniqueness, and required fields,
// then saves the inbound to the database and optionally adds it to the running Xray instance.
// Returns the created inbound, whether Xray needs restart, and any error.
//
// The DB write runs inside a transaction: the deferred func commits when the
// named err is nil at return time and rolls back otherwise, so runtime-add
// failures that assign err also undo the row insert.
func (s *InboundService) AddInbound(inbound *model.Inbound) (*model.Inbound, bool, error) {
	// Reject if another inbound already claims this port.
	exist, err := s.checkPortConflict(inbound, 0)
	if err != nil {
		return inbound, false, err
	}
	if exist {
		return inbound, false, common.NewError("Port already exists:", inbound.Port)
	}

	// Assign (or validate) a unique tag; 0 = no existing inbound to exclude.
	inbound.Tag, err = s.resolveInboundTag(inbound, 0)
	if err != nil {
		return inbound, false, err
	}

	clients, err := s.GetClients(inbound)
	if err != nil {
		return inbound, false, err
	}
	// Emails must not collide with identities elsewhere in the panel.
	existEmail, err := s.checkEmailsExistForClients(clients)
	if err != nil {
		return inbound, false, err
	}
	if existEmail != "" {
		return inbound, false, common.NewError("Duplicate email:", existEmail)
	}

	// Ensure created_at and updated_at on clients in settings
	if len(clients) > 0 {
		var settings map[string]any
		if err2 := json.Unmarshal([]byte(inbound.Settings), &settings); err2 == nil && settings != nil {
			// Timestamps are stored as milliseconds since the epoch.
			now := time.Now().Unix() * 1000
			updatedClients := make([]model.Client, 0, len(clients))
			for _, c := range clients {
				if c.CreatedAt == 0 {
					c.CreatedAt = now
				}
				c.UpdatedAt = now
				updatedClients = append(updatedClients, c)
			}
			settings["clients"] = updatedClients
			if bs, err3 := json.MarshalIndent(settings, "", "  "); err3 == nil {
				inbound.Settings = string(bs)
			} else {
				logger.Debug("Unable to marshal inbound settings with timestamps:", err3)
			}
		} else if err2 != nil {
			logger.Debug("Unable to parse inbound settings for timestamps:", err2)
		}
	}

	// Secure client ID: each protocol requires a different credential field
	// to be non-empty.
	for _, client := range clients {
		switch inbound.Protocol {
		case "trojan":
			if client.Password == "" {
				return inbound, false, common.NewError("empty client ID")
			}
		case "shadowsocks":
			if client.Email == "" {
				return inbound, false, common.NewError("empty client ID")
			}
		case "hysteria", "hysteria2":
			if client.Auth == "" {
				return inbound, false, common.NewError("empty client ID")
			}
		default:
			if client.ID == "" {
				return inbound, false, common.NewError("empty client ID")
			}
		}
	}

	db := database.GetDB()
	tx := db.Begin()
	// Commit/rollback is driven by the named return err at function exit.
	defer func() {
		if err == nil {
			tx.Commit()
		} else {
			tx.Rollback()
		}
	}()

	err = tx.Save(inbound).Error
	if err == nil {
		// Seed traffic-stat rows only when none were supplied by the caller.
		// NOTE(review): AddClientStat errors are ignored here — a failed stat
		// insert still commits the inbound; confirm this is intentional.
		if len(inbound.ClientStats) == 0 {
			for _, client := range clients {
				s.AddClientStat(tx, inbound.Id, &client)
			}
		}
	} else {
		return inbound, false, err
	}

	needRestart := false
	if inbound.Enable {
		rt, rterr := s.runtimeFor(inbound)
		if rterr != nil {
			// Assigning err triggers the deferred rollback.
			err = rterr
			return inbound, false, err
		}
		if err1 := rt.AddInbound(context.Background(), inbound); err1 == nil {
			logger.Debug("New inbound added on", rt.Name(), ":", inbound.Tag)
		} else {
			logger.Debug("Unable to add inbound on", rt.Name(), ":", err1)
			// Remote-node inbounds must be applied live; fail (and roll back)
			// if the node rejects it. Local inbounds fall back to a restart.
			if inbound.NodeID != nil {
				err = err1
				return inbound, false, err
			}
			needRestart = true
		}
	}

	return inbound, needRestart, err
}
|
|
|
|
|
|
|
2023-07-18 02:40:22 +03:30
|
|
|
|
// DelInbound removes the inbound with the given id: it first tries to remove
// it from the live runtime (best effort), then deletes its traffic rows,
// client-IP rows, and finally the inbound row itself.
// Returns whether Xray needs a restart, and any error.
func (s *InboundService) DelInbound(id int) (bool, error) {
	db := database.GetDB()

	needRestart := false
	// Only an *enabled* inbound needs to be removed from the live runtime.
	var ib model.Inbound
	loadErr := db.Model(model.Inbound{}).Where("id = ? and enable = ?", id, true).First(&ib).Error
	if loadErr == nil {
		rt, rterr := s.runtimeFor(&ib)
		if rterr != nil {
			logger.Warning("DelInbound: runtime lookup failed, deleting central row anyway:", rterr)
			// Local (NodeID == nil) inbounds can be reconciled by restart;
			// remote ones cannot, so only flag restart for local.
			if ib.NodeID == nil {
				needRestart = true
			}
		} else if err1 := rt.DelInbound(context.Background(), &ib); err1 == nil {
			logger.Debug("Inbound deleted on", rt.Name(), ":", ib.Tag)
		} else {
			logger.Warning("DelInbound on", rt.Name(), "failed, deleting central row anyway:", err1)
			if ib.NodeID == nil {
				needRestart = true
			}
		}
	} else {
		logger.Debug("No enabled inbound found to remove by api, id:", id)
	}

	// Delete client traffics of inbounds
	err := db.Where("inbound_id = ?", id).Delete(xray.ClientTraffic{}).Error
	if err != nil {
		return false, err
	}
	// Load the full row to enumerate its client emails before deleting it.
	inbound, err := s.GetInbound(id)
	if err != nil {
		return false, err
	}
	clients, err := s.GetClients(inbound)
	if err != nil {
		return false, err
	}
	// Bulk-delete client IPs for every email in this inbound. The previous
	// per-client loop fired one DELETE per row — at 7k+ clients that meant
	// thousands of synchronous SQL roundtrips and a multi-second freeze.
	// Chunked to stay under SQLite's bind-variable limit on huge inbounds.
	if len(clients) > 0 {
		emails := make([]string, 0, len(clients))
		for i := range clients {
			if clients[i].Email != "" {
				emails = append(emails, clients[i].Email)
			}
		}
		for _, batch := range chunkStrings(uniqueNonEmptyStrings(emails), sqliteMaxVars) {
			if err := db.Where("client_email IN ?", batch).
				Delete(model.InboundClientIps{}).Error; err != nil {
				return false, err
			}
		}
	}

	return needRestart, db.Delete(model.Inbound{}, id).Error
}
|
|
|
|
|
|
|
|
|
|
|
|
func (s *InboundService) GetInbound(id int) (*model.Inbound, error) {
|
|
|
|
|
|
db := database.GetDB()
|
|
|
|
|
|
inbound := &model.Inbound{}
|
|
|
|
|
|
err := db.Model(model.Inbound{}).First(inbound, id).Error
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return nil, err
|
|
|
|
|
|
}
|
|
|
|
|
|
return inbound, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-05-05 18:27:49 +03:00
|
|
|
|
// SetInboundEnable toggles only the enable flag of an inbound, without
// rewriting the (potentially multi-MB) settings JSON. Used by the UI's
// per-row enable switch — for inbounds with thousands of clients the full
// UpdateInbound path is an order of magnitude too slow for an interactive
// toggle (parses + reserialises every client, runs O(N) traffic diff).
//
// Returns (needRestart, error). needRestart is true when the xray runtime
// could not be re-synced from the cached config and a full restart is
// required to pick up the change.
func (s *InboundService) SetInboundEnable(id int, enable bool) (bool, error) {
	inbound, err := s.GetInbound(id)
	if err != nil {
		return false, err
	}
	// No-op when the flag already has the requested value.
	if inbound.Enable == enable {
		return false, nil
	}

	// Persist only the flag column — deliberately not tx.Save(inbound).
	db := database.GetDB()
	if err := db.Model(model.Inbound{}).Where("id = ?", id).
		Update("enable", enable).Error; err != nil {
		return false, err
	}
	inbound.Enable = enable

	needRestart := false
	rt, rterr := s.runtimeFor(inbound)
	if rterr != nil {
		// Remote-node inbounds cannot be fixed by a local restart: surface
		// the error. Local ones just need a restart to reconcile.
		if inbound.NodeID != nil {
			return false, rterr
		}
		return true, nil
	}

	// Remove the old live config first; "not found" is fine (it may not
	// have been running).
	if err := rt.DelInbound(context.Background(), inbound); err != nil &&
		!strings.Contains(err.Error(), "not found") {
		logger.Debug("SetInboundEnable: DelInbound on", rt.Name(), "failed:", err)
		needRestart = true
	}
	// Disabling ends here — nothing to re-add.
	if !enable {
		return needRestart, nil
	}

	addTarget := inbound
	if inbound.NodeID == nil {
		// Local inbound: strip disabled clients before handing the config
		// to the API (the stored settings include them).
		runtimeInbound, err := s.buildRuntimeInboundForAPI(db, inbound)
		if err != nil {
			logger.Debug("SetInboundEnable: build runtime config failed:", err)
			return true, nil
		}
		addTarget = runtimeInbound
	}
	if err := rt.AddInbound(context.Background(), addTarget); err != nil {
		logger.Debug("SetInboundEnable: AddInbound on", rt.Name(), "failed:", err)
		if inbound.NodeID != nil {
			return false, err
		}
		needRestart = true
	}
	return needRestart, nil
}
|
|
|
|
|
|
|
2023-07-18 02:40:22 +03:30
|
|
|
|
// UpdateInbound updates an existing inbound in place: validates the port,
// syncs traffic rows, preserves per-client timestamps in the settings JSON,
// copies the editable fields onto the stored row, and re-syncs the live
// runtime. Returns the (input) inbound, whether Xray needs a restart, and
// any error. The whole DB write runs in a transaction whose commit/rollback
// is decided by the named err in the deferred func.
func (s *InboundService) UpdateInbound(inbound *model.Inbound) (*model.Inbound, bool, error) {
	// Port must not collide with any inbound other than this one.
	exist, err := s.checkPortConflict(inbound, inbound.Id)
	if err != nil {
		return inbound, false, err
	}
	if exist {
		return inbound, false, common.NewError("Port already exists:", inbound.Port)
	}

	oldInbound, err := s.GetInbound(inbound.Id)
	if err != nil {
		return inbound, false, err
	}

	// Remember the pre-update tag: the runtime still knows the inbound
	// under this name.
	tag := oldInbound.Tag

	db := database.GetDB()
	tx := db.Begin()

	// Commit/rollback keyed off the named err at return time.
	defer func() {
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
	}()

	// Reconcile ClientTraffic rows with the new client list.
	err = s.updateClientTraffics(tx, oldInbound, inbound)
	if err != nil {
		return inbound, false, err
	}

	// Ensure created_at and updated_at exist in inbound.Settings clients
	{
		// Harvest existing timestamps (keyed by email) from the stored
		// settings so an edit doesn't reset them.
		var oldSettings map[string]any
		_ = json.Unmarshal([]byte(oldInbound.Settings), &oldSettings)
		emailToCreated := map[string]int64{}
		emailToUpdated := map[string]int64{}
		if oldSettings != nil {
			if oc, ok := oldSettings["clients"].([]any); ok {
				for _, it := range oc {
					if m, ok2 := it.(map[string]any); ok2 {
						if email, ok3 := m["email"].(string); ok3 {
							// JSON numbers decode as float64; int64 is kept
							// for defensiveness.
							switch v := m["created_at"].(type) {
							case float64:
								emailToCreated[email] = int64(v)
							case int64:
								emailToCreated[email] = v
							}
							switch v := m["updated_at"].(type) {
							case float64:
								emailToUpdated[email] = int64(v)
							case int64:
								emailToUpdated[email] = v
							}
						}
					}
				}
			}
		}
		var newSettings map[string]any
		if err2 := json.Unmarshal([]byte(inbound.Settings), &newSettings); err2 == nil && newSettings != nil {
			// Milliseconds since epoch, matching the stored format.
			now := time.Now().Unix() * 1000
			if nSlice, ok := newSettings["clients"].([]any); ok {
				for i := range nSlice {
					if m, ok2 := nSlice[i].(map[string]any); ok2 {
						email, _ := m["email"].(string)
						if _, ok3 := m["created_at"]; !ok3 {
							if v, ok4 := emailToCreated[email]; ok4 && v > 0 {
								m["created_at"] = v
							} else {
								m["created_at"] = now
							}
						}
						// Preserve client's updated_at if present; do not bump on parent inbound update
						if _, hasUpdated := m["updated_at"]; !hasUpdated {
							if v, ok4 := emailToUpdated[email]; ok4 && v > 0 {
								m["updated_at"] = v
							}
						}
						nSlice[i] = m
					}
				}
				newSettings["clients"] = nSlice
				if bs, err3 := json.MarshalIndent(newSettings, "", "  "); err3 == nil {
					inbound.Settings = string(bs)
				}
			}
		}
	}

	// Copy the editable fields onto the stored row (saved at the end).
	oldInbound.Total = inbound.Total
	oldInbound.Remark = inbound.Remark
	oldInbound.Enable = inbound.Enable
	oldInbound.ExpiryTime = inbound.ExpiryTime
	oldInbound.TrafficReset = inbound.TrafficReset
	oldInbound.Listen = inbound.Listen
	oldInbound.Port = inbound.Port
	oldInbound.Protocol = inbound.Protocol
	oldInbound.Settings = inbound.Settings
	oldInbound.StreamSettings = inbound.StreamSettings
	oldInbound.Sniffing = inbound.Sniffing
	oldInbound.Tag, err = s.resolveInboundTag(inbound, inbound.Id)
	if err != nil {
		return inbound, false, err
	}

	needRestart := false
	rt, rterr := s.runtimeFor(oldInbound)
	if rterr != nil {
		// Remote-node inbounds must be applied live: fail (rolls back).
		if oldInbound.NodeID != nil {
			err = rterr
			return inbound, false, err
		}
		needRestart = true
	} else {
		// oldSnapshot carries the *old* tag so the runtime can find the
		// currently-running instance of this inbound.
		oldSnapshot := *oldInbound
		oldSnapshot.Tag = tag
		if oldInbound.NodeID == nil {
			// Local: delete the running copy, then re-add if still enabled;
			// failures fall back to a full restart instead of erroring.
			if err2 := rt.DelInbound(context.Background(), &oldSnapshot); err2 == nil {
				logger.Debug("Old inbound deleted on", rt.Name(), ":", tag)
			}
			if inbound.Enable {
				runtimeInbound, err2 := s.buildRuntimeInboundForAPI(tx, oldInbound)
				if err2 != nil {
					logger.Debug("Unable to prepare runtime inbound config:", err2)
					needRestart = true
				} else if err2 := rt.AddInbound(context.Background(), runtimeInbound); err2 == nil {
					logger.Debug("Updated inbound added on", rt.Name(), ":", oldInbound.Tag)
				} else {
					logger.Debug("Unable to update inbound on", rt.Name(), ":", err2)
					needRestart = true
				}
			}
		} else {
			// Remote node: live update must succeed or the edit is rolled
			// back (assigning err triggers the deferred rollback).
			if !inbound.Enable {
				if err2 := rt.DelInbound(context.Background(), &oldSnapshot); err2 != nil {
					err = err2
					return inbound, false, err
				}
			} else if err2 := rt.UpdateInbound(context.Background(), &oldSnapshot, oldInbound); err2 != nil {
				err = err2
				return inbound, false, err
			}
		}
	}

	return inbound, needRestart, tx.Save(oldInbound).Error
}
|
|
|
|
|
|
|
2026-04-20 19:45:21 +02:00
|
|
|
|
// buildRuntimeInboundForAPI returns a copy of inbound whose settings JSON
// has disabled clients filtered out, suitable for pushing to the live Xray
// API (the stored settings keep disabled clients; the runtime must not see
// them). A client is dropped when either its ClientTraffic row has
// enable=false or its own "enable" field in the settings is false.
// The input inbound is never mutated.
func (s *InboundService) buildRuntimeInboundForAPI(tx *gorm.DB, inbound *model.Inbound) (*model.Inbound, error) {
	if inbound == nil {
		return nil, fmt.Errorf("inbound is nil")
	}

	// Shallow copy; only Settings is rewritten below.
	runtimeInbound := *inbound
	settings := map[string]any{}
	if err := json.Unmarshal([]byte(inbound.Settings), &settings); err != nil {
		return nil, err
	}

	// No "clients" array (e.g. clientless protocols): nothing to filter.
	clients, ok := settings["clients"].([]any)
	if !ok {
		return &runtimeInbound, nil
	}

	// Fetch just the email→enable pairs for this inbound's traffic rows.
	var clientStats []xray.ClientTraffic
	err := tx.Model(xray.ClientTraffic{}).
		Where("inbound_id = ?", inbound.Id).
		Select("email", "enable").
		Find(&clientStats).Error
	if err != nil {
		return nil, err
	}

	enableMap := make(map[string]bool, len(clientStats))
	for _, clientTraffic := range clientStats {
		enableMap[clientTraffic.Email] = clientTraffic.Enable
	}

	finalClients := make([]any, 0, len(clients))
	for _, client := range clients {
		c, ok := client.(map[string]any)
		if !ok {
			continue
		}

		// Drop clients disabled via their traffic row…
		email, _ := c["email"].(string)
		if enable, exists := enableMap[email]; exists && !enable {
			continue
		}

		// …or disabled directly in the settings JSON.
		if manualEnable, ok := c["enable"].(bool); ok && !manualEnable {
			continue
		}

		finalClients = append(finalClients, c)
	}

	settings["clients"] = finalClients
	modifiedSettings, err := json.MarshalIndent(settings, "", "  ")
	if err != nil {
		return nil, err
	}
	runtimeInbound.Settings = string(modifiedSettings)

	return &runtimeInbound, nil
}
|
|
|
|
|
|
|
2026-05-05 18:27:49 +03:00
|
|
|
|
// updateClientTraffics syncs the ClientTraffic rows with the inbound's clients
|
|
|
|
|
|
// list: removes rows for emails that disappeared, inserts rows for newly-added
|
|
|
|
|
|
// emails. Uses sets for O(N) lookup — the previous nested-loop implementation
|
|
|
|
|
|
// was O(N²) and degraded into multi-second pauses on inbounds with thousands
|
|
|
|
|
|
// of clients (toggling, saving, or deleting any such inbound felt frozen).
|
2023-12-04 19:20:16 +01:00
|
|
|
|
func (s *InboundService) updateClientTraffics(tx *gorm.DB, oldInbound *model.Inbound, newInbound *model.Inbound) error {
|
2023-06-05 00:32:19 +03:30
|
|
|
|
oldClients, err := s.GetClients(oldInbound)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
|
|
|
|
|
newClients, err := s.GetClients(newInbound)
|
2023-04-18 21:34:06 +03:30
|
|
|
|
if err != nil {
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
2023-04-19 11:55:38 +03:30
|
|
|
|
|
2026-05-05 18:27:49 +03:00
|
|
|
|
// Email is the unique key for ClientTraffic rows. Clients without an
|
|
|
|
|
|
// email have no stats row to sync — skip them on both sides instead of
|
|
|
|
|
|
// risking a unique-constraint hit or accidental delete of an unrelated row.
|
|
|
|
|
|
oldEmails := make(map[string]struct{}, len(oldClients))
|
|
|
|
|
|
for i := range oldClients {
|
|
|
|
|
|
if oldClients[i].Email == "" {
|
|
|
|
|
|
continue
|
|
|
|
|
|
}
|
|
|
|
|
|
oldEmails[oldClients[i].Email] = struct{}{}
|
|
|
|
|
|
}
|
|
|
|
|
|
newEmails := make(map[string]struct{}, len(newClients))
|
|
|
|
|
|
for i := range newClients {
|
|
|
|
|
|
if newClients[i].Email == "" {
|
|
|
|
|
|
continue
|
|
|
|
|
|
}
|
|
|
|
|
|
newEmails[newClients[i].Email] = struct{}{}
|
|
|
|
|
|
}
|
2023-06-05 00:32:19 +03:30
|
|
|
|
|
2026-05-09 17:38:48 +02:00
|
|
|
|
// Drop stats rows for removed emails — but not when a sibling inbound
|
|
|
|
|
|
// still references the email, since the row is the shared accumulator.
|
2026-05-05 18:27:49 +03:00
|
|
|
|
for i := range oldClients {
|
|
|
|
|
|
email := oldClients[i].Email
|
|
|
|
|
|
if email == "" {
|
|
|
|
|
|
continue
|
2023-06-05 00:32:19 +03:30
|
|
|
|
}
|
2026-05-05 18:27:49 +03:00
|
|
|
|
if _, kept := newEmails[email]; kept {
|
|
|
|
|
|
continue
|
|
|
|
|
|
}
|
2026-05-09 17:38:48 +02:00
|
|
|
|
stillUsed, err := s.emailUsedByOtherInbounds(email, oldInbound.Id)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
|
|
|
|
|
if stillUsed {
|
|
|
|
|
|
continue
|
|
|
|
|
|
}
|
2026-05-05 18:27:49 +03:00
|
|
|
|
if err := s.DelClientStat(tx, email); err != nil {
|
|
|
|
|
|
return err
|
2023-06-05 00:32:19 +03:30
|
|
|
|
}
|
|
|
|
|
|
}
|
2026-05-05 18:27:49 +03:00
|
|
|
|
for i := range newClients {
|
|
|
|
|
|
email := newClients[i].Email
|
|
|
|
|
|
if email == "" {
|
|
|
|
|
|
continue
|
2023-06-05 00:32:19 +03:30
|
|
|
|
}
|
2026-05-05 18:27:49 +03:00
|
|
|
|
if _, existed := oldEmails[email]; existed {
|
2026-05-10 16:25:23 +02:00
|
|
|
|
if err := s.UpdateClientStat(tx, email, &newClients[i]); err != nil {
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
2026-05-05 18:27:49 +03:00
|
|
|
|
continue
|
|
|
|
|
|
}
|
|
|
|
|
|
if err := s.AddClientStat(tx, oldInbound.Id, &newClients[i]); err != nil {
|
|
|
|
|
|
return err
|
2023-06-05 00:32:19 +03:30
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
return nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// AddInboundClient appends the clients carried in data.Settings to the
// existing inbound data.Id, persists the merged settings, records stats rows,
// and pushes the new users to the runtime when possible.
//
// Returns (needRestart, error): needRestart is true when the runtime could not
// be updated in place and a full restart is required to apply the change.
func (s *InboundService) AddInboundClient(data *model.Inbound) (bool, error) {
	clients, err := s.GetClients(data)
	if err != nil {
		return false, err
	}

	var settings map[string]any
	err = json.Unmarshal([]byte(data.Settings), &settings)
	if err != nil {
		return false, err
	}

	// NOTE(review): unchecked type assertion — panics if settings has no
	// "clients" array. Callers appear to always supply one; confirm.
	interfaceClients := settings["clients"].([]any)
	// Add timestamps for new clients being appended (epoch milliseconds).
	nowTs := time.Now().Unix() * 1000
	for i := range interfaceClients {
		if cm, ok := interfaceClients[i].(map[string]any); ok {
			if _, ok2 := cm["created_at"]; !ok2 {
				cm["created_at"] = nowTs
			}
			cm["updated_at"] = nowTs
			interfaceClients[i] = cm
		}
	}
	// Reject the whole batch if any email already exists anywhere.
	existEmail, err := s.checkEmailsExistForClients(clients)
	if err != nil {
		return false, err
	}
	if existEmail != "" {
		return false, common.NewError("Duplicate email:", existEmail)
	}

	oldInbound, err := s.GetInbound(data.Id)
	if err != nil {
		return false, err
	}

	// Secure client ID: every client must carry the credential field its
	// protocol uses as the primary key (password/email/auth/ID).
	for _, client := range clients {
		if strings.TrimSpace(client.Email) == "" {
			return false, common.NewError("client email is required")
		}
		switch oldInbound.Protocol {
		case "trojan":
			if client.Password == "" {
				return false, common.NewError("empty client ID")
			}
		case "shadowsocks":
			if client.Email == "" {
				return false, common.NewError("empty client ID")
			}
		case "hysteria", "hysteria2":
			if client.Auth == "" {
				return false, common.NewError("empty client ID")
			}
		default:
			if client.ID == "" {
				return false, common.NewError("empty client ID")
			}
		}
	}

	var oldSettings map[string]any
	err = json.Unmarshal([]byte(oldInbound.Settings), &oldSettings)
	if err != nil {
		return false, err
	}

	// Merge the incoming clients into the stored settings.
	// NOTE(review): unchecked assertion — panics if the stored settings
	// lack a "clients" array.
	oldClients := oldSettings["clients"].([]any)
	oldClients = append(oldClients, interfaceClients...)

	oldSettings["clients"] = oldClients

	newSettings, err := json.MarshalIndent(oldSettings, "", " ")
	if err != nil {
		return false, err
	}

	oldInbound.Settings = string(newSettings)

	db := database.GetDB()
	tx := db.Begin()

	// Commit/rollback is driven by the named err — any later assignment to
	// err before return decides the transaction's fate.
	defer func() {
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
	}()

	needRestart := false
	rt, rterr := s.runtimeFor(oldInbound)
	if rterr != nil {
		// A remote node without a runtime is a hard error; for the local
		// runtime we fall back to a restart instead.
		if oldInbound.NodeID != nil {
			err = rterr
			return false, err
		}
		needRestart = true
	} else if oldInbound.NodeID == nil {
		// Local runtime: add each enabled client via the user API.
		for _, client := range clients {
			if len(client.Email) == 0 {
				needRestart = true
				continue
			}
			// NOTE(review): AddClientStat error is ignored here; the
			// remote-node branch below does the same — confirm intended.
			s.AddClientStat(tx, data.Id, &client)
			if !client.Enable {
				continue
			}
			cipher := ""
			if oldInbound.Protocol == "shadowsocks" {
				// NOTE(review): assumes shadowsocks settings always
				// carry a string "method" — panics otherwise.
				cipher = oldSettings["method"].(string)
			}
			err1 := rt.AddUser(context.Background(), oldInbound, map[string]any{
				"email":    client.Email,
				"id":       client.ID,
				"auth":     client.Auth,
				"security": client.Security,
				"flow":     client.Flow,
				"password": client.Password,
				"cipher":   cipher,
			})
			if err1 == nil {
				logger.Debug("Client added on", rt.Name(), ":", client.Email)
			} else {
				logger.Debug("Error in adding client on", rt.Name(), ":", err1)
				needRestart = true
			}
		}
	} else {
		// Remote node: record stats, then replace the whole inbound.
		for _, client := range clients {
			if len(client.Email) > 0 {
				s.AddClientStat(tx, data.Id, &client)
			}
		}
		if err1 := rt.UpdateInbound(context.Background(), oldInbound, oldInbound); err1 != nil {
			err = err1
			return false, err
		}
	}

	return needRestart, tx.Save(oldInbound).Error
}
|
|
|
|
|
|
|
2026-04-23 16:19:07 +03:00
|
|
|
|
func (s *InboundService) getClientPrimaryKey(protocol model.Protocol, client model.Client) string {
|
|
|
|
|
|
switch protocol {
|
|
|
|
|
|
case model.Trojan:
|
|
|
|
|
|
return client.Password
|
|
|
|
|
|
case model.Shadowsocks:
|
|
|
|
|
|
return client.Email
|
|
|
|
|
|
case model.Hysteria:
|
|
|
|
|
|
return client.Auth
|
|
|
|
|
|
default:
|
|
|
|
|
|
return client.ID
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func (s *InboundService) writeBackClientSubID(sourceInboundID int, sourceProtocol model.Protocol, client model.Client, subID string) (bool, error) {
|
|
|
|
|
|
client.SubID = subID
|
|
|
|
|
|
client.UpdatedAt = time.Now().UnixMilli()
|
|
|
|
|
|
clientID := s.getClientPrimaryKey(sourceProtocol, client)
|
|
|
|
|
|
if clientID == "" {
|
|
|
|
|
|
return false, common.NewError("empty client ID")
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
settingsBytes, err := json.Marshal(map[string][]model.Client{
|
2026-04-26 17:34:31 +02:00
|
|
|
|
"clients": {client},
|
2026-04-23 16:19:07 +03:00
|
|
|
|
})
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return false, err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
updatePayload := &model.Inbound{
|
|
|
|
|
|
Id: sourceInboundID,
|
|
|
|
|
|
Settings: string(settingsBytes),
|
|
|
|
|
|
}
|
|
|
|
|
|
return s.UpdateInboundClient(updatePayload, clientID)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func (s *InboundService) generateRandomCredential(targetProtocol model.Protocol) string {
|
|
|
|
|
|
switch targetProtocol {
|
|
|
|
|
|
case model.VMESS, model.VLESS:
|
|
|
|
|
|
return uuid.NewString()
|
|
|
|
|
|
default:
|
|
|
|
|
|
return strings.ReplaceAll(uuid.NewString(), "-", "")
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func (s *InboundService) buildTargetClientFromSource(source model.Client, targetProtocol model.Protocol, email string, flow string) (model.Client, error) {
|
|
|
|
|
|
nowTs := time.Now().UnixMilli()
|
|
|
|
|
|
target := source
|
|
|
|
|
|
target.Email = email
|
|
|
|
|
|
target.CreatedAt = nowTs
|
|
|
|
|
|
target.UpdatedAt = nowTs
|
|
|
|
|
|
|
|
|
|
|
|
target.ID = ""
|
|
|
|
|
|
target.Password = ""
|
|
|
|
|
|
target.Auth = ""
|
|
|
|
|
|
target.Flow = ""
|
|
|
|
|
|
|
|
|
|
|
|
switch targetProtocol {
|
|
|
|
|
|
case model.VMESS:
|
|
|
|
|
|
target.ID = s.generateRandomCredential(targetProtocol)
|
|
|
|
|
|
case model.VLESS:
|
|
|
|
|
|
target.ID = s.generateRandomCredential(targetProtocol)
|
|
|
|
|
|
if flow == "xtls-rprx-vision" || flow == "xtls-rprx-vision-udp443" {
|
|
|
|
|
|
target.Flow = flow
|
|
|
|
|
|
}
|
|
|
|
|
|
case model.Trojan, model.Shadowsocks:
|
|
|
|
|
|
target.Password = s.generateRandomCredential(targetProtocol)
|
|
|
|
|
|
case model.Hysteria:
|
|
|
|
|
|
target.Auth = s.generateRandomCredential(targetProtocol)
|
|
|
|
|
|
default:
|
|
|
|
|
|
target.ID = s.generateRandomCredential(targetProtocol)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
return target, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func (s *InboundService) nextAvailableCopiedEmail(originalEmail string, targetID int, occupied map[string]struct{}) string {
|
|
|
|
|
|
base := fmt.Sprintf("%s_%d", originalEmail, targetID)
|
|
|
|
|
|
candidate := base
|
|
|
|
|
|
suffix := 0
|
|
|
|
|
|
for {
|
|
|
|
|
|
if _, exists := occupied[strings.ToLower(candidate)]; !exists {
|
|
|
|
|
|
occupied[strings.ToLower(candidate)] = struct{}{}
|
|
|
|
|
|
return candidate
|
|
|
|
|
|
}
|
|
|
|
|
|
suffix++
|
|
|
|
|
|
candidate = fmt.Sprintf("%s_%d", base, suffix)
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// CopyInboundClients copies clients from one inbound to another, generating
// fresh emails and protocol-appropriate credentials for the copies. When
// clientEmails is non-empty only those emails (case-insensitive) are copied;
// otherwise all source clients with a non-empty email are considered.
//
// Side effect: source clients lacking a SubID get one generated and written
// back to the source inbound before copying, so the copy shares it.
//
// Returns the per-client outcome report, whether a runtime restart is needed,
// and a fatal error (per-client failures go into result.Errors instead).
func (s *InboundService) CopyInboundClients(targetInboundID int, sourceInboundID int, clientEmails []string, flow string) (*CopyClientsResult, bool, error) {
	result := &CopyClientsResult{
		Added:   []string{},
		Skipped: []string{},
		Errors:  []string{},
	}
	if targetInboundID == sourceInboundID {
		return result, false, common.NewError("source and target inbounds must be different")
	}

	targetInbound, err := s.GetInbound(targetInboundID)
	if err != nil {
		return result, false, err
	}
	sourceInbound, err := s.GetInbound(sourceInboundID)
	if err != nil {
		return result, false, err
	}

	sourceClients, err := s.GetClients(sourceInbound)
	if err != nil {
		return result, false, err
	}
	if len(sourceClients) == 0 {
		return result, false, nil
	}

	// Optional filter: normalise the requested emails for case-insensitive
	// matching. Empty set means "copy everything".
	allowedEmails := map[string]struct{}{}
	if len(clientEmails) > 0 {
		for _, email := range clientEmails {
			allowedEmails[strings.ToLower(strings.TrimSpace(email))] = struct{}{}
		}
	}

	// Collect every email already in use (panel-wide) so generated copy
	// emails cannot collide with existing ones.
	occupiedEmails := map[string]struct{}{}
	allEmails, err := s.getAllEmails()
	if err != nil {
		return result, false, err
	}
	for _, email := range allEmails {
		// getAllEmails may return JSON-quoted values; strip the quotes.
		clean := strings.Trim(email, "\"")
		if clean != "" {
			occupiedEmails[strings.ToLower(clean)] = struct{}{}
		}
	}

	newClients := make([]model.Client, 0)
	needRestart := false
	for _, sourceClient := range sourceClients {
		originalEmail := strings.TrimSpace(sourceClient.Email)
		if originalEmail == "" {
			continue
		}
		if len(allowedEmails) > 0 {
			if _, ok := allowedEmails[strings.ToLower(originalEmail)]; !ok {
				continue
			}
		}

		// Ensure the source client has a SubID before copying, persisting
		// it back to the source inbound; a write-back failure skips only
		// this client.
		if sourceClient.SubID == "" {
			newSubID := uuid.NewString()
			subNeedRestart, subErr := s.writeBackClientSubID(sourceInbound.Id, sourceInbound.Protocol, sourceClient, newSubID)
			if subErr != nil {
				result.Errors = append(result.Errors, fmt.Sprintf("%s: failed to write source subId: %v", originalEmail, subErr))
				continue
			}
			if subNeedRestart {
				needRestart = true
			}
			sourceClient.SubID = newSubID
		}

		targetEmail := s.nextAvailableCopiedEmail(originalEmail, targetInboundID, occupiedEmails)
		targetClient, buildErr := s.buildTargetClientFromSource(sourceClient, targetInbound.Protocol, targetEmail, flow)
		if buildErr != nil {
			result.Errors = append(result.Errors, fmt.Sprintf("%s: %v", originalEmail, buildErr))
			continue
		}
		newClients = append(newClients, targetClient)
		result.Added = append(result.Added, targetEmail)
	}

	if len(newClients) == 0 {
		return result, needRestart, nil
	}

	// Hand the prepared batch to AddInboundClient, which does validation,
	// persistence, stats rows, and runtime sync for the target inbound.
	settingsPayload, err := json.Marshal(map[string][]model.Client{
		"clients": newClients,
	})
	if err != nil {
		return result, needRestart, err
	}

	addNeedRestart, err := s.AddInboundClient(&model.Inbound{
		Id:       targetInboundID,
		Settings: string(settingsPayload),
	})
	if err != nil {
		return result, needRestart, err
	}
	if addNeedRestart {
		needRestart = true
	}

	return result, needRestart, nil
}
|
|
|
|
|
|
|
2023-06-05 00:32:19 +03:30
|
|
|
|
// DelInboundClient removes one client (addressed by its protocol-specific
// key) from an inbound, deletes its stats/IP rows when no sibling inbound
// shares the email, and removes the user from the runtime when possible.
//
// Returns (needRestart, error): needRestart is true when the runtime could
// not be updated in place.
func (s *InboundService) DelInboundClient(inboundId int, clientId string) (bool, error) {
	oldInbound, err := s.GetInbound(inboundId)
	if err != nil {
		logger.Error("Load Old Data Error")
		return false, err
	}
	var settings map[string]any
	err = json.Unmarshal([]byte(oldInbound.Settings), &settings)
	if err != nil {
		return false, err
	}

	email := ""
	// Pick the JSON field that uniquely identifies a client for this
	// protocol (mirrors getClientPrimaryKey).
	client_key := "id"
	switch oldInbound.Protocol {
	case "trojan":
		client_key = "password"
	case "shadowsocks":
		client_key = "email"
	case "hysteria", "hysteria2":
		client_key = "auth"
	}

	// NOTE(review): unchecked type assertions below — a settings blob
	// without a "clients" array, or a client missing the key field,
	// panics here. Confirm upstream always guarantees the shape.
	interfaceClients := settings["clients"].([]any)
	var newClients []any
	needApiDel := false
	clientFound := false
	for _, client := range interfaceClients {
		c := client.(map[string]any)
		c_id := c[client_key].(string)
		if c_id == clientId {
			clientFound = true
			email, _ = c["email"].(string)
			// Only enabled clients exist in the runtime, so only they
			// need an API-side delete.
			needApiDel, _ = c["enable"].(bool)
		} else {
			newClients = append(newClients, client)
		}
	}

	if !clientFound {
		return false, common.NewError("Client Not Found In Inbound For ID:", clientId)
	}

	// An inbound must always keep at least one client.
	if len(newClients) == 0 {
		return false, common.NewError("no client remained in Inbound")
	}

	settings["clients"] = newClients
	newSettings, err := json.MarshalIndent(settings, "", " ")
	if err != nil {
		return false, err
	}

	oldInbound.Settings = string(newSettings)

	db := database.GetDB()

	// Keep the client_traffics row and IPs alive when another inbound still
	// references this email — siblings depend on the shared accounting state.
	emailShared, err := s.emailUsedByOtherInbounds(email, inboundId)
	if err != nil {
		return false, err
	}

	if !emailShared {
		err = s.DelClientIPs(db, email)
		if err != nil {
			logger.Error("Error in delete client IPs")
			return false, err
		}
	}
	needRestart := false

	if len(email) > 0 {
		// notDepleted mirrors the stats row's enable flag: a depleted
		// (disabled) client is already absent from the runtime.
		notDepleted := true
		err = db.Model(xray.ClientTraffic{}).Select("enable").Where("email = ?", email).First(&notDepleted).Error
		if err != nil {
			logger.Error("Get stats error")
			return false, err
		}
		if !emailShared {
			err = s.DelClientStat(db, email)
			if err != nil {
				logger.Error("Delete stats Data Error")
				return false, err
			}
		}
		if needApiDel && notDepleted {
			rt, rterr := s.runtimeFor(oldInbound)
			if rterr != nil {
				// Remote node without a runtime is fatal; local falls
				// back to a restart.
				if oldInbound.NodeID != nil {
					return false, rterr
				}
				needRestart = true
			} else if oldInbound.NodeID == nil {
				err1 := rt.RemoveUser(context.Background(), oldInbound, email)
				if err1 == nil {
					logger.Debug("Client deleted on", rt.Name(), ":", email)
					needRestart = false
				} else if strings.Contains(err1.Error(), fmt.Sprintf("User %s not found.", email)) {
					logger.Debug("User is already deleted. Nothing to do more...")
				} else {
					logger.Debug("Error in deleting client on", rt.Name(), ":", err1)
					needRestart = true
				}
			} else {
				// Remote node: replace the whole inbound definition.
				if err1 := rt.UpdateInbound(context.Background(), oldInbound, oldInbound); err1 != nil {
					return false, err1
				}
			}
		}
	}
	return needRestart, db.Save(oldInbound).Error
}
|
|
|
|
|
|
|
2023-06-05 00:32:19 +03:30
|
|
|
|
func (s *InboundService) UpdateInboundClient(data *model.Inbound, clientId string) (bool, error) {
|
2025-09-16 09:24:32 +02:00
|
|
|
|
// TODO: check if TrafficReset field is updating
|
2023-05-22 18:06:34 +03:30
|
|
|
|
clients, err := s.GetClients(data)
|
2023-03-17 19:37:49 +03:30
|
|
|
|
if err != nil {
|
2023-06-05 00:32:19 +03:30
|
|
|
|
return false, err
|
2023-03-17 19:37:49 +03:30
|
|
|
|
}
|
|
|
|
|
|
|
2025-03-12 20:13:51 +01:00
|
|
|
|
var settings map[string]any
|
2023-04-19 11:55:38 +03:30
|
|
|
|
err = json.Unmarshal([]byte(data.Settings), &settings)
|
|
|
|
|
|
if err != nil {
|
2023-06-05 00:32:19 +03:30
|
|
|
|
return false, err
|
2023-04-19 11:55:38 +03:30
|
|
|
|
}
|
|
|
|
|
|
|
2025-03-12 20:13:51 +01:00
|
|
|
|
interfaceClients := settings["clients"].([]any)
|
2023-04-19 11:55:38 +03:30
|
|
|
|
|
2023-04-18 21:34:06 +03:30
|
|
|
|
oldInbound, err := s.GetInbound(data.Id)
|
2023-03-17 19:37:49 +03:30
|
|
|
|
if err != nil {
|
2023-06-05 00:32:19 +03:30
|
|
|
|
return false, err
|
2023-03-17 19:37:49 +03:30
|
|
|
|
}
|
|
|
|
|
|
|
2023-05-22 18:06:34 +03:30
|
|
|
|
oldClients, err := s.GetClients(oldInbound)
|
2023-03-17 19:37:49 +03:30
|
|
|
|
if err != nil {
|
2023-06-05 00:32:19 +03:30
|
|
|
|
return false, err
|
2023-03-17 19:37:49 +03:30
|
|
|
|
}
|
|
|
|
|
|
|
2023-04-25 14:38:35 +03:30
|
|
|
|
oldEmail := ""
|
2024-03-11 11:52:28 +03:30
|
|
|
|
newClientId := ""
|
2024-04-21 00:58:37 +03:30
|
|
|
|
clientIndex := -1
|
2023-04-25 14:38:35 +03:30
|
|
|
|
for index, oldClient := range oldClients {
|
|
|
|
|
|
oldClientId := ""
|
2025-08-17 13:37:49 +02:00
|
|
|
|
switch oldInbound.Protocol {
|
|
|
|
|
|
case "trojan":
|
2023-04-25 14:38:35 +03:30
|
|
|
|
oldClientId = oldClient.Password
|
2024-03-11 11:52:28 +03:30
|
|
|
|
newClientId = clients[0].Password
|
2025-08-17 13:37:49 +02:00
|
|
|
|
case "shadowsocks":
|
2023-05-06 20:21:14 +03:30
|
|
|
|
oldClientId = oldClient.Email
|
2024-03-11 11:52:28 +03:30
|
|
|
|
newClientId = clients[0].Email
|
2026-04-22 18:55:09 +03:00
|
|
|
|
case "hysteria", "hysteria2":
|
2026-04-20 16:05:27 +02:00
|
|
|
|
oldClientId = oldClient.Auth
|
|
|
|
|
|
newClientId = clients[0].Auth
|
2025-08-17 13:37:49 +02:00
|
|
|
|
default:
|
2023-04-25 14:38:35 +03:30
|
|
|
|
oldClientId = oldClient.ID
|
2024-03-11 11:52:28 +03:30
|
|
|
|
newClientId = clients[0].ID
|
2023-04-25 14:38:35 +03:30
|
|
|
|
}
|
|
|
|
|
|
if clientId == oldClientId {
|
|
|
|
|
|
oldEmail = oldClient.Email
|
|
|
|
|
|
clientIndex = index
|
|
|
|
|
|
break
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2024-03-11 11:52:28 +03:30
|
|
|
|
// Validate new client ID
|
2024-04-21 00:58:37 +03:30
|
|
|
|
if newClientId == "" || clientIndex == -1 {
|
2024-03-11 11:52:28 +03:30
|
|
|
|
return false, common.NewError("empty client ID")
|
|
|
|
|
|
}
|
2026-05-13 13:45:31 +02:00
|
|
|
|
if strings.TrimSpace(clients[0].Email) == "" {
|
|
|
|
|
|
return false, common.NewError("client email is required")
|
|
|
|
|
|
}
|
2024-03-11 11:52:28 +03:30
|
|
|
|
|
2026-05-13 13:45:31 +02:00
|
|
|
|
if clients[0].Email != oldEmail {
|
2023-04-18 21:34:06 +03:30
|
|
|
|
existEmail, err := s.checkEmailsExistForClients(clients)
|
|
|
|
|
|
if err != nil {
|
2023-06-05 00:32:19 +03:30
|
|
|
|
return false, err
|
2023-04-18 21:34:06 +03:30
|
|
|
|
}
|
|
|
|
|
|
if existEmail != "" {
|
2023-06-05 00:32:19 +03:30
|
|
|
|
return false, common.NewError("Duplicate email:", existEmail)
|
2023-04-18 21:34:06 +03:30
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2025-03-12 20:13:51 +01:00
|
|
|
|
var oldSettings map[string]any
|
2023-04-19 11:55:38 +03:30
|
|
|
|
err = json.Unmarshal([]byte(oldInbound.Settings), &oldSettings)
|
2023-03-17 19:37:49 +03:30
|
|
|
|
if err != nil {
|
2023-06-05 00:32:19 +03:30
|
|
|
|
return false, err
|
2023-03-17 19:37:49 +03:30
|
|
|
|
}
|
2025-03-12 20:13:51 +01:00
|
|
|
|
settingsClients := oldSettings["clients"].([]any)
|
2025-08-27 21:00:49 +03:30
|
|
|
|
// Preserve created_at and set updated_at for the replacing client
|
|
|
|
|
|
var preservedCreated any
|
|
|
|
|
|
if clientIndex >= 0 && clientIndex < len(settingsClients) {
|
|
|
|
|
|
if oldMap, ok := settingsClients[clientIndex].(map[string]any); ok {
|
|
|
|
|
|
if v, ok2 := oldMap["created_at"]; ok2 {
|
|
|
|
|
|
preservedCreated = v
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
if len(interfaceClients) > 0 {
|
|
|
|
|
|
if newMap, ok := interfaceClients[0].(map[string]any); ok {
|
|
|
|
|
|
if preservedCreated == nil {
|
|
|
|
|
|
preservedCreated = time.Now().Unix() * 1000
|
|
|
|
|
|
}
|
|
|
|
|
|
newMap["created_at"] = preservedCreated
|
|
|
|
|
|
newMap["updated_at"] = time.Now().Unix() * 1000
|
|
|
|
|
|
interfaceClients[0] = newMap
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
2024-07-04 00:17:44 +02:00
|
|
|
|
settingsClients[clientIndex] = interfaceClients[0]
|
2023-04-19 11:55:38 +03:30
|
|
|
|
oldSettings["clients"] = settingsClients
|
2023-04-18 21:34:06 +03:30
|
|
|
|
|
2026-05-07 14:44:33 +02:00
|
|
|
|
// testseed is only meaningful when at least one VLESS client uses the exact
|
|
|
|
|
|
// xtls-rprx-vision flow. The client-edit path only rewrites a single client,
|
|
|
|
|
|
// so re-check the flow set here and strip a stale testseed when nothing in the
|
|
|
|
|
|
// inbound still warrants it. The full-inbound update path already handles this
|
|
|
|
|
|
// on the JS side via VLESSSettings.toJson().
|
|
|
|
|
|
if oldInbound.Protocol == model.VLESS {
|
|
|
|
|
|
hasVisionFlow := false
|
|
|
|
|
|
for _, c := range settingsClients {
|
|
|
|
|
|
cm, ok := c.(map[string]any)
|
|
|
|
|
|
if !ok {
|
|
|
|
|
|
continue
|
|
|
|
|
|
}
|
|
|
|
|
|
if flow, _ := cm["flow"].(string); flow == "xtls-rprx-vision" {
|
|
|
|
|
|
hasVisionFlow = true
|
|
|
|
|
|
break
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
if !hasVisionFlow {
|
|
|
|
|
|
delete(oldSettings, "testseed")
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-04-19 11:55:38 +03:30
|
|
|
|
newSettings, err := json.MarshalIndent(oldSettings, "", " ")
|
2023-04-18 21:34:06 +03:30
|
|
|
|
if err != nil {
|
2023-06-05 00:32:19 +03:30
|
|
|
|
return false, err
|
2023-04-18 21:34:06 +03:30
|
|
|
|
}
|
2023-03-17 19:37:49 +03:30
|
|
|
|
|
2023-04-18 21:34:06 +03:30
|
|
|
|
oldInbound.Settings = string(newSettings)
|
2023-03-17 19:37:49 +03:30
|
|
|
|
db := database.GetDB()
|
2023-06-05 00:32:19 +03:30
|
|
|
|
tx := db.Begin()
|
|
|
|
|
|
|
|
|
|
|
|
defer func() {
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
tx.Rollback()
|
|
|
|
|
|
} else {
|
|
|
|
|
|
tx.Commit()
|
|
|
|
|
|
}
|
|
|
|
|
|
}()
|
2023-03-17 19:37:49 +03:30
|
|
|
|
|
2023-04-18 21:34:06 +03:30
|
|
|
|
if len(clients[0].Email) > 0 {
|
2023-04-25 14:38:35 +03:30
|
|
|
|
if len(oldEmail) > 0 {
|
2026-05-09 17:38:48 +02:00
|
|
|
|
// Repointing onto an email that already has a row would collide on
|
|
|
|
|
|
// the unique constraint, so retire the donor and let the surviving
|
|
|
|
|
|
// row carry the merged identity.
|
|
|
|
|
|
emailUnchanged := strings.EqualFold(oldEmail, clients[0].Email)
|
|
|
|
|
|
targetExists := int64(0)
|
|
|
|
|
|
if !emailUnchanged {
|
|
|
|
|
|
if err = tx.Model(xray.ClientTraffic{}).Where("email = ?", clients[0].Email).Count(&targetExists).Error; err != nil {
|
|
|
|
|
|
return false, err
|
|
|
|
|
|
}
|
2023-03-17 18:51:43 +01:00
|
|
|
|
}
|
2026-05-09 17:38:48 +02:00
|
|
|
|
if emailUnchanged || targetExists == 0 {
|
|
|
|
|
|
err = s.UpdateClientStat(tx, oldEmail, &clients[0])
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return false, err
|
|
|
|
|
|
}
|
|
|
|
|
|
err = s.UpdateClientIPs(tx, oldEmail, clients[0].Email)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return false, err
|
|
|
|
|
|
}
|
|
|
|
|
|
} else {
|
|
|
|
|
|
stillUsed, sErr := s.emailUsedByOtherInbounds(oldEmail, data.Id)
|
|
|
|
|
|
if sErr != nil {
|
|
|
|
|
|
return false, sErr
|
|
|
|
|
|
}
|
|
|
|
|
|
if !stillUsed {
|
|
|
|
|
|
if err = s.DelClientStat(tx, oldEmail); err != nil {
|
|
|
|
|
|
return false, err
|
|
|
|
|
|
}
|
|
|
|
|
|
if err = s.DelClientIPs(tx, oldEmail); err != nil {
|
|
|
|
|
|
return false, err
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
// Refresh the surviving row with the new client's limits/expiry.
|
|
|
|
|
|
if err = s.UpdateClientStat(tx, clients[0].Email, &clients[0]); err != nil {
|
|
|
|
|
|
return false, err
|
|
|
|
|
|
}
|
2023-03-17 18:51:43 +01:00
|
|
|
|
}
|
2023-03-17 19:37:49 +03:30
|
|
|
|
} else {
|
2023-06-05 00:32:19 +03:30
|
|
|
|
s.AddClientStat(tx, data.Id, &clients[0])
|
2023-03-17 19:37:49 +03:30
|
|
|
|
}
|
|
|
|
|
|
} else {
|
2026-05-09 17:38:48 +02:00
|
|
|
|
stillUsed, err := s.emailUsedByOtherInbounds(oldEmail, data.Id)
|
2023-03-17 18:51:43 +01:00
|
|
|
|
if err != nil {
|
2023-06-05 00:32:19 +03:30
|
|
|
|
return false, err
|
2023-03-17 18:51:43 +01:00
|
|
|
|
}
|
2026-05-09 17:38:48 +02:00
|
|
|
|
if !stillUsed {
|
|
|
|
|
|
err = s.DelClientStat(tx, oldEmail)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return false, err
|
|
|
|
|
|
}
|
|
|
|
|
|
err = s.DelClientIPs(tx, oldEmail)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return false, err
|
|
|
|
|
|
}
|
2023-03-17 18:51:43 +01:00
|
|
|
|
}
|
2023-03-17 19:37:49 +03:30
|
|
|
|
}
|
2023-07-18 02:40:22 +03:30
|
|
|
|
needRestart := false
|
2023-06-05 00:32:19 +03:30
|
|
|
|
if len(oldEmail) > 0 {
|
2026-05-09 17:38:48 +02:00
|
|
|
|
rt, rterr := s.runtimeFor(oldInbound)
|
|
|
|
|
|
if rterr != nil {
|
|
|
|
|
|
if oldInbound.NodeID != nil {
|
|
|
|
|
|
err = rterr
|
|
|
|
|
|
return false, err
|
|
|
|
|
|
}
|
|
|
|
|
|
needRestart = true
|
|
|
|
|
|
} else if oldInbound.NodeID == nil {
|
|
|
|
|
|
if oldClients[clientIndex].Enable {
|
|
|
|
|
|
err1 := rt.RemoveUser(context.Background(), oldInbound, oldEmail)
|
|
|
|
|
|
if err1 == nil {
|
|
|
|
|
|
logger.Debug("Old client deleted on", rt.Name(), ":", oldEmail)
|
|
|
|
|
|
} else if strings.Contains(err1.Error(), fmt.Sprintf("User %s not found.", oldEmail)) {
|
2024-11-16 14:35:23 +01:00
|
|
|
|
logger.Debug("User is already deleted. Nothing to do more...")
|
|
|
|
|
|
} else {
|
2026-05-09 17:38:48 +02:00
|
|
|
|
logger.Debug("Error in deleting client on", rt.Name(), ":", err1)
|
2024-11-16 14:35:23 +01:00
|
|
|
|
needRestart = true
|
|
|
|
|
|
}
|
2024-09-02 10:26:19 +02:00
|
|
|
|
}
|
2026-05-09 17:38:48 +02:00
|
|
|
|
if clients[0].Enable {
|
|
|
|
|
|
cipher := ""
|
|
|
|
|
|
if oldInbound.Protocol == "shadowsocks" {
|
|
|
|
|
|
cipher = oldSettings["method"].(string)
|
|
|
|
|
|
}
|
|
|
|
|
|
err1 := rt.AddUser(context.Background(), oldInbound, map[string]any{
|
|
|
|
|
|
"email": clients[0].Email,
|
|
|
|
|
|
"id": clients[0].ID,
|
|
|
|
|
|
"security": clients[0].Security,
|
|
|
|
|
|
"flow": clients[0].Flow,
|
|
|
|
|
|
"auth": clients[0].Auth,
|
|
|
|
|
|
"password": clients[0].Password,
|
|
|
|
|
|
"cipher": cipher,
|
|
|
|
|
|
})
|
|
|
|
|
|
if err1 == nil {
|
|
|
|
|
|
logger.Debug("Client edited on", rt.Name(), ":", clients[0].Email)
|
|
|
|
|
|
} else {
|
|
|
|
|
|
logger.Debug("Error in adding client on", rt.Name(), ":", err1)
|
|
|
|
|
|
needRestart = true
|
|
|
|
|
|
}
|
2023-07-27 11:58:12 +03:30
|
|
|
|
}
|
2026-05-09 17:38:48 +02:00
|
|
|
|
} else {
|
|
|
|
|
|
if err1 := rt.UpdateInbound(context.Background(), oldInbound, oldInbound); err1 != nil {
|
|
|
|
|
|
err = err1
|
|
|
|
|
|
return false, err
|
2023-06-05 00:32:19 +03:30
|
|
|
|
}
|
|
|
|
|
|
}
|
2023-07-27 11:58:12 +03:30
|
|
|
|
} else {
|
|
|
|
|
|
logger.Debug("Client old email not found")
|
|
|
|
|
|
needRestart = true
|
2023-06-05 00:32:19 +03:30
|
|
|
|
}
|
|
|
|
|
|
return needRestart, tx.Save(oldInbound).Error
|
2023-03-17 19:37:49 +03:30
|
|
|
|
}
|
|
|
|
|
|
|
2026-05-09 17:38:48 +02:00
|
|
|
|
// resetGracePeriodMs is the window, in milliseconds, after an inbound's
// LastTrafficResetTime during which traffic counters reported by a remote
// node are treated with suspicion: larger up/down totals from the node are
// ignored so a just-reset counter is not immediately clobbered by a stale
// snapshot. Compared against time.Now().UnixMilli() deltas in
// setRemoteTrafficLocked.
const resetGracePeriodMs int64 = 30000
|
|
|
|
|
|
|
2026-05-10 16:25:23 +02:00
|
|
|
|
func (s *InboundService) SetRemoteTraffic(nodeID int, snap *runtime.TrafficSnapshot) (bool, error) {
|
|
|
|
|
|
var structuralChange bool
|
|
|
|
|
|
err := submitTrafficWrite(func() error {
|
|
|
|
|
|
var inner error
|
|
|
|
|
|
structuralChange, inner = s.setRemoteTrafficLocked(nodeID, snap)
|
|
|
|
|
|
return inner
|
|
|
|
|
|
})
|
|
|
|
|
|
return structuralChange, err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// setRemoteTrafficLocked reconciles the central database with a traffic
// snapshot reported by node nodeID. It must only run on the serialized
// traffic-write queue (see SetRemoteTraffic). Reconciliation is three-way:
// snapshot inbounds are created or updated centrally, central inbounds absent
// from the snapshot are deleted, and per-client traffic rows are synced the
// same way. Returns true when a structural change occurred (anything beyond
// pure up/down counter movement).
func (s *InboundService) setRemoteTrafficLocked(nodeID int, snap *runtime.TrafficSnapshot) (bool, error) {
	// Nothing to do without a snapshot or a valid node id.
	if snap == nil || nodeID <= 0 {
		return false, nil
	}
	db := database.GetDB()
	now := time.Now().UnixMilli()

	// Load every central inbound owned by this node and index it by tag,
	// the stable identity shared with the snapshot.
	var central []model.Inbound
	if err := db.Model(model.Inbound{}).
		Where("node_id = ?", nodeID).
		Find(&central).Error; err != nil {
		return false, err
	}
	tagToCentral := make(map[string]*model.Inbound, len(central))
	for i := range central {
		tagToCentral[central[i].Tag] = &central[i]
	}

	// Bulk-load the client traffic rows for those inbounds up front so the
	// per-client loop below does map lookups instead of per-row queries.
	var centralClientStats []xray.ClientTraffic
	if len(central) > 0 {
		ids := make([]int, 0, len(central))
		for i := range central {
			ids = append(ids, central[i].Id)
		}
		if err := db.Model(xray.ClientTraffic{}).
			Where("inbound_id IN ?", ids).
			Find(&centralClientStats).Error; err != nil {
			return false, err
		}
	}
	// Client rows are keyed by (inbound id, email).
	type csKey struct {
		inboundID int
		email     string
	}
	centralCS := make(map[csKey]*xray.ClientTraffic, len(centralClientStats))
	for i := range centralClientStats {
		centralCS[csKey{centralClientStats[i].InboundId, centralClientStats[i].Email}] = &centralClientStats[i]
	}

	// Owner for inbounds created from the snapshot: reuse the existing
	// owner when possible, otherwise the first panel user (fallback id 1).
	var defaultUserId int
	if len(central) > 0 {
		defaultUserId = central[0].UserId
	} else {
		var u model.User
		if err := db.Model(model.User{}).Order("id asc").First(&u).Error; err == nil {
			defaultUserId = u.Id
		} else {
			defaultUserId = 1
		}
	}

	// All mutations happen in one transaction; rollback unless we reach the
	// explicit commit at the bottom.
	tx := db.Begin()
	committed := false
	defer func() {
		if !committed {
			tx.Rollback()
		}
	}()

	structuralChange := false

	// Pass 1: create/update central inbounds from the snapshot.
	snapTags := make(map[string]struct{}, len(snap.Inbounds))
	for _, snapIb := range snap.Inbounds {
		if snapIb == nil {
			continue
		}
		snapTags[snapIb.Tag] = struct{}{}

		c, ok := tagToCentral[snapIb.Tag]
		if !ok {
			// Unknown tag: adopt the node's inbound into the central DB.
			newIb := model.Inbound{
				UserId:         defaultUserId,
				NodeID:         &nodeID,
				Tag:            snapIb.Tag,
				Listen:         snapIb.Listen,
				Port:           snapIb.Port,
				Protocol:       snapIb.Protocol,
				Settings:       snapIb.Settings,
				StreamSettings: snapIb.StreamSettings,
				Sniffing:       snapIb.Sniffing,
				TrafficReset:   snapIb.TrafficReset,
				Enable:         snapIb.Enable,
				Remark:         snapIb.Remark,
				Total:          snapIb.Total,
				ExpiryTime:     snapIb.ExpiryTime,
				Up:             snapIb.Up,
				Down:           snapIb.Down,
				AllTime:        snapIb.AllTime,
			}
			// Creation failure is logged but non-fatal; the remaining
			// snapshot entries still get reconciled.
			if err := tx.Create(&newIb).Error; err != nil {
				logger.Warning("setRemoteTraffic: create central inbound for tag", snapIb.Tag, "failed:", err)
				continue
			}
			tagToCentral[snapIb.Tag] = &newIb
			structuralChange = true
			continue
		}

		// Inside the reset grace window stale (larger) counters from the
		// node must not overwrite a freshly reset row.
		inGrace := c.LastTrafficResetTime > 0 && now-c.LastTrafficResetTime < resetGracePeriodMs

		updates := map[string]any{
			"enable":          snapIb.Enable,
			"remark":          snapIb.Remark,
			"listen":          snapIb.Listen,
			"port":            snapIb.Port,
			"protocol":        snapIb.Protocol,
			"total":           snapIb.Total,
			"expiry_time":     snapIb.ExpiryTime,
			"settings":        snapIb.Settings,
			"stream_settings": snapIb.StreamSettings,
			"sniffing":        snapIb.Sniffing,
			"traffic_reset":   snapIb.TrafficReset,
		}
		// Counters are taken from the snapshot unless the grace window is
		// active AND the snapshot still carries pre-reset (larger) totals.
		if !inGrace || (snapIb.Up+snapIb.Down) <= (c.Up+c.Down) {
			updates["up"] = snapIb.Up
			updates["down"] = snapIb.Down
		}
		// all_time is monotonic: only ever move it forward.
		if snapIb.AllTime > c.AllTime {
			updates["all_time"] = snapIb.AllTime
		}

		// Any config-level divergence means callers may need to re-render
		// dependent state (e.g. subscriptions).
		if c.Settings != snapIb.Settings ||
			c.Remark != snapIb.Remark ||
			c.Listen != snapIb.Listen ||
			c.Port != snapIb.Port ||
			c.Total != snapIb.Total ||
			c.ExpiryTime != snapIb.ExpiryTime ||
			c.Enable != snapIb.Enable {
			structuralChange = true
		}

		if err := tx.Model(model.Inbound{}).
			Where("id = ?", c.Id).
			Updates(updates).Error; err != nil {
			return false, err
		}
	}

	// Pass 2: delete central inbounds (and their client rows) that the node
	// no longer reports.
	for _, c := range central {
		if _, kept := snapTags[c.Tag]; kept {
			continue
		}
		if err := tx.Where("inbound_id = ?", c.Id).
			Delete(&xray.ClientTraffic{}).Error; err != nil {
			return false, err
		}
		if err := tx.Where("id = ?", c.Id).
			Delete(&model.Inbound{}).Error; err != nil {
			return false, err
		}
		delete(tagToCentral, c.Tag)
		structuralChange = true
	}

	// Pass 3: sync per-client traffic rows for every surviving inbound.
	for _, snapIb := range snap.Inbounds {
		if snapIb == nil {
			continue
		}
		c, ok := tagToCentral[snapIb.Tag]
		if !ok {
			continue
		}
		inGrace := c.LastTrafficResetTime > 0 && now-c.LastTrafficResetTime < resetGracePeriodMs

		snapEmails := make(map[string]struct{}, len(snapIb.ClientStats))
		for _, cs := range snapIb.ClientStats {
			snapEmails[cs.Email] = struct{}{}

			existing := centralCS[csKey{c.Id, cs.Email}]
			if existing == nil {
				// New client on the node: mirror it centrally as-is.
				if err := tx.Create(&xray.ClientTraffic{
					InboundId:  c.Id,
					Email:      cs.Email,
					Enable:     cs.Enable,
					Total:      cs.Total,
					ExpiryTime: cs.ExpiryTime,
					Reset:      cs.Reset,
					Up:         cs.Up,
					Down:       cs.Down,
					AllTime:    cs.AllTime,
					LastOnline: cs.LastOnline,
				}).Error; err != nil {
					return false, err
				}
				structuralChange = true
				continue
			}

			if existing.Enable != cs.Enable ||
				existing.Total != cs.Total ||
				existing.ExpiryTime != cs.ExpiryTime ||
				existing.Reset != cs.Reset {
				structuralChange = true
			}

			// all_time is monotonic here too.
			allTime := existing.AllTime
			if cs.AllTime > allTime {
				allTime = cs.AllTime
			}

			// Grace window + non-zero reported counters: update metadata
			// only, leaving the freshly reset up/down untouched.
			if inGrace && cs.Up+cs.Down > 0 {
				if err := tx.Exec(
					`UPDATE client_traffics
			SET enable = ?, total = ?, expiry_time = ?, reset = ?, all_time = ?
			WHERE inbound_id = ? AND email = ?`,
					cs.Enable, cs.Total, cs.ExpiryTime, cs.Reset, allTime, c.Id, cs.Email,
				).Error; err != nil {
					return false, err
				}
				continue
			}

			// Full update including counters; last_online only moves
			// forward via MAX().
			if err := tx.Exec(
				`UPDATE client_traffics
			SET up = ?, down = ?, enable = ?, total = ?, expiry_time = ?, reset = ?,
			all_time = ?, last_online = MAX(last_online, ?)
			WHERE inbound_id = ? AND email = ?`,
				cs.Up, cs.Down, cs.Enable, cs.Total, cs.ExpiryTime, cs.Reset, allTime,
				cs.LastOnline, c.Id, cs.Email,
			).Error; err != nil {
				return false, err
			}
		}

		// Remove central client rows the node no longer reports for this
		// inbound. NOTE(review): this scans the whole centralCS map once per
		// inbound (O(inbounds x clients)); fine for typical sizes but worth
		// revisiting for very large deployments.
		for k, existing := range centralCS {
			if k.inboundID != c.Id {
				continue
			}
			if _, kept := snapEmails[k.email]; kept {
				continue
			}
			if err := tx.Where("inbound_id = ? AND email = ?", c.Id, existing.Email).
				Delete(&xray.ClientTraffic{}).Error; err != nil {
				return false, err
			}
			structuralChange = true
		}
	}

	if err := tx.Commit().Error; err != nil {
		return false, err
	}
	committed = true

	// Online-client bookkeeping is in-memory only, so it happens after the
	// commit succeeded.
	if p != nil {
		p.SetNodeOnlineClients(nodeID, snap.OnlineEmails)
	}

	return structuralChange, nil
}
|
|
|
|
|
|
|
|
|
|
|
|
func (s *InboundService) AddTraffic(inboundTraffics []*xray.Traffic, clientTraffics []*xray.ClientTraffic) (needRestart bool, clientsDisabled bool, err error) {
|
|
|
|
|
|
err = submitTrafficWrite(func() error {
|
|
|
|
|
|
var inner error
|
|
|
|
|
|
needRestart, clientsDisabled, inner = s.addTrafficLocked(inboundTraffics, clientTraffics)
|
|
|
|
|
|
return inner
|
|
|
|
|
|
})
|
|
|
|
|
|
return
|
2026-05-09 17:38:48 +02:00
|
|
|
|
}
|
|
|
|
|
|
|
2026-05-10 16:25:23 +02:00
|
|
|
|
// addTrafficLocked performs one traffic tick inside a single transaction:
// it accumulates inbound and client deltas, then runs the maintenance
// sweeps (auto-renew, disable depleted clients/inbounds). Must only run on
// the serialized traffic-write queue. Returns (needRestart,
// clientsDisabled, error).
//
// The deferred commit/rollback is keyed on the shared `err` variable: the
// transaction commits only if the LAST assignment to err was nil.
// NOTE(review): when one of the maintenance sweeps fails, the error is only
// logged and the function returns nil — but if the FINAL sweep fails, the
// deferred func rolls back the whole tick (including the already-recorded
// traffic) while still reporting success. Confirm this silent rollback is
// intended.
func (s *InboundService) addTrafficLocked(inboundTraffics []*xray.Traffic, clientTraffics []*xray.ClientTraffic) (bool, bool, error) {
	var err error
	db := database.GetDB()
	tx := db.Begin()

	defer func() {
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
	}()
	// Record raw deltas first; either failure aborts the tick.
	err = s.addInboundTraffic(tx, inboundTraffics)
	if err != nil {
		return false, false, err
	}
	err = s.addClientTraffic(tx, clientTraffics)
	if err != nil {
		return false, false, err
	}

	// Maintenance sweeps are best-effort: errors are logged, not returned.
	needRestart0, count, err := s.autoRenewClients(tx)
	if err != nil {
		logger.Warning("Error in renew clients:", err)
	} else if count > 0 {
		logger.Debugf("%v clients renewed", count)
	}

	disabledClientsCount := int64(0)
	needRestart1, count, err := s.disableInvalidClients(tx)
	if err != nil {
		logger.Warning("Error in disabling invalid clients:", err)
	} else if count > 0 {
		logger.Debugf("%v clients disabled", count)
		disabledClientsCount = count
	}

	needRestart2, count, err := s.disableInvalidInbounds(tx)
	if err != nil {
		logger.Warning("Error in disabling invalid inbounds:", err)
	} else if count > 0 {
		logger.Debugf("%v inbounds disabled", count)
	}
	return needRestart0 || needRestart1 || needRestart2, disabledClientsCount > 0, nil
}
|
|
|
|
|
|
|
|
|
|
|
|
func (s *InboundService) addInboundTraffic(tx *gorm.DB, traffics []*xray.Traffic) error {
|
|
|
|
|
|
if len(traffics) == 0 {
|
|
|
|
|
|
return nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
var err error
|
|
|
|
|
|
|
|
|
|
|
|
for _, traffic := range traffics {
|
|
|
|
|
|
if traffic.IsInbound {
|
2026-05-10 16:25:23 +02:00
|
|
|
|
err = tx.Model(&model.Inbound{}).Where("tag = ? AND node_id IS NULL", traffic.Tag).
|
2025-03-12 20:13:51 +01:00
|
|
|
|
Updates(map[string]any{
|
2025-08-28 02:40:50 +03:30
|
|
|
|
"up": gorm.Expr("up + ?", traffic.Up),
|
|
|
|
|
|
"down": gorm.Expr("down + ?", traffic.Down),
|
|
|
|
|
|
"all_time": gorm.Expr("COALESCE(all_time, 0) + ?", traffic.Up+traffic.Down),
|
2023-08-26 15:19:51 +03:30
|
|
|
|
}).Error
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
return nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func (s *InboundService) addClientTraffic(tx *gorm.DB, traffics []*xray.ClientTraffic) (err error) {
|
|
|
|
|
|
if len(traffics) == 0 {
|
2023-12-04 19:13:21 +01:00
|
|
|
|
// Empty onlineUsers
|
|
|
|
|
|
if p != nil {
|
2026-01-05 05:50:40 +01:00
|
|
|
|
p.SetOnlineClients(make([]string, 0))
|
2023-12-04 19:13:21 +01:00
|
|
|
|
}
|
2023-08-26 15:19:51 +03:30
|
|
|
|
return nil
|
|
|
|
|
|
}
|
2023-04-09 23:13:18 +03:30
|
|
|
|
|
2026-01-05 05:50:40 +01:00
|
|
|
|
onlineClients := make([]string, 0)
|
2023-12-04 19:13:21 +01:00
|
|
|
|
|
2023-04-24 14:04:05 +03:30
|
|
|
|
emails := make([]string, 0, len(traffics))
|
|
|
|
|
|
for _, traffic := range traffics {
|
|
|
|
|
|
emails = append(emails, traffic.Email)
|
|
|
|
|
|
}
|
|
|
|
|
|
dbClientTraffics := make([]*xray.ClientTraffic, 0, len(traffics))
|
2026-05-10 16:25:23 +02:00
|
|
|
|
err = tx.Model(xray.ClientTraffic{}).
|
|
|
|
|
|
Where("email IN (?) AND inbound_id IN (?)", emails,
|
|
|
|
|
|
tx.Model(&model.Inbound{}).Select("id").Where("node_id IS NULL")).
|
|
|
|
|
|
Find(&dbClientTraffics).Error
|
2023-04-24 14:04:05 +03:30
|
|
|
|
if err != nil {
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-07-27 11:58:12 +03:30
|
|
|
|
// Avoid empty slice error
|
|
|
|
|
|
if len(dbClientTraffics) == 0 {
|
|
|
|
|
|
return nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-04-24 14:04:05 +03:30
|
|
|
|
dbClientTraffics, err = s.adjustTraffics(tx, dbClientTraffics)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-05-05 18:27:49 +03:00
|
|
|
|
// Index by email for O(N) merge — the previous nested loop was O(N²)
|
|
|
|
|
|
// and dominated each cron tick on inbounds with thousands of active
|
|
|
|
|
|
// clients (7500 × 7500 = 56M string comparisons every 10 seconds).
|
|
|
|
|
|
trafficByEmail := make(map[string]*xray.ClientTraffic, len(traffics))
|
|
|
|
|
|
for i := range traffics {
|
|
|
|
|
|
if traffics[i] != nil {
|
|
|
|
|
|
trafficByEmail[traffics[i].Email] = traffics[i]
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
now := time.Now().UnixMilli()
|
2023-04-24 14:04:05 +03:30
|
|
|
|
for dbTraffic_index := range dbClientTraffics {
|
2026-05-05 18:27:49 +03:00
|
|
|
|
t, ok := trafficByEmail[dbClientTraffics[dbTraffic_index].Email]
|
|
|
|
|
|
if !ok {
|
|
|
|
|
|
continue
|
|
|
|
|
|
}
|
|
|
|
|
|
dbClientTraffics[dbTraffic_index].Up += t.Up
|
|
|
|
|
|
dbClientTraffics[dbTraffic_index].Down += t.Down
|
|
|
|
|
|
dbClientTraffics[dbTraffic_index].AllTime += t.Up + t.Down
|
|
|
|
|
|
if t.Up+t.Down > 0 {
|
|
|
|
|
|
onlineClients = append(onlineClients, t.Email)
|
|
|
|
|
|
dbClientTraffics[dbTraffic_index].LastOnline = now
|
2023-04-24 14:04:05 +03:30
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-12-04 19:13:21 +01:00
|
|
|
|
// Set onlineUsers
|
|
|
|
|
|
p.SetOnlineClients(onlineClients)
|
|
|
|
|
|
|
2023-04-24 14:04:05 +03:30
|
|
|
|
err = tx.Save(dbClientTraffics).Error
|
2023-04-09 23:13:18 +03:30
|
|
|
|
if err != nil {
|
|
|
|
|
|
logger.Warning("AddClientTraffic update data ", err)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
return nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-04-24 14:04:05 +03:30
|
|
|
|
// adjustTraffics converts "start on first use" expiry markers into absolute
// deadlines. A negative ExpiryTime on a client traffic row encodes a
// relative duration; on the client's first traffic this is rewritten — in
// both the traffic row and the inbound's settings JSON — to
// now - storedNegativeValue, i.e. an absolute epoch-ms deadline. Returns
// the (mutated in place) slice. Runs inside the caller's transaction.
func (s *InboundService) adjustTraffics(tx *gorm.DB, dbClientTraffics []*xray.ClientTraffic) ([]*xray.ClientTraffic, error) {
	// Collect only inbounds that actually host a relative-expiry client.
	inboundIds := make([]int, 0, len(dbClientTraffics))
	for _, dbClientTraffic := range dbClientTraffics {
		if dbClientTraffic.ExpiryTime < 0 {
			inboundIds = append(inboundIds, dbClientTraffic.InboundId)
		}
	}

	if len(inboundIds) > 0 {
		var inbounds []*model.Inbound
		err := tx.Model(model.Inbound{}).Where("id IN (?)", inboundIds).Find(&inbounds).Error
		if err != nil {
			return nil, err
		}
		for inbound_index := range inbounds {
			settings := map[string]any{}
			// NOTE(review): Unmarshal error is ignored; malformed settings
			// fall through with an empty map and are left unchanged below.
			json.Unmarshal([]byte(inbounds[inbound_index].Settings), &settings)
			clients, ok := settings["clients"].([]any)
			if ok {
				var newClients []any
				for client_index := range clients {
					// NOTE(review): unchecked assertions — a non-object
					// client entry or a missing/non-numeric expiryTime
					// panics here; confirm settings are always well-formed.
					c := clients[client_index].(map[string]any)
					for traffic_index := range dbClientTraffics {
						if dbClientTraffics[traffic_index].ExpiryTime < 0 && c["email"] == dbClientTraffics[traffic_index].Email {
							// JSON numbers decode as float64; the stored
							// value is negative, so subtracting it adds the
							// duration to "now".
							oldExpiryTime := c["expiryTime"].(float64)
							newExpiryTime := (time.Now().Unix() * 1000) - int64(oldExpiryTime)
							c["expiryTime"] = newExpiryTime
							c["updated_at"] = time.Now().Unix() * 1000
							dbClientTraffics[traffic_index].ExpiryTime = newExpiryTime
							break
						}
					}
					// Backfill created_at and updated_at
					if _, ok := c["created_at"]; !ok {
						c["created_at"] = time.Now().Unix() * 1000
					}
					c["updated_at"] = time.Now().Unix() * 1000
					newClients = append(newClients, any(c))
				}
				settings["clients"] = newClients
				modifiedSettings, err := json.MarshalIndent(settings, "", " ")
				if err != nil {
					return nil, err
				}

				inbounds[inbound_index].Settings = string(modifiedSettings)
			}
		}
		// Save failure is logged but non-fatal; the adjusted traffic rows
		// are still returned to the caller.
		err = tx.Save(inbounds).Error
		if err != nil {
			logger.Warning("AddClientTraffic update inbounds ", err)
			logger.Error(inbounds)
		}
	}

	return dbClientTraffics, nil
}
|
|
|
|
|
|
|
2023-12-04 19:20:16 +01:00
|
|
|
|
func (s *InboundService) autoRenewClients(tx *gorm.DB) (bool, int64, error) {
|
|
|
|
|
|
// check for time expired
|
|
|
|
|
|
var traffics []*xray.ClientTraffic
|
|
|
|
|
|
now := time.Now().Unix() * 1000
|
|
|
|
|
|
var err, err1 error
|
|
|
|
|
|
|
2026-05-10 16:25:23 +02:00
|
|
|
|
err = tx.Model(xray.ClientTraffic{}).
|
|
|
|
|
|
Where("reset > 0 and expiry_time > 0 and expiry_time <= ?", now).
|
|
|
|
|
|
Where("inbound_id IN (?)", tx.Model(&model.Inbound{}).Select("id").Where("node_id IS NULL")).
|
|
|
|
|
|
Find(&traffics).Error
|
2023-12-04 19:20:16 +01:00
|
|
|
|
if err != nil {
|
|
|
|
|
|
return false, 0, err
|
|
|
|
|
|
}
|
|
|
|
|
|
// return if there is no client to renew
|
|
|
|
|
|
if len(traffics) == 0 {
|
|
|
|
|
|
return false, 0, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
var inbound_ids []int
|
|
|
|
|
|
var inbounds []*model.Inbound
|
|
|
|
|
|
needRestart := false
|
|
|
|
|
|
var clientsToAdd []struct {
|
|
|
|
|
|
protocol string
|
|
|
|
|
|
tag string
|
2025-03-12 20:13:51 +01:00
|
|
|
|
client map[string]any
|
2023-12-04 19:20:16 +01:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
for _, traffic := range traffics {
|
|
|
|
|
|
inbound_ids = append(inbound_ids, traffic.InboundId)
|
|
|
|
|
|
}
|
2026-05-05 18:27:49 +03:00
|
|
|
|
// Dedupe so an inbound hosting N expired clients is fetched and saved once
|
|
|
|
|
|
// per tick instead of N times across chunk boundaries.
|
|
|
|
|
|
inbound_ids = uniqueInts(inbound_ids)
|
|
|
|
|
|
// Chunked to stay under SQLite's bind-variable limit when many inbounds
|
|
|
|
|
|
// are touched in a single tick.
|
|
|
|
|
|
for _, batch := range chunkInts(inbound_ids, sqliteMaxVars) {
|
|
|
|
|
|
var page []*model.Inbound
|
|
|
|
|
|
if err = tx.Model(model.Inbound{}).Where("id IN ?", batch).Find(&page).Error; err != nil {
|
|
|
|
|
|
return false, 0, err
|
|
|
|
|
|
}
|
|
|
|
|
|
inbounds = append(inbounds, page...)
|
2023-12-04 19:20:16 +01:00
|
|
|
|
}
|
|
|
|
|
|
for inbound_index := range inbounds {
|
2025-03-12 20:13:51 +01:00
|
|
|
|
settings := map[string]any{}
|
2023-12-04 19:20:16 +01:00
|
|
|
|
json.Unmarshal([]byte(inbounds[inbound_index].Settings), &settings)
|
2025-03-12 20:13:51 +01:00
|
|
|
|
clients := settings["clients"].([]any)
|
2023-12-04 19:20:16 +01:00
|
|
|
|
for client_index := range clients {
|
2025-03-12 20:13:51 +01:00
|
|
|
|
c := clients[client_index].(map[string]any)
|
2023-12-04 19:20:16 +01:00
|
|
|
|
for traffic_index, traffic := range traffics {
|
|
|
|
|
|
if traffic.Email == c["email"].(string) {
|
|
|
|
|
|
newExpiryTime := traffic.ExpiryTime
|
|
|
|
|
|
for newExpiryTime < now {
|
|
|
|
|
|
newExpiryTime += (int64(traffic.Reset) * 86400000)
|
|
|
|
|
|
}
|
|
|
|
|
|
c["expiryTime"] = newExpiryTime
|
|
|
|
|
|
traffics[traffic_index].ExpiryTime = newExpiryTime
|
|
|
|
|
|
traffics[traffic_index].Down = 0
|
|
|
|
|
|
traffics[traffic_index].Up = 0
|
|
|
|
|
|
if !traffic.Enable {
|
|
|
|
|
|
traffics[traffic_index].Enable = true
|
2026-05-13 02:15:52 +03:00
|
|
|
|
c["enable"] = true
|
2023-12-04 19:20:16 +01:00
|
|
|
|
clientsToAdd = append(clientsToAdd,
|
|
|
|
|
|
struct {
|
|
|
|
|
|
protocol string
|
|
|
|
|
|
tag string
|
2025-03-12 20:13:51 +01:00
|
|
|
|
client map[string]any
|
2023-12-04 19:20:16 +01:00
|
|
|
|
}{
|
|
|
|
|
|
protocol: string(inbounds[inbound_index].Protocol),
|
|
|
|
|
|
tag: inbounds[inbound_index].Tag,
|
|
|
|
|
|
client: c,
|
|
|
|
|
|
})
|
|
|
|
|
|
}
|
2025-03-12 20:13:51 +01:00
|
|
|
|
clients[client_index] = any(c)
|
2023-12-04 19:20:16 +01:00
|
|
|
|
break
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
settings["clients"] = clients
|
|
|
|
|
|
newSettings, err := json.MarshalIndent(settings, "", " ")
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return false, 0, err
|
|
|
|
|
|
}
|
|
|
|
|
|
inbounds[inbound_index].Settings = string(newSettings)
|
|
|
|
|
|
}
|
|
|
|
|
|
err = tx.Save(inbounds).Error
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return false, 0, err
|
|
|
|
|
|
}
|
|
|
|
|
|
err = tx.Save(traffics).Error
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return false, 0, err
|
|
|
|
|
|
}
|
|
|
|
|
|
if p != nil {
|
|
|
|
|
|
err1 = s.xrayApi.Init(p.GetAPIPort())
|
|
|
|
|
|
if err1 != nil {
|
|
|
|
|
|
return true, int64(len(traffics)), nil
|
|
|
|
|
|
}
|
|
|
|
|
|
for _, clientToAdd := range clientsToAdd {
|
|
|
|
|
|
err1 = s.xrayApi.AddUser(clientToAdd.protocol, clientToAdd.tag, clientToAdd.client)
|
|
|
|
|
|
if err1 != nil {
|
|
|
|
|
|
needRestart = true
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
s.xrayApi.Close()
|
|
|
|
|
|
}
|
|
|
|
|
|
return needRestart, int64(len(traffics)), nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-08-26 15:19:51 +03:30
|
|
|
|
func (s *InboundService) disableInvalidInbounds(tx *gorm.DB) (bool, int64, error) {
|
2023-02-09 22:48:06 +03:30
|
|
|
|
now := time.Now().Unix() * 1000
|
2023-07-18 02:40:22 +03:30
|
|
|
|
needRestart := false
|
|
|
|
|
|
|
|
|
|
|
|
if p != nil {
|
|
|
|
|
|
var tags []string
|
2023-08-26 15:19:51 +03:30
|
|
|
|
err := tx.Table("inbounds").
|
2023-07-18 02:40:22 +03:30
|
|
|
|
Select("inbounds.tag").
|
2026-05-10 16:25:23 +02:00
|
|
|
|
Where("((total > 0 and up + down >= total) or (expiry_time > 0 and expiry_time <= ?)) and enable = ? and node_id IS NULL", now, true).
|
2023-07-18 02:40:22 +03:30
|
|
|
|
Scan(&tags).Error
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return false, 0, err
|
|
|
|
|
|
}
|
|
|
|
|
|
s.xrayApi.Init(p.GetAPIPort())
|
|
|
|
|
|
for _, tag := range tags {
|
2023-07-27 11:58:12 +03:30
|
|
|
|
err1 := s.xrayApi.DelInbound(tag)
|
2024-03-11 11:46:54 +03:30
|
|
|
|
if err1 == nil {
|
2023-07-18 02:40:22 +03:30
|
|
|
|
logger.Debug("Inbound disabled by api:", tag)
|
|
|
|
|
|
} else {
|
2024-11-16 14:35:23 +01:00
|
|
|
|
logger.Debug("Error in disabling inbound by api:", err1)
|
|
|
|
|
|
needRestart = true
|
2023-07-18 02:40:22 +03:30
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
s.xrayApi.Close()
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-08-26 15:19:51 +03:30
|
|
|
|
result := tx.Model(model.Inbound{}).
|
2026-05-10 16:25:23 +02:00
|
|
|
|
Where("((total > 0 and up + down >= total) or (expiry_time > 0 and expiry_time <= ?)) and enable = ? and node_id IS NULL", now, true).
|
2023-02-09 22:48:06 +03:30
|
|
|
|
Update("enable", false)
|
|
|
|
|
|
err := result.Error
|
|
|
|
|
|
count := result.RowsAffected
|
2023-07-18 02:40:22 +03:30
|
|
|
|
return needRestart, count, err
|
2023-02-09 22:48:06 +03:30
|
|
|
|
}
|
2023-05-06 00:22:39 +04:30
|
|
|
|
|
2023-08-26 15:19:51 +03:30
|
|
|
|
// disableInvalidClients flips the enable flag off for every depleted client
// (traffic quota exhausted or expiry time passed) on locally-managed inbounds
// (node_id IS NULL), best-effort removes them from the running Xray instance
// via its API, and mirrors the new state back into each inbound's JSON
// settings. It returns (needRestart, number of disabled traffic rows, error).
// All reads and writes go through tx, so callers run it inside a transaction.
func (s *InboundService) disableInvalidClients(tx *gorm.DB) (bool, int64, error) {
	// Client timestamps are stored in milliseconds.
	now := time.Now().Unix() * 1000
	needRestart := false

	// Depleted = over quota or expired, and still marked enabled.
	var depletedRows []xray.ClientTraffic
	err := tx.Model(xray.ClientTraffic{}).
		Where("((total > 0 AND up + down >= total) OR (expiry_time > 0 AND expiry_time <= ?)) AND enable = ?", now, true).
		Where("inbound_id IN (?)", tx.Model(&model.Inbound{}).Select("id").Where("node_id IS NULL")).
		Find(&depletedRows).Error
	if err != nil {
		return false, 0, err
	}
	if len(depletedRows) == 0 {
		return false, 0, nil
	}

	// Index depleted rows by lower-cased email for the membership join below.
	rowByEmail := make(map[string]*xray.ClientTraffic, len(depletedRows))
	depletedEmails := make([]string, 0, len(depletedRows))
	for i := range depletedRows {
		if depletedRows[i].Email == "" {
			continue
		}
		rowByEmail[strings.ToLower(depletedRows[i].Email)] = &depletedRows[i]
		depletedEmails = append(depletedEmails, depletedRows[i].Email)
	}

	// Resolve inbound membership only for the depleted emails — pushing the
	// filter into SQLite avoids dragging every panel client through Go for
	// the common case where most clients are healthy.
	var memberships []struct {
		InboundId int
		Tag       string
		Email     string
		SubID     string `gorm:"column:sub_id"`
	}
	if len(depletedEmails) > 0 {
		err = tx.Raw(`
			SELECT inbounds.id AS inbound_id,
			       inbounds.tag AS tag,
			       JSON_EXTRACT(client.value, '$.email') AS email,
			       JSON_EXTRACT(client.value, '$.subId') AS sub_id
			FROM inbounds,
			     JSON_EACH(JSON_EXTRACT(inbounds.settings, '$.clients')) AS client
			WHERE LOWER(JSON_EXTRACT(client.value, '$.email')) IN ?
		`, lowerAll(depletedEmails)).Scan(&memberships).Error
		if err != nil {
			return false, 0, err
		}
	}

	// Discover the row holder's subId per email. Only siblings sharing it
	// get cascaded; legacy data where two identities reuse the same email
	// stays isolated to the row owner.
	holderSub := make(map[string]string, len(rowByEmail))
	for _, m := range memberships {
		// Defensively strip surrounding quotes from JSON_EXTRACT results.
		email := strings.ToLower(strings.Trim(m.Email, "\""))
		row, ok := rowByEmail[email]
		if !ok || m.InboundId != row.InboundId {
			continue
		}
		holderSub[email] = strings.Trim(m.SubID, "\"")
	}

	// Collect every (inbound, tag, email) triple that must be disabled.
	type target struct {
		InboundId int
		Tag       string
		Email     string
	}
	var targets []target
	for _, m := range memberships {
		email := strings.ToLower(strings.Trim(m.Email, "\""))
		row, ok := rowByEmail[email]
		if !ok {
			continue
		}
		expected, hasSub := holderSub[email]
		mSub := strings.Trim(m.SubID, "\"")
		switch {
		case !hasSub || expected == "":
			// No usable subId on the row holder: do not cascade; only the
			// inbound that owns the traffic row is affected.
			if m.InboundId != row.InboundId {
				continue
			}
		case mSub != expected:
			// Same email but a different identity (subId): leave it alone.
			continue
		}
		targets = append(targets, target{
			InboundId: m.InboundId,
			Tag:       m.Tag,
			Email:     strings.Trim(m.Email, "\""),
		})
	}

	// Best-effort removal from the live Xray instance; unexpected API
	// failures escalate to a full restart instead.
	// NOTE(review): p looks like the running local Xray process handle
	// declared elsewhere in this package — confirm its lifecycle.
	if p != nil && len(targets) > 0 {
		s.xrayApi.Init(p.GetAPIPort())
		for _, t := range targets {
			err1 := s.xrayApi.RemoveUser(t.Tag, t.Email)
			if err1 == nil {
				logger.Debug("Client disabled by api:", t.Email)
			} else if strings.Contains(err1.Error(), fmt.Sprintf("User %s not found.", t.Email)) {
				// Already absent from the runtime config; nothing to undo.
				logger.Debug("User is already disabled. Nothing to do more...")
			} else {
				logger.Debug("Error in disabling client by api:", err1)
				needRestart = true
			}
		}
		s.xrayApi.Close()
	}

	// Persist enable=false on the traffic rows (same predicate as the
	// initial SELECT, so count matches depletedRows barring concurrent writes).
	result := tx.Model(xray.ClientTraffic{}).
		Where("((total > 0 and up + down >= total) or (expiry_time > 0 and expiry_time <= ?)) and enable = ?", now, true).
		Where("inbound_id IN (?)", tx.Model(&model.Inbound{}).Select("id").Where("node_id IS NULL")).
		Update("enable", false)
	err = result.Error
	count := result.RowsAffected
	if err != nil {
		return needRestart, count, err
	}

	if len(targets) == 0 {
		return needRestart, count, nil
	}

	// Mirror the disable into each inbound's JSON settings so the panel UI
	// and future syncs agree with the traffic table.
	inboundEmailMap := make(map[int]map[string]struct{})
	for _, t := range targets {
		if inboundEmailMap[t.InboundId] == nil {
			inboundEmailMap[t.InboundId] = make(map[string]struct{})
		}
		inboundEmailMap[t.InboundId][t.Email] = struct{}{}
	}
	inboundIds := make([]int, 0, len(inboundEmailMap))
	for id := range inboundEmailMap {
		inboundIds = append(inboundIds, id)
	}
	var inbounds []*model.Inbound
	if err = tx.Model(model.Inbound{}).Where("id IN ?", inboundIds).Find(&inbounds).Error; err != nil {
		// The authoritative traffic rows are already updated; the JSON
		// mirror is best-effort, so log and keep the successful result.
		logger.Warning("disableInvalidClients fetch inbounds:", err)
		return needRestart, count, nil
	}
	dirty := make([]*model.Inbound, 0, len(inbounds))
	for _, inbound := range inbounds {
		settings := map[string]any{}
		if jsonErr := json.Unmarshal([]byte(inbound.Settings), &settings); jsonErr != nil {
			continue
		}
		clientsRaw, ok := settings["clients"].([]any)
		if !ok {
			continue
		}
		emailSet := inboundEmailMap[inbound.Id]
		changed := false
		for i := range clientsRaw {
			c, ok := clientsRaw[i].(map[string]any)
			if !ok {
				continue
			}
			email, _ := c["email"].(string)
			if _, shouldDisable := emailSet[email]; !shouldDisable {
				continue
			}
			c["enable"] = false
			// Sync quota/expiry from the traffic row so the JSON settings
			// reflect the values that triggered the disable.
			if row, ok := rowByEmail[strings.ToLower(email)]; ok {
				c["totalGB"] = row.Total
				c["expiryTime"] = row.ExpiryTime
			}
			c["updated_at"] = now
			clientsRaw[i] = c
			changed = true
		}
		if !changed {
			continue
		}
		settings["clients"] = clientsRaw
		modifiedSettings, jsonErr := json.MarshalIndent(settings, "", " ")
		if jsonErr != nil {
			continue
		}
		inbound.Settings = string(modifiedSettings)
		dirty = append(dirty, inbound)
	}
	if len(dirty) > 0 {
		if err = tx.Save(dirty).Error; err != nil {
			logger.Warning("disableInvalidClients update inbound settings:", err)
		}
	}

	return needRestart, count, nil
}
|
2023-05-06 00:22:39 +04:30
|
|
|
|
|
2023-12-05 18:13:36 +01:00
|
|
|
|
func (s *InboundService) GetInboundTags() (string, error) {
|
|
|
|
|
|
db := database.GetDB()
|
|
|
|
|
|
var inboundTags []string
|
|
|
|
|
|
err := db.Model(model.Inbound{}).Select("tag").Find(&inboundTags).Error
|
|
|
|
|
|
if err != nil && err != gorm.ErrRecordNotFound {
|
|
|
|
|
|
return "", err
|
|
|
|
|
|
}
|
2023-12-08 19:44:52 +01:00
|
|
|
|
tags, _ := json.Marshal(inboundTags)
|
|
|
|
|
|
return string(tags), nil
|
2023-12-05 18:13:36 +01:00
|
|
|
|
}
|
|
|
|
|
|
|
2026-05-06 00:43:47 +02:00
|
|
|
|
func (s *InboundService) GetClientReverseTags() (string, error) {
|
|
|
|
|
|
db := database.GetDB()
|
2026-05-06 11:43:21 +02:00
|
|
|
|
var inbounds []model.Inbound
|
|
|
|
|
|
err := db.Model(model.Inbound{}).Select("settings").Where("protocol = ?", "vless").Find(&inbounds).Error
|
2026-05-06 00:43:47 +02:00
|
|
|
|
if err != nil && err != gorm.ErrRecordNotFound {
|
|
|
|
|
|
return "[]", err
|
|
|
|
|
|
}
|
2026-05-06 11:43:21 +02:00
|
|
|
|
|
|
|
|
|
|
tagSet := make(map[string]struct{})
|
|
|
|
|
|
for _, inbound := range inbounds {
|
|
|
|
|
|
var settings map[string]any
|
|
|
|
|
|
if err := json.Unmarshal([]byte(inbound.Settings), &settings); err != nil {
|
|
|
|
|
|
continue
|
|
|
|
|
|
}
|
|
|
|
|
|
clients, ok := settings["clients"].([]any)
|
|
|
|
|
|
if !ok {
|
|
|
|
|
|
continue
|
|
|
|
|
|
}
|
|
|
|
|
|
for _, client := range clients {
|
|
|
|
|
|
clientMap, ok := client.(map[string]any)
|
|
|
|
|
|
if !ok {
|
|
|
|
|
|
continue
|
|
|
|
|
|
}
|
|
|
|
|
|
reverse, ok := clientMap["reverse"].(map[string]any)
|
|
|
|
|
|
if !ok {
|
|
|
|
|
|
continue
|
|
|
|
|
|
}
|
|
|
|
|
|
tag, _ := reverse["tag"].(string)
|
|
|
|
|
|
tag = strings.TrimSpace(tag)
|
|
|
|
|
|
if tag != "" {
|
|
|
|
|
|
tagSet[tag] = struct{}{}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
rawTags := make([]string, 0, len(tagSet))
|
|
|
|
|
|
for tag := range tagSet {
|
|
|
|
|
|
rawTags = append(rawTags, tag)
|
|
|
|
|
|
}
|
|
|
|
|
|
sort.Strings(rawTags)
|
|
|
|
|
|
|
2026-05-06 00:43:47 +02:00
|
|
|
|
result, _ := json.Marshal(rawTags)
|
|
|
|
|
|
return string(result), nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-05-06 00:22:39 +04:30
|
|
|
|
func (s *InboundService) MigrationRemoveOrphanedTraffics() {
|
2023-04-13 18:45:18 +03:30
|
|
|
|
db := database.GetDB()
|
|
|
|
|
|
db.Exec(`
|
|
|
|
|
|
DELETE FROM client_traffics
|
|
|
|
|
|
WHERE email NOT IN (
|
|
|
|
|
|
SELECT JSON_EXTRACT(client.value, '$.email')
|
|
|
|
|
|
FROM inbounds,
|
|
|
|
|
|
JSON_EACH(JSON_EXTRACT(inbounds.settings, '$.clients')) AS client
|
|
|
|
|
|
)
|
|
|
|
|
|
`)
|
|
|
|
|
|
}
|
2023-05-06 00:22:39 +04:30
|
|
|
|
|
2026-05-09 17:38:48 +02:00
|
|
|
|
// AddClientStat inserts a per-client accounting row, no-op on email
|
|
|
|
|
|
// conflict. Xray reports traffic per email, so the surviving row acts as
|
|
|
|
|
|
// the shared accumulator for inbounds that re-use the same identity.
|
2023-06-05 00:32:19 +03:30
|
|
|
|
func (s *InboundService) AddClientStat(tx *gorm.DB, inboundId int, client *model.Client) error {
|
2026-05-09 17:38:48 +02:00
|
|
|
|
clientTraffic := xray.ClientTraffic{
|
|
|
|
|
|
InboundId: inboundId,
|
|
|
|
|
|
Email: client.Email,
|
|
|
|
|
|
Total: client.TotalGB,
|
|
|
|
|
|
ExpiryTime: client.ExpiryTime,
|
|
|
|
|
|
Enable: client.Enable,
|
|
|
|
|
|
Reset: client.Reset,
|
|
|
|
|
|
}
|
|
|
|
|
|
return tx.Clauses(clause.OnConflict{Columns: []clause.Column{{Name: "email"}}, DoNothing: true}).
|
|
|
|
|
|
Create(&clientTraffic).Error
|
2023-02-09 22:48:06 +03:30
|
|
|
|
}
|
2023-05-06 00:22:39 +04:30
|
|
|
|
|
2023-12-04 19:20:16 +01:00
|
|
|
|
func (s *InboundService) UpdateClientStat(tx *gorm.DB, email string, client *model.Client) error {
|
|
|
|
|
|
result := tx.Model(xray.ClientTraffic{}).
|
2023-03-17 19:37:49 +03:30
|
|
|
|
Where("email = ?", email).
|
2025-03-12 20:13:51 +01:00
|
|
|
|
Updates(map[string]any{
|
2025-09-16 14:57:31 +02:00
|
|
|
|
"enable": client.Enable,
|
2023-03-17 19:37:49 +03:30
|
|
|
|
"email": client.Email,
|
|
|
|
|
|
"total": client.TotalGB,
|
2023-12-04 19:20:16 +01:00
|
|
|
|
"expiry_time": client.ExpiryTime,
|
2024-03-11 01:01:24 +03:30
|
|
|
|
"reset": client.Reset,
|
|
|
|
|
|
})
|
2023-03-17 19:37:49 +03:30
|
|
|
|
err := result.Error
|
2024-03-12 17:35:17 +03:30
|
|
|
|
return err
|
2023-02-28 23:24:29 +03:30
|
|
|
|
}
|
2023-03-17 18:51:43 +01:00
|
|
|
|
|
|
|
|
|
|
func (s *InboundService) UpdateClientIPs(tx *gorm.DB, oldEmail string, newEmail string) error {
|
|
|
|
|
|
return tx.Model(model.InboundClientIps{}).Where("client_email = ?", oldEmail).Update("client_email", newEmail).Error
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-03-17 19:37:49 +03:30
|
|
|
|
func (s *InboundService) DelClientStat(tx *gorm.DB, email string) error {
|
2023-03-17 19:29:08 +01:00
|
|
|
|
return tx.Where("email = ?", email).Delete(xray.ClientTraffic{}).Error
|
2023-03-17 18:51:43 +01:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func (s *InboundService) DelClientIPs(tx *gorm.DB, email string) error {
|
2023-03-17 19:29:08 +01:00
|
|
|
|
return tx.Where("client_email = ?", email).Delete(model.InboundClientIps{}).Error
|
2023-03-17 19:37:49 +03:30
|
|
|
|
}
|
|
|
|
|
|
|
2023-05-14 22:07:49 +03:30
|
|
|
|
func (s *InboundService) GetClientInboundByTrafficID(trafficId int) (traffic *xray.ClientTraffic, inbound *model.Inbound, err error) {
|
|
|
|
|
|
db := database.GetDB()
|
|
|
|
|
|
var traffics []*xray.ClientTraffic
|
|
|
|
|
|
err = db.Model(xray.ClientTraffic{}).Where("id = ?", trafficId).Find(&traffics).Error
|
|
|
|
|
|
if err != nil {
|
2024-07-08 23:08:00 +02:00
|
|
|
|
logger.Warningf("Error retrieving ClientTraffic with trafficId %d: %v", trafficId, err)
|
2023-05-14 22:07:49 +03:30
|
|
|
|
return nil, nil, err
|
|
|
|
|
|
}
|
|
|
|
|
|
if len(traffics) > 0 {
|
|
|
|
|
|
inbound, err = s.GetInbound(traffics[0].InboundId)
|
|
|
|
|
|
return traffics[0], inbound, err
|
|
|
|
|
|
}
|
|
|
|
|
|
return nil, nil, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-05-05 18:20:56 +03:30
|
|
|
|
func (s *InboundService) GetClientInboundByEmail(email string) (traffic *xray.ClientTraffic, inbound *model.Inbound, err error) {
|
2023-05-05 01:16:43 +03:30
|
|
|
|
db := database.GetDB()
|
2023-05-05 04:34:39 +03:30
|
|
|
|
var traffics []*xray.ClientTraffic
|
|
|
|
|
|
err = db.Model(xray.ClientTraffic{}).Where("email = ?", email).Find(&traffics).Error
|
|
|
|
|
|
if err != nil {
|
2024-07-08 23:08:00 +02:00
|
|
|
|
logger.Warningf("Error retrieving ClientTraffic with email %s: %v", email, err)
|
2023-05-05 18:20:56 +03:30
|
|
|
|
return nil, nil, err
|
2023-05-05 04:34:39 +03:30
|
|
|
|
}
|
|
|
|
|
|
if len(traffics) > 0 {
|
2023-05-05 18:20:56 +03:30
|
|
|
|
inbound, err = s.GetInbound(traffics[0].InboundId)
|
|
|
|
|
|
return traffics[0], inbound, err
|
|
|
|
|
|
}
|
|
|
|
|
|
return nil, nil, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-05-14 22:07:49 +03:30
|
|
|
|
func (s *InboundService) GetClientByEmail(clientEmail string) (*xray.ClientTraffic, *model.Client, error) {
|
|
|
|
|
|
traffic, inbound, err := s.GetClientInboundByEmail(clientEmail)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return nil, nil, err
|
|
|
|
|
|
}
|
|
|
|
|
|
if inbound == nil {
|
|
|
|
|
|
return nil, nil, common.NewError("Inbound Not Found For Email:", clientEmail)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-05-22 18:06:34 +03:30
|
|
|
|
clients, err := s.GetClients(inbound)
|
2023-05-14 22:07:49 +03:30
|
|
|
|
if err != nil {
|
|
|
|
|
|
return nil, nil, err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
for _, client := range clients {
|
|
|
|
|
|
if client.Email == clientEmail {
|
|
|
|
|
|
return traffic, &client, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
return nil, nil, common.NewError("Client Not Found In Inbound For Email:", clientEmail)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2024-04-02 15:04:44 +03:30
|
|
|
|
// SetClientTelegramUserID attaches a Telegram user ID to the client that owns
// the given traffic row and persists the change via UpdateInboundClient.
// Returns whether Xray must be restarted to apply the change.
func (s *InboundService) SetClientTelegramUserID(trafficId int, tgId int64) (bool, error) {
	traffic, inbound, err := s.GetClientInboundByTrafficID(trafficId)
	if err != nil {
		return false, err
	}
	if inbound == nil {
		return false, common.NewError("Inbound Not Found For Traffic ID:", trafficId)
	}

	clientEmail := traffic.Email

	oldClients, err := s.GetClients(inbound)
	if err != nil {
		return false, err
	}

	clientId := ""

	// Resolve the protocol-specific client identifier expected by
	// UpdateInboundClient: password for trojan, email for shadowsocks,
	// UUID otherwise.
	for _, oldClient := range oldClients {
		if oldClient.Email == clientEmail {
			switch inbound.Protocol {
			case "trojan":
				clientId = oldClient.Password
			case "shadowsocks":
				clientId = oldClient.Email
			default:
				clientId = oldClient.ID
			}
			break
		}
	}

	if len(clientId) == 0 {
		return false, common.NewError("Client Not Found For Email:", clientEmail)
	}

	var settings map[string]any
	err = json.Unmarshal([]byte(inbound.Settings), &settings)
	if err != nil {
		return false, err
	}
	clients := settings["clients"].([]any)
	var newClients []any
	for client_index := range clients {
		c := clients[client_index].(map[string]any)
		if c["email"] == clientEmail {
			c["tgId"] = tgId
			// updated_at carries a millisecond timestamp.
			c["updated_at"] = time.Now().Unix() * 1000
			newClients = append(newClients, any(c))
		}
	}
	// Only the modified client is kept in settings; UpdateInboundClient
	// appears to treat inbound.Settings as the delta of clients to update
	// rather than the full list — TODO confirm against its implementation.
	settings["clients"] = newClients
	modifiedSettings, err := json.MarshalIndent(settings, "", " ")
	if err != nil {
		return false, err
	}
	inbound.Settings = string(modifiedSettings)
	needRestart, err := s.UpdateInboundClient(inbound, clientId)
	return needRestart, err
}
|
|
|
|
|
|
|
2024-01-01 18:07:56 +03:00
|
|
|
|
func (s *InboundService) checkIsEnabledByEmail(clientEmail string) (bool, error) {
|
|
|
|
|
|
_, inbound, err := s.GetClientInboundByEmail(clientEmail)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return false, err
|
|
|
|
|
|
}
|
|
|
|
|
|
if inbound == nil {
|
|
|
|
|
|
return false, common.NewError("Inbound Not Found For Email:", clientEmail)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
clients, err := s.GetClients(inbound)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return false, err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
isEnable := false
|
|
|
|
|
|
|
|
|
|
|
|
for _, client := range clients {
|
|
|
|
|
|
if client.Email == clientEmail {
|
|
|
|
|
|
isEnable = client.Enable
|
|
|
|
|
|
break
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
return isEnable, err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2024-03-15 21:13:20 +03:00
|
|
|
|
// ToggleClientEnableByEmail flips the enable flag of the client with the
// given email and persists the change via UpdateInboundClient. It returns
// the client's new enable state, whether Xray must restart, and any error.
func (s *InboundService) ToggleClientEnableByEmail(clientEmail string) (bool, bool, error) {
	_, inbound, err := s.GetClientInboundByEmail(clientEmail)
	if err != nil {
		return false, false, err
	}
	if inbound == nil {
		return false, false, common.NewError("Inbound Not Found For Email:", clientEmail)
	}

	oldClients, err := s.GetClients(inbound)
	if err != nil {
		return false, false, err
	}

	clientId := ""
	clientOldEnabled := false

	// Resolve the protocol-specific client identifier expected by
	// UpdateInboundClient, and record the current enable state so it can
	// be inverted below.
	for _, oldClient := range oldClients {
		if oldClient.Email == clientEmail {
			switch inbound.Protocol {
			case "trojan":
				clientId = oldClient.Password
			case "shadowsocks":
				clientId = oldClient.Email
			default:
				clientId = oldClient.ID
			}
			clientOldEnabled = oldClient.Enable
			break
		}
	}

	if len(clientId) == 0 {
		return false, false, common.NewError("Client Not Found For Email:", clientEmail)
	}

	var settings map[string]any
	err = json.Unmarshal([]byte(inbound.Settings), &settings)
	if err != nil {
		return false, false, err
	}
	clients := settings["clients"].([]any)
	var newClients []any
	for client_index := range clients {
		c := clients[client_index].(map[string]any)
		if c["email"] == clientEmail {
			c["enable"] = !clientOldEnabled
			// updated_at carries a millisecond timestamp.
			c["updated_at"] = time.Now().Unix() * 1000
			newClients = append(newClients, any(c))
		}
	}
	// Only the modified client is kept in settings; UpdateInboundClient
	// appears to treat inbound.Settings as the delta of clients to update
	// rather than the full list — TODO confirm against its implementation.
	settings["clients"] = newClients
	modifiedSettings, err := json.MarshalIndent(settings, "", " ")
	if err != nil {
		return false, false, err
	}
	inbound.Settings = string(modifiedSettings)

	needRestart, err := s.UpdateInboundClient(inbound, clientId)
	if err != nil {
		return false, needRestart, err
	}

	return !clientOldEnabled, needRestart, nil
}
|
|
|
|
|
|
|
2025-09-28 22:00:16 +03:00
|
|
|
|
// SetClientEnableByEmail sets client enable state to desired value; returns (changed, needRestart, error)
|
|
|
|
|
|
func (s *InboundService) SetClientEnableByEmail(clientEmail string, enable bool) (bool, bool, error) {
|
2025-10-21 13:02:55 +02:00
|
|
|
|
current, err := s.checkIsEnabledByEmail(clientEmail)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return false, false, err
|
|
|
|
|
|
}
|
|
|
|
|
|
if current == enable {
|
|
|
|
|
|
return false, false, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
newEnabled, needRestart, err := s.ToggleClientEnableByEmail(clientEmail)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return false, needRestart, err
|
|
|
|
|
|
}
|
|
|
|
|
|
return newEnabled == enable, needRestart, nil
|
2025-09-28 22:00:16 +03:00
|
|
|
|
}
|
|
|
|
|
|
|
2024-03-15 21:13:20 +03:00
|
|
|
|
// ResetClientIpLimitByEmail sets the concurrent-IP limit of the client with
// the given email and persists the change via UpdateInboundClient. Returns
// whether Xray must be restarted to apply the change.
func (s *InboundService) ResetClientIpLimitByEmail(clientEmail string, count int) (bool, error) {
	_, inbound, err := s.GetClientInboundByEmail(clientEmail)
	if err != nil {
		return false, err
	}
	if inbound == nil {
		return false, common.NewError("Inbound Not Found For Email:", clientEmail)
	}

	oldClients, err := s.GetClients(inbound)
	if err != nil {
		return false, err
	}

	clientId := ""

	// Resolve the protocol-specific client identifier expected by
	// UpdateInboundClient: password for trojan, email for shadowsocks,
	// UUID otherwise.
	for _, oldClient := range oldClients {
		if oldClient.Email == clientEmail {
			switch inbound.Protocol {
			case "trojan":
				clientId = oldClient.Password
			case "shadowsocks":
				clientId = oldClient.Email
			default:
				clientId = oldClient.ID
			}
			break
		}
	}

	if len(clientId) == 0 {
		return false, common.NewError("Client Not Found For Email:", clientEmail)
	}

	var settings map[string]any
	err = json.Unmarshal([]byte(inbound.Settings), &settings)
	if err != nil {
		return false, err
	}
	clients := settings["clients"].([]any)
	var newClients []any
	for client_index := range clients {
		c := clients[client_index].(map[string]any)
		if c["email"] == clientEmail {
			c["limitIp"] = count
			// updated_at carries a millisecond timestamp.
			c["updated_at"] = time.Now().Unix() * 1000
			newClients = append(newClients, any(c))
		}
	}
	// Only the modified client is kept in settings; UpdateInboundClient
	// appears to treat inbound.Settings as the delta of clients to update
	// rather than the full list — TODO confirm against its implementation.
	settings["clients"] = newClients
	modifiedSettings, err := json.MarshalIndent(settings, "", " ")
	if err != nil {
		return false, err
	}
	inbound.Settings = string(modifiedSettings)
	needRestart, err := s.UpdateInboundClient(inbound, clientId)
	return needRestart, err
}
|
2023-07-01 15:56:43 +03:30
|
|
|
|
|
2024-03-15 21:13:20 +03:00
|
|
|
|
// ResetClientExpiryTimeByEmail sets the expiry timestamp (milliseconds) of
// the client with the given email and persists the change via
// UpdateInboundClient. Returns whether Xray must be restarted.
func (s *InboundService) ResetClientExpiryTimeByEmail(clientEmail string, expiry_time int64) (bool, error) {
	_, inbound, err := s.GetClientInboundByEmail(clientEmail)
	if err != nil {
		return false, err
	}
	if inbound == nil {
		return false, common.NewError("Inbound Not Found For Email:", clientEmail)
	}

	oldClients, err := s.GetClients(inbound)
	if err != nil {
		return false, err
	}

	clientId := ""

	// Resolve the protocol-specific client identifier expected by
	// UpdateInboundClient: password for trojan, email for shadowsocks,
	// UUID otherwise.
	for _, oldClient := range oldClients {
		if oldClient.Email == clientEmail {
			switch inbound.Protocol {
			case "trojan":
				clientId = oldClient.Password
			case "shadowsocks":
				clientId = oldClient.Email
			default:
				clientId = oldClient.ID
			}
			break
		}
	}

	if len(clientId) == 0 {
		return false, common.NewError("Client Not Found For Email:", clientEmail)
	}

	var settings map[string]any
	err = json.Unmarshal([]byte(inbound.Settings), &settings)
	if err != nil {
		return false, err
	}
	clients := settings["clients"].([]any)
	var newClients []any
	for client_index := range clients {
		c := clients[client_index].(map[string]any)
		if c["email"] == clientEmail {
			c["expiryTime"] = expiry_time
			// updated_at carries a millisecond timestamp.
			c["updated_at"] = time.Now().Unix() * 1000
			newClients = append(newClients, any(c))
		}
	}
	// Only the modified client is kept in settings; UpdateInboundClient
	// appears to treat inbound.Settings as the delta of clients to update
	// rather than the full list — TODO confirm against its implementation.
	settings["clients"] = newClients
	modifiedSettings, err := json.MarshalIndent(settings, "", " ")
	if err != nil {
		return false, err
	}
	inbound.Settings = string(modifiedSettings)
	needRestart, err := s.UpdateInboundClient(inbound, clientId)
	return needRestart, err
}
|
|
|
|
|
|
|
2024-03-15 21:13:20 +03:00
|
|
|
|
// ResetClientTrafficLimitByEmail sets the traffic quota (in gigabytes,
// stored as bytes) of the client with the given email and persists the
// change via UpdateInboundClient. Returns whether Xray must be restarted.
func (s *InboundService) ResetClientTrafficLimitByEmail(clientEmail string, totalGB int) (bool, error) {
	// 0 means unlimited; negative quotas are rejected.
	if totalGB < 0 {
		return false, common.NewError("totalGB must be >= 0")
	}
	_, inbound, err := s.GetClientInboundByEmail(clientEmail)
	if err != nil {
		return false, err
	}
	if inbound == nil {
		return false, common.NewError("Inbound Not Found For Email:", clientEmail)
	}

	oldClients, err := s.GetClients(inbound)
	if err != nil {
		return false, err
	}

	clientId := ""

	// Resolve the protocol-specific client identifier expected by
	// UpdateInboundClient: password for trojan, email for shadowsocks,
	// UUID otherwise.
	for _, oldClient := range oldClients {
		if oldClient.Email == clientEmail {
			switch inbound.Protocol {
			case "trojan":
				clientId = oldClient.Password
			case "shadowsocks":
				clientId = oldClient.Email
			default:
				clientId = oldClient.ID
			}
			break
		}
	}

	if len(clientId) == 0 {
		return false, common.NewError("Client Not Found For Email:", clientEmail)
	}

	var settings map[string]any
	err = json.Unmarshal([]byte(inbound.Settings), &settings)
	if err != nil {
		return false, err
	}
	clients := settings["clients"].([]any)
	var newClients []any
	for client_index := range clients {
		c := clients[client_index].(map[string]any)
		if c["email"] == clientEmail {
			// Convert gigabytes to bytes for storage.
			c["totalGB"] = totalGB * 1024 * 1024 * 1024
			// updated_at carries a millisecond timestamp.
			c["updated_at"] = time.Now().Unix() * 1000
			newClients = append(newClients, any(c))
		}
	}
	// Only the modified client is kept in settings; UpdateInboundClient
	// appears to treat inbound.Settings as the delta of clients to update
	// rather than the full list — TODO confirm against its implementation.
	settings["clients"] = newClients
	modifiedSettings, err := json.MarshalIndent(settings, "", " ")
	if err != nil {
		return false, err
	}
	inbound.Settings = string(modifiedSettings)
	needRestart, err := s.UpdateInboundClient(inbound, clientId)
	return needRestart, err
}
|
|
|
|
|
|
|
2023-05-05 01:16:43 +03:30
|
|
|
|
func (s *InboundService) ResetClientTrafficByEmail(clientEmail string) error {
|
2026-05-10 16:25:23 +02:00
|
|
|
|
return submitTrafficWrite(func() error {
|
|
|
|
|
|
db := database.GetDB()
|
|
|
|
|
|
return db.Model(xray.ClientTraffic{}).
|
|
|
|
|
|
Where("email = ?", clientEmail).
|
|
|
|
|
|
Updates(map[string]any{"enable": true, "up": 0, "down": 0}).Error
|
|
|
|
|
|
})
|
|
|
|
|
|
}
|
2025-09-16 09:24:32 +02:00
|
|
|
|
|
2026-05-10 16:25:23 +02:00
|
|
|
|
func (s *InboundService) ResetClientTraffic(id int, clientEmail string) (needRestart bool, err error) {
|
|
|
|
|
|
err = submitTrafficWrite(func() error {
|
|
|
|
|
|
var inner error
|
|
|
|
|
|
needRestart, inner = s.resetClientTrafficLocked(id, clientEmail)
|
|
|
|
|
|
return inner
|
|
|
|
|
|
})
|
|
|
|
|
|
return
|
2023-05-05 01:16:43 +03:30
|
|
|
|
}
|
|
|
|
|
|
|
2026-05-10 16:25:23 +02:00
|
|
|
|
func (s *InboundService) resetClientTrafficLocked(id int, clientEmail string) (bool, error) {
|
2023-06-05 00:32:19 +03:30
|
|
|
|
needRestart := false
|
2023-02-09 22:48:06 +03:30
|
|
|
|
|
2023-06-05 00:32:19 +03:30
|
|
|
|
traffic, err := s.GetClientTrafficByEmail(clientEmail)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return false, err
|
|
|
|
|
|
}
|
2023-02-28 23:24:29 +03:30
|
|
|
|
|
2023-06-05 00:32:19 +03:30
|
|
|
|
if !traffic.Enable {
|
|
|
|
|
|
inbound, err := s.GetInbound(id)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return false, err
|
|
|
|
|
|
}
|
|
|
|
|
|
clients, err := s.GetClients(inbound)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return false, err
|
|
|
|
|
|
}
|
|
|
|
|
|
for _, client := range clients {
|
2024-10-15 20:54:23 +02:00
|
|
|
|
if client.Email == clientEmail && client.Enable {
|
2026-05-09 17:38:48 +02:00
|
|
|
|
rt, rterr := s.runtimeFor(inbound)
|
|
|
|
|
|
if rterr != nil {
|
|
|
|
|
|
if inbound.NodeID != nil {
|
|
|
|
|
|
return false, rterr
|
|
|
|
|
|
}
|
|
|
|
|
|
needRestart = true
|
|
|
|
|
|
break
|
|
|
|
|
|
}
|
2023-07-27 11:58:12 +03:30
|
|
|
|
cipher := ""
|
|
|
|
|
|
if string(inbound.Protocol) == "shadowsocks" {
|
2025-03-12 20:13:51 +01:00
|
|
|
|
var oldSettings map[string]any
|
2023-07-27 11:58:12 +03:30
|
|
|
|
err = json.Unmarshal([]byte(inbound.Settings), &oldSettings)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return false, err
|
|
|
|
|
|
}
|
|
|
|
|
|
cipher = oldSettings["method"].(string)
|
|
|
|
|
|
}
|
2026-05-09 17:38:48 +02:00
|
|
|
|
err1 := rt.AddUser(context.Background(), inbound, map[string]any{
|
2023-06-05 00:32:19 +03:30
|
|
|
|
"email": client.Email,
|
|
|
|
|
|
"id": client.ID,
|
2026-04-20 16:05:27 +02:00
|
|
|
|
"auth": client.Auth,
|
2024-08-11 00:47:44 +02:00
|
|
|
|
"security": client.Security,
|
2023-06-05 00:32:19 +03:30
|
|
|
|
"flow": client.Flow,
|
|
|
|
|
|
"password": client.Password,
|
2023-07-27 11:58:12 +03:30
|
|
|
|
"cipher": cipher,
|
2023-06-05 00:32:19 +03:30
|
|
|
|
})
|
|
|
|
|
|
if err1 == nil {
|
2026-05-09 17:38:48 +02:00
|
|
|
|
logger.Debug("Client enabled on", rt.Name(), "due to reset traffic:", clientEmail)
|
2023-06-05 00:32:19 +03:30
|
|
|
|
} else {
|
2026-05-09 17:38:48 +02:00
|
|
|
|
logger.Debug("Error in enabling client on", rt.Name(), ":", err1)
|
2023-06-05 00:32:19 +03:30
|
|
|
|
needRestart = true
|
|
|
|
|
|
}
|
|
|
|
|
|
break
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
traffic.Up = 0
|
|
|
|
|
|
traffic.Down = 0
|
|
|
|
|
|
traffic.Enable = true
|
2023-02-28 23:24:29 +03:30
|
|
|
|
|
2023-06-05 00:32:19 +03:30
|
|
|
|
db := database.GetDB()
|
|
|
|
|
|
err = db.Save(traffic).Error
|
2023-02-28 23:24:29 +03:30
|
|
|
|
if err != nil {
|
2023-06-05 00:32:19 +03:30
|
|
|
|
return false, err
|
2023-02-28 23:24:29 +03:30
|
|
|
|
}
|
2023-06-05 00:32:19 +03:30
|
|
|
|
|
2026-05-09 17:38:48 +02:00
|
|
|
|
now := time.Now().UnixMilli()
|
|
|
|
|
|
_ = db.Model(model.Inbound{}).
|
|
|
|
|
|
Where("id = ?", id).
|
|
|
|
|
|
Update("last_traffic_reset_time", now).Error
|
|
|
|
|
|
|
|
|
|
|
|
inbound, err := s.GetInbound(id)
|
|
|
|
|
|
if err == nil && inbound != nil && inbound.NodeID != nil {
|
|
|
|
|
|
if rt, rterr := s.runtimeFor(inbound); rterr == nil {
|
|
|
|
|
|
if e := rt.ResetClientTraffic(context.Background(), inbound, clientEmail); e != nil {
|
|
|
|
|
|
logger.Warning("ResetClientTraffic: remote propagation to", rt.Name(), "failed:", e)
|
|
|
|
|
|
}
|
|
|
|
|
|
} else {
|
|
|
|
|
|
logger.Warning("ResetClientTraffic: runtime lookup failed:", rterr)
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-06-05 00:32:19 +03:30
|
|
|
|
return needRestart, nil
|
2023-02-28 23:24:29 +03:30
|
|
|
|
}
|
2023-04-09 23:13:18 +03:30
|
|
|
|
|
|
|
|
|
|
func (s *InboundService) ResetAllClientTraffics(id int) error {
|
2026-05-10 16:25:23 +02:00
|
|
|
|
return submitTrafficWrite(func() error {
|
|
|
|
|
|
return s.resetAllClientTrafficsLocked(id)
|
|
|
|
|
|
})
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func (s *InboundService) resetAllClientTrafficsLocked(id int) error {
|
2023-02-09 22:48:06 +03:30
|
|
|
|
db := database.GetDB()
|
2025-09-16 09:24:32 +02:00
|
|
|
|
now := time.Now().Unix() * 1000
|
2023-02-09 22:48:06 +03:30
|
|
|
|
|
2026-05-09 17:38:48 +02:00
|
|
|
|
if err := db.Transaction(func(tx *gorm.DB) error {
|
2025-09-16 09:24:32 +02:00
|
|
|
|
whereText := "inbound_id "
|
|
|
|
|
|
if id == -1 {
|
|
|
|
|
|
whereText += " > ?"
|
|
|
|
|
|
} else {
|
|
|
|
|
|
whereText += " = ?"
|
|
|
|
|
|
}
|
2023-04-25 14:38:35 +03:30
|
|
|
|
|
2025-09-16 09:24:32 +02:00
|
|
|
|
// Reset client traffics
|
|
|
|
|
|
result := tx.Model(xray.ClientTraffic{}).
|
|
|
|
|
|
Where(whereText, id).
|
|
|
|
|
|
Updates(map[string]any{"enable": true, "up": 0, "down": 0})
|
2023-04-09 23:13:18 +03:30
|
|
|
|
|
2025-09-16 09:24:32 +02:00
|
|
|
|
if result.Error != nil {
|
|
|
|
|
|
return result.Error
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Update lastTrafficResetTime for the inbound(s)
|
|
|
|
|
|
inboundWhereText := "id "
|
|
|
|
|
|
if id == -1 {
|
|
|
|
|
|
inboundWhereText += " > ?"
|
|
|
|
|
|
} else {
|
|
|
|
|
|
inboundWhereText += " = ?"
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
result = tx.Model(model.Inbound{}).
|
|
|
|
|
|
Where(inboundWhereText, id).
|
|
|
|
|
|
Update("last_traffic_reset_time", now)
|
|
|
|
|
|
|
|
|
|
|
|
return result.Error
|
2026-05-09 17:38:48 +02:00
|
|
|
|
}); err != nil {
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
var inbounds []model.Inbound
|
|
|
|
|
|
q := db.Model(model.Inbound{}).Where("node_id IS NOT NULL")
|
|
|
|
|
|
if id != -1 {
|
|
|
|
|
|
q = q.Where("id = ?", id)
|
|
|
|
|
|
}
|
|
|
|
|
|
if err := q.Find(&inbounds).Error; err != nil {
|
|
|
|
|
|
logger.Warning("ResetAllClientTraffics: discover node inbounds failed:", err)
|
|
|
|
|
|
return nil
|
|
|
|
|
|
}
|
|
|
|
|
|
for i := range inbounds {
|
|
|
|
|
|
ib := &inbounds[i]
|
|
|
|
|
|
rt, rterr := s.runtimeFor(ib)
|
|
|
|
|
|
if rterr != nil {
|
|
|
|
|
|
logger.Warning("ResetAllClientTraffics: runtime lookup for inbound", ib.Id, "failed:", rterr)
|
|
|
|
|
|
continue
|
|
|
|
|
|
}
|
|
|
|
|
|
if e := rt.ResetInboundClientTraffics(context.Background(), ib); e != nil {
|
|
|
|
|
|
logger.Warning("ResetAllClientTraffics: remote propagation to", rt.Name(), "failed:", e)
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
return nil
|
2023-04-09 23:13:18 +03:30
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func (s *InboundService) ResetAllTraffics() error {
|
2026-05-10 16:25:23 +02:00
|
|
|
|
return submitTrafficWrite(func() error {
|
|
|
|
|
|
return s.resetAllTrafficsLocked()
|
|
|
|
|
|
})
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// resetAllTrafficsLocked zeroes the up/down counters of every user-owned
// inbound and stamps last_traffic_reset_time, then best-effort propagates the
// reset to node-bound runtimes. Must run inside submitTrafficWrite.
func (s *InboundService) resetAllTrafficsLocked() error {
	db := database.GetDB()
	// Milliseconds since epoch for the last_traffic_reset_time column.
	now := time.Now().UnixMilli()

	// "user_id > 0" limits the reset to inbounds owned by a real panel user.
	if err := db.Model(model.Inbound{}).
		Where("user_id > ?", 0).
		Updates(map[string]any{
			"up":                      0,
			"down":                    0,
			"last_traffic_reset_time": now,
		}).Error; err != nil {
		return err
	}

	// Propagate to remote nodes. Failures here are logged and swallowed:
	// the local DB reset above has already succeeded.
	var inbounds []model.Inbound
	if err := db.Model(model.Inbound{}).
		Where("node_id IS NOT NULL").
		Find(&inbounds).Error; err != nil {
		logger.Warning("ResetAllTraffics: discover node inbounds failed:", err)
		return nil
	}
	for i := range inbounds {
		ib := &inbounds[i]
		rt, rterr := s.runtimeFor(ib)
		if rterr != nil {
			logger.Warning("ResetAllTraffics: runtime lookup for inbound", ib.Id, "failed:", rterr)
			continue
		}
		if e := rt.ResetInboundClientTraffics(context.Background(), ib); e != nil {
			logger.Warning("ResetAllTraffics: remote propagation to", rt.Name(), "failed:", e)
		}
	}
	return nil
}
|
|
|
|
|
|
|
2026-04-19 22:37:34 +03:00
|
|
|
|
func (s *InboundService) ResetInboundTraffic(id int) error {
|
2026-05-10 16:25:23 +02:00
|
|
|
|
return submitTrafficWrite(func() error {
|
|
|
|
|
|
db := database.GetDB()
|
|
|
|
|
|
return db.Model(model.Inbound{}).
|
|
|
|
|
|
Where("id = ?", id).
|
|
|
|
|
|
Updates(map[string]any{"up": 0, "down": 0}).Error
|
|
|
|
|
|
})
|
2026-04-19 22:37:34 +03:00
|
|
|
|
}
|
|
|
|
|
|
|
2023-04-25 18:43:37 +03:30
|
|
|
|
// DelDepletedClients removes depleted clients (traffic quota exhausted or
// expiry passed, and not flagged for auto-reset) from inbound settings and
// deletes their now-orphaned client_traffics rows. With id >= 0 only that
// inbound's settings are edited; with a negative id every inbound is swept.
// Runs in a transaction that commits only when err is nil on return.
func (s *InboundService) DelDepletedClients(id int) (err error) {
	db := database.GetDB()
	tx := db.Begin()
	// Named return err drives commit/rollback: any early `return err` with a
	// non-nil error rolls the whole sweep back.
	defer func() {
		if err == nil {
			tx.Commit()
		} else {
			tx.Rollback()
		}
	}()

	// Collect depleted emails globally — a shared-email row owned by one
	// inbound depletes every sibling that lists the email.
	now := time.Now().Unix() * 1000
	depletedClause := "reset = 0 and ((total > 0 and up + down >= total) or (expiry_time > 0 and expiry_time <= ?))"
	var depletedRows []xray.ClientTraffic
	// Read outside the tx: this is a snapshot of depleted rows; writes below go through tx.
	err = db.Model(xray.ClientTraffic{}).
		Where(depletedClause, now).
		Find(&depletedRows).Error
	if err != nil {
		return err
	}
	if len(depletedRows) == 0 {
		return nil
	}

	// Lower-cased email set for case-insensitive matching against settings JSON.
	depletedEmails := make(map[string]struct{}, len(depletedRows))
	for _, r := range depletedRows {
		if r.Email == "" {
			continue
		}
		depletedEmails[strings.ToLower(r.Email)] = struct{}{}
	}
	if len(depletedEmails) == 0 {
		return nil
	}

	// Scope the settings edit to one inbound when id >= 0, all inbounds otherwise.
	var inbounds []*model.Inbound
	inboundQuery := db.Model(model.Inbound{})
	if id >= 0 {
		inboundQuery = inboundQuery.Where("id = ?", id)
	}
	if err = inboundQuery.Find(&inbounds).Error; err != nil {
		return err
	}

	for _, inbound := range inbounds {
		var settings map[string]any
		if err = json.Unmarshal([]byte(inbound.Settings), &settings); err != nil {
			return err
		}
		rawClients, ok := settings["clients"].([]any)
		if !ok {
			continue
		}
		// Filter depleted clients out; keep malformed entries untouched.
		newClients := make([]any, 0, len(rawClients))
		removed := 0
		for _, client := range rawClients {
			c, ok := client.(map[string]any)
			if !ok {
				newClients = append(newClients, client)
				continue
			}
			email, _ := c["email"].(string)
			if _, isDepleted := depletedEmails[strings.ToLower(email)]; isDepleted {
				removed++
				continue
			}
			newClients = append(newClients, client)
		}
		if removed == 0 {
			continue
		}
		if len(newClients) == 0 {
			// Last client removed: drop the whole inbound.
			// NOTE(review): DelInbound's error is ignored here — presumably
			// best-effort, but worth confirming against DelInbound's contract.
			s.DelInbound(inbound.Id)
			continue
		}
		settings["clients"] = newClients
		ns, mErr := json.MarshalIndent(settings, "", "  ")
		if mErr != nil {
			return mErr
		}
		inbound.Settings = string(ns)
		if err = tx.Save(inbound).Error; err != nil {
			return err
		}
	}

	// Drop now-orphaned rows. With id >= 0, a row is safe to drop only when
	// no out-of-scope inbound still references the email.
	if id < 0 {
		err = tx.Where(depletedClause, now).Delete(xray.ClientTraffic{}).Error
		return err
	}
	emails := make([]string, 0, len(depletedEmails))
	for e := range depletedEmails {
		emails = append(emails, e)
	}
	// SQLite JSON1: enumerate every client email still present in any
	// inbound's settings that matches our depleted set.
	var stillReferenced []string
	if err = tx.Raw(`
		SELECT DISTINCT LOWER(JSON_EXTRACT(client.value, '$.email'))
		FROM inbounds,
			JSON_EACH(JSON_EXTRACT(inbounds.settings, '$.clients')) AS client
		WHERE LOWER(JSON_EXTRACT(client.value, '$.email')) IN ?
	`, emails).Scan(&stillReferenced).Error; err != nil {
		return err
	}
	stillSet := make(map[string]struct{}, len(stillReferenced))
	for _, e := range stillReferenced {
		stillSet[e] = struct{}{}
	}
	// Delete only the emails no inbound references anymore.
	toDelete := make([]string, 0, len(emails))
	for _, e := range emails {
		if _, kept := stillSet[e]; !kept {
			toDelete = append(toDelete, e)
		}
	}
	if len(toDelete) > 0 {
		if err = tx.Where("LOWER(email) IN ?", toDelete).Delete(xray.ClientTraffic{}).Error; err != nil {
			return err
		}
	}
	return nil
}
|
|
|
|
|
|
|
2024-04-02 15:04:44 +03:30
|
|
|
|
// GetClientTrafficTgBot returns the traffic records of every client bound to
// the given Telegram account id, with per-record Enable/UUID/SubId refreshed
// from the live inbound settings. Returns (nil, nil) when no records match.
func (s *InboundService) GetClientTrafficTgBot(tgId int64) ([]*xray.ClientTraffic, error) {
	db := database.GetDB()
	var inbounds []*model.Inbound

	// Retrieve inbounds where settings contain the given tgId.
	// LIKE is a coarse pre-filter; the exact TgID match happens below.
	err := db.Model(model.Inbound{}).Where("settings LIKE ?", fmt.Sprintf(`%%"tgId": %d%%`, tgId)).Find(&inbounds).Error
	if err != nil && err != gorm.ErrRecordNotFound {
		logger.Errorf("Error retrieving inbounds with tgId %d: %v", tgId, err)
		return nil, err
	}

	// Collect the emails of clients whose TgID matches exactly.
	var emails []string
	for _, inbound := range inbounds {
		clients, err := s.GetClients(inbound)
		if err != nil {
			logger.Errorf("Error retrieving clients for inbound %d: %v", inbound.Id, err)
			continue
		}
		for _, client := range clients {
			if client.TgID == tgId {
				emails = append(emails, client.Email)
			}
		}
	}

	// Chunked to stay under SQLite's bind-variable limit when a single Telegram
	// account owns thousands of clients across inbounds.
	uniqEmails := uniqueNonEmptyStrings(emails)
	traffics := make([]*xray.ClientTraffic, 0, len(uniqEmails))
	for _, batch := range chunkStrings(uniqEmails, sqliteMaxVars) {
		var page []*xray.ClientTraffic
		if err = db.Model(xray.ClientTraffic{}).Where("email IN ?", batch).Find(&page).Error; err != nil {
			if err == gorm.ErrRecordNotFound {
				continue
			}
			logger.Errorf("Error retrieving ClientTraffic for emails %v: %v", batch, err)
			return nil, err
		}
		traffics = append(traffics, page...)
	}
	if len(traffics) == 0 {
		logger.Warning("No ClientTraffic records found for emails:", emails)
		return nil, nil
	}

	// Populate UUID and other client data for each traffic record;
	// lookup failures leave the record's stored values untouched.
	for i := range traffics {
		if ct, client, e := s.GetClientByEmail(traffics[i].Email); e == nil && ct != nil && client != nil {
			traffics[i].Enable = client.Enable
			traffics[i].UUID = client.ID
			traffics[i].SubId = client.SubID
		}
	}

	return traffics, nil
}
|
2023-02-09 22:48:06 +03:30
|
|
|
|
|
2026-05-05 18:27:49 +03:00
|
|
|
|
// sqliteMaxVars is a safe ceiling for the number of bind parameters in a
// single SQL statement. SQLite's SQLITE_MAX_VARIABLE_NUMBER is 999 on builds
// before 3.32 and 32766 after; staying under 999 keeps queries portable
// across forks/old binaries and also bounds per-query memory on truly large
// installs (>32k clients) where even modern SQLite would refuse a single IN.
// Used as the chunk size for chunkStrings/chunkInts when batching IN queries.
const sqliteMaxVars = 900
|
|
|
|
|
|
|
|
|
|
|
|
// uniqueNonEmptyStrings returns a copy of in with empty strings and
// duplicates dropped, keeping the first occurrence order. A nil or empty
// input yields nil.
func uniqueNonEmptyStrings(in []string) []string {
	if len(in) == 0 {
		return nil
	}
	seen := make(map[string]struct{}, len(in))
	result := make([]string, 0, len(in))
	for _, v := range in {
		_, dup := seen[v]
		if v == "" || dup {
			continue
		}
		seen[v] = struct{}{}
		result = append(result, v)
	}
	return result
}
|
|
|
|
|
|
|
|
|
|
|
|
// uniqueInts returns a copy of in with duplicates dropped, keeping the first
// occurrence order. A nil or empty input yields nil.
func uniqueInts(in []int) []int {
	if len(in) == 0 {
		return nil
	}
	seen := make(map[int]struct{}, len(in))
	result := make([]int, 0, len(in))
	for _, v := range in {
		if _, dup := seen[v]; dup {
			continue
		}
		seen[v] = struct{}{}
		result = append(result, v)
	}
	return result
}
|
|
|
|
|
|
|
|
|
|
|
|
// chunkStrings partitions s into consecutive sub-slices of at most size
// elements (the sub-slices alias s's backing array). Returns nil for an
// empty input or non-positive size.
func chunkStrings(s []string, size int) [][]string {
	if size <= 0 || len(s) == 0 {
		return nil
	}
	chunks := make([][]string, 0, (len(s)+size-1)/size)
	for len(s) > size {
		chunks = append(chunks, s[:size])
		s = s[size:]
	}
	return append(chunks, s)
}
|
|
|
|
|
|
|
|
|
|
|
|
// chunkInts partitions s into consecutive sub-slices of at most size
// elements (the sub-slices alias s's backing array). Returns nil for an
// empty input or non-positive size.
func chunkInts(s []int, size int) [][]int {
	if size <= 0 || len(s) == 0 {
		return nil
	}
	chunks := make([][]int, 0, (len(s)+size-1)/size)
	for len(s) > size {
		chunks = append(chunks, s[:size])
		s = s[size:]
	}
	return append(chunks, s)
}
|
|
|
|
|
|
|
|
|
|
|
|
func (s *InboundService) GetActiveClientTraffics(emails []string) ([]*xray.ClientTraffic, error) {
|
|
|
|
|
|
uniq := uniqueNonEmptyStrings(emails)
|
|
|
|
|
|
if len(uniq) == 0 {
|
|
|
|
|
|
return nil, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
db := database.GetDB()
|
|
|
|
|
|
traffics := make([]*xray.ClientTraffic, 0, len(uniq))
|
|
|
|
|
|
for _, batch := range chunkStrings(uniq, sqliteMaxVars) {
|
|
|
|
|
|
var page []*xray.ClientTraffic
|
|
|
|
|
|
if err := db.Model(xray.ClientTraffic{}).Where("email IN ?", batch).Find(&page).Error; err != nil {
|
|
|
|
|
|
return nil, err
|
|
|
|
|
|
}
|
|
|
|
|
|
traffics = append(traffics, page...)
|
|
|
|
|
|
}
|
|
|
|
|
|
return traffics, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-05-14 01:31:49 +02:00
|
|
|
|
// GetAllClientTraffics returns the full set of client_traffics rows so the
|
|
|
|
|
|
// websocket broadcasters can ship a complete snapshot every cycle. The old
|
|
|
|
|
|
// delta-only path (GetActiveClientTraffics on activeEmails) silently dropped
|
|
|
|
|
|
// the per-client section whenever no client moved bytes in the cycle or a
|
|
|
|
|
|
// node sync failed, leaving client rows in the UI stuck at stale numbers.
|
|
|
|
|
|
func (s *InboundService) GetAllClientTraffics() ([]*xray.ClientTraffic, error) {
|
|
|
|
|
|
db := database.GetDB()
|
|
|
|
|
|
var traffics []*xray.ClientTraffic
|
|
|
|
|
|
if err := db.Model(xray.ClientTraffic{}).Find(&traffics).Error; err != nil {
|
|
|
|
|
|
return nil, err
|
|
|
|
|
|
}
|
|
|
|
|
|
return traffics, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-05-05 18:27:49 +03:00
|
|
|
|
// InboundTrafficSummary is a lightweight projection of an inbound's traffic
// columns (see GetInboundsTrafficSummary), avoiding loading full inbound
// rows when only counters are needed. JSON field names match the columns
// selected from the inbounds table.
type InboundTrafficSummary struct {
	Id      int   `json:"id"`
	Up      int64 `json:"up"`
	Down    int64 `json:"down"`
	Total   int64 `json:"total"`
	AllTime int64 `json:"allTime"`
	Enable  bool  `json:"enable"`
}
|
|
|
|
|
|
|
|
|
|
|
|
func (s *InboundService) GetInboundsTrafficSummary() ([]InboundTrafficSummary, error) {
|
|
|
|
|
|
db := database.GetDB()
|
|
|
|
|
|
var summaries []InboundTrafficSummary
|
|
|
|
|
|
if err := db.Model(&model.Inbound{}).
|
|
|
|
|
|
Select("id, up, down, total, all_time, enable").
|
|
|
|
|
|
Find(&summaries).Error; err != nil {
|
|
|
|
|
|
return nil, err
|
|
|
|
|
|
}
|
|
|
|
|
|
return summaries, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-04-25 14:38:35 +03:30
|
|
|
|
// GetClientTrafficByEmail returns the traffic record for email with UUID and
// SubId refreshed from the client's inbound settings.
//
// Returns (nil, nil) when no matching traffic row or client exists — callers
// MUST nil-check the returned traffic before dereferencing it.
func (s *InboundService) GetClientTrafficByEmail(email string) (traffic *xray.ClientTraffic, err error) {
	// Prefer retrieving along with client to reflect actual enabled state from inbound settings
	t, client, err := s.GetClientByEmail(email)
	if err != nil {
		logger.Warningf("Error retrieving ClientTraffic with email %s: %v", email, err)
		return nil, err
	}
	if t != nil && client != nil {
		t.UUID = client.ID
		t.SubId = client.SubID
		return t, nil
	}
	// Not found: signal with a nil traffic and nil error rather than an error.
	return nil, nil
}
|
2023-03-17 19:37:49 +03:30
|
|
|
|
|
2025-07-23 01:13:48 +03:30
|
|
|
|
// UpdateClientTrafficByEmail overwrites the up/down counters of the traffic
// record matching email, serialized through submitTrafficWrite. all_time is
// raised monotonically: the SQL CASE keeps the stored value unless the new
// up+down total exceeds it, so all_time never decreases when counters reset.
func (s *InboundService) UpdateClientTrafficByEmail(email string, upload int64, download int64) error {
	return submitTrafficWrite(func() error {
		db := database.GetDB()
		err := db.Model(xray.ClientTraffic{}).
			Where("email = ?", email).
			Updates(map[string]any{
				"up":       upload,
				"down":     download,
				"all_time": gorm.Expr("CASE WHEN COALESCE(all_time, 0) < ? THEN ? ELSE all_time END", upload+download, upload+download),
			}).Error
		if err != nil {
			logger.Warningf("Error updating ClientTraffic with email %s: %v", email, err)
		}
		return err
	})
}
|
|
|
|
|
|
|
2024-07-23 11:28:28 +02:00
|
|
|
|
// GetClientTrafficByID returns the traffic records of all clients whose
// settings id (UUID) matches the given id, resolved via a SQLite JSON1
// subquery over the inbound settings. Enable/UUID/SubId on each row are
// refreshed from the live client settings before returning.
func (s *InboundService) GetClientTrafficByID(id string) ([]xray.ClientTraffic, error) {
	db := database.GetDB()
	var traffics []xray.ClientTraffic

	// Map the client id to its email(s) through the inbounds' settings JSON,
	// then select the traffic rows for those emails.
	err := db.Model(xray.ClientTraffic{}).Where(`email IN(
		SELECT JSON_EXTRACT(client.value, '$.email') as email
		FROM inbounds,
			JSON_EACH(JSON_EXTRACT(inbounds.settings, '$.clients')) AS client
		WHERE
			JSON_EXTRACT(client.value, '$.id') in (?)
	)`, id).Find(&traffics).Error

	if err != nil {
		logger.Debug(err)
		return nil, err
	}
	// Reconcile enable flag with client settings per email to avoid stale DB value
	for i := range traffics {
		if ct, client, e := s.GetClientByEmail(traffics[i].Email); e == nil && ct != nil && client != nil {
			traffics[i].Enable = client.Enable
			traffics[i].UUID = client.ID
			traffics[i].SubId = client.SubID
		}
	}
	return traffics, err
}
|
|
|
|
|
|
|
2023-03-17 19:37:49 +03:30
|
|
|
|
// SearchClientTraffic finds the traffic record of the client whose id (UUID)
// or trojan password equals query. It first locates an inbound whose settings
// contain the quoted query string, then matches the exact client inside it.
// Returns gorm.ErrRecordNotFound when no client matches.
//
// NOTE(review): only the FIRST inbound whose settings contain the query is
// inspected; a query string shared across inbounds resolves to the first hit.
func (s *InboundService) SearchClientTraffic(query string) (traffic *xray.ClientTraffic, err error) {
	db := database.GetDB()
	inbound := &model.Inbound{}
	traffic = &xray.ClientTraffic{}

	// Search for inbound settings that contain the query
	// (quoted to avoid substring matches inside unrelated values).
	err = db.Model(model.Inbound{}).Where("settings LIKE ?", "%\""+query+"\"%").First(inbound).Error
	if err != nil {
		if err == gorm.ErrRecordNotFound {
			logger.Warningf("Inbound settings containing query %s not found: %v", query, err)
			return nil, err
		}
		logger.Errorf("Error searching for inbound settings with query %s: %v", query, err)
		return nil, err
	}

	traffic.InboundId = inbound.Id

	// Unmarshal settings to get clients
	settings := map[string][]model.Client{}
	if err := json.Unmarshal([]byte(inbound.Settings), &settings); err != nil {
		logger.Errorf("Error unmarshalling inbound settings for inbound ID %d: %v", inbound.Id, err)
		return nil, err
	}

	// Exact match against client ID (vmess/vless) or Password (trojan).
	clients := settings["clients"]
	for _, client := range clients {
		if (client.ID == query || client.Password == query) && client.Email != "" {
			traffic.Email = client.Email
			break
		}
	}

	if traffic.Email == "" {
		logger.Warningf("No client found with query %s in inbound ID %d", query, inbound.Id)
		return nil, gorm.ErrRecordNotFound
	}

	// Retrieve ClientTraffic based on the found email
	err = db.Model(xray.ClientTraffic{}).Where("email = ?", traffic.Email).First(traffic).Error
	if err != nil {
		if err == gorm.ErrRecordNotFound {
			logger.Warningf("ClientTraffic for email %s not found: %v", traffic.Email, err)
			return nil, err
		}
		logger.Errorf("Error retrieving ClientTraffic for email %s: %v", traffic.Email, err)
		return nil, err
	}

	return traffic, nil
}
|
2023-03-17 18:51:43 +01:00
|
|
|
|
|
2023-03-17 19:37:49 +03:30
|
|
|
|
// GetInboundClientIps returns the stored IP list for clientEmail as a JSON
// string. Two on-disk formats exist: the current one is an array of
// {ip, timestamp} objects; the legacy one is a plain string array. Legacy
// data is converted to the new format on the fly (with the current time as
// timestamp) — the conversion is NOT written back to the database here.
func (s *InboundService) GetInboundClientIps(clientEmail string) (string, error) {
	db := database.GetDB()
	InboundClientIps := &model.InboundClientIps{}
	err := db.Model(model.InboundClientIps{}).Where("client_email = ?", clientEmail).First(InboundClientIps).Error
	if err != nil {
		return "", err
	}

	if InboundClientIps.Ips == "" {
		return "", nil
	}

	// Try to parse as new format (with timestamps)
	type IPWithTimestamp struct {
		IP        string `json:"ip"`
		Timestamp int64  `json:"timestamp"`
	}

	var ipsWithTime []IPWithTimestamp
	err = json.Unmarshal([]byte(InboundClientIps.Ips), &ipsWithTime)

	// If successfully parsed as new format, return with timestamps
	if err == nil && len(ipsWithTime) > 0 {
		return InboundClientIps.Ips, nil
	}

	// Otherwise, assume it's old format (simple string array)
	// Try to parse as simple array and convert to new format
	var oldIps []string
	err = json.Unmarshal([]byte(InboundClientIps.Ips), &oldIps)
	if err == nil && len(oldIps) > 0 {
		// Convert old format to new format with current timestamp
		newIpsWithTime := make([]IPWithTimestamp, len(oldIps))
		for i, ip := range oldIps {
			newIpsWithTime[i] = IPWithTimestamp{
				IP:        ip,
				Timestamp: time.Now().Unix(),
			}
		}
		// Marshal of a well-formed slice cannot fail; error deliberately ignored.
		result, _ := json.Marshal(newIpsWithTime)
		return string(result), nil
	}

	// Return as-is if parsing fails
	return InboundClientIps.Ips, nil
}
|
2023-05-31 01:21:14 +04:30
|
|
|
|
|
2023-03-17 18:51:43 +01:00
|
|
|
|
func (s *InboundService) ClearClientIps(clientEmail string) error {
|
2023-03-17 19:37:49 +03:30
|
|
|
|
db := database.GetDB()
|
|
|
|
|
|
|
|
|
|
|
|
result := db.Model(model.InboundClientIps{}).
|
|
|
|
|
|
Where("client_email = ?", clientEmail).
|
|
|
|
|
|
Update("ips", "")
|
|
|
|
|
|
err := result.Error
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
|
|
|
|
|
return nil
|
|
|
|
|
|
}
|
2023-03-24 17:14:26 +03:30
|
|
|
|
|
|
|
|
|
|
func (s *InboundService) SearchInbounds(query string) ([]*model.Inbound, error) {
|
|
|
|
|
|
db := database.GetDB()
|
|
|
|
|
|
var inbounds []*model.Inbound
|
|
|
|
|
|
err := db.Model(model.Inbound{}).Preload("ClientStats").Where("remark like ?", "%"+query+"%").Find(&inbounds).Error
|
|
|
|
|
|
if err != nil && err != gorm.ErrRecordNotFound {
|
|
|
|
|
|
return nil, err
|
|
|
|
|
|
}
|
|
|
|
|
|
return inbounds, nil
|
2023-04-09 23:13:18 +03:30
|
|
|
|
}
|
2023-04-25 14:38:35 +03:30
|
|
|
|
|
2023-04-24 14:13:25 +03:30
|
|
|
|
func (s *InboundService) MigrationRequirements() {
|
|
|
|
|
|
db := database.GetDB()
|
2023-06-05 00:32:19 +03:30
|
|
|
|
tx := db.Begin()
|
|
|
|
|
|
var err error
|
|
|
|
|
|
defer func() {
|
|
|
|
|
|
if err == nil {
|
|
|
|
|
|
tx.Commit()
|
2025-09-19 17:14:39 +02:00
|
|
|
|
if dbErr := db.Exec(`VACUUM "main"`).Error; dbErr != nil {
|
|
|
|
|
|
logger.Warningf("VACUUM failed: %v", dbErr)
|
|
|
|
|
|
}
|
2023-06-05 00:32:19 +03:30
|
|
|
|
} else {
|
|
|
|
|
|
tx.Rollback()
|
|
|
|
|
|
}
|
|
|
|
|
|
}()
|
2025-08-28 02:40:50 +03:30
|
|
|
|
|
|
|
|
|
|
// Calculate and backfill all_time from up+down for inbounds and clients
|
|
|
|
|
|
err = tx.Exec(`
|
|
|
|
|
|
UPDATE inbounds
|
|
|
|
|
|
SET all_time = IFNULL(up, 0) + IFNULL(down, 0)
|
|
|
|
|
|
WHERE IFNULL(all_time, 0) = 0 AND (IFNULL(up, 0) + IFNULL(down, 0)) > 0
|
|
|
|
|
|
`).Error
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
|
|
|
|
|
err = tx.Exec(`
|
|
|
|
|
|
UPDATE client_traffics
|
|
|
|
|
|
SET all_time = IFNULL(up, 0) + IFNULL(down, 0)
|
|
|
|
|
|
WHERE IFNULL(all_time, 0) = 0 AND (IFNULL(up, 0) + IFNULL(down, 0)) > 0
|
|
|
|
|
|
`).Error
|
|
|
|
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
2023-04-27 19:05:36 +03:30
|
|
|
|
|
|
|
|
|
|
// Fix inbounds based problems
|
2023-04-24 14:13:25 +03:30
|
|
|
|
var inbounds []*model.Inbound
|
2023-06-05 00:32:19 +03:30
|
|
|
|
err = tx.Model(model.Inbound{}).Where("protocol IN (?)", []string{"vmess", "vless", "trojan"}).Find(&inbounds).Error
|
2023-04-24 14:13:25 +03:30
|
|
|
|
if err != nil && err != gorm.ErrRecordNotFound {
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
|
|
|
|
|
for inbound_index := range inbounds {
|
2025-03-12 20:13:51 +01:00
|
|
|
|
settings := map[string]any{}
|
2023-04-24 14:13:25 +03:30
|
|
|
|
json.Unmarshal([]byte(inbounds[inbound_index].Settings), &settings)
|
2025-03-12 20:13:51 +01:00
|
|
|
|
clients, ok := settings["clients"].([]any)
|
2023-04-24 14:13:25 +03:30
|
|
|
|
if ok {
|
2024-02-27 20:33:37 +09:00
|
|
|
|
// Fix Client configuration problems
|
2025-03-12 20:13:51 +01:00
|
|
|
|
var newClients []any
|
2026-05-07 14:44:33 +02:00
|
|
|
|
hasVisionFlow := false
|
2023-04-24 14:13:25 +03:30
|
|
|
|
for client_index := range clients {
|
2025-03-12 20:13:51 +01:00
|
|
|
|
c := clients[client_index].(map[string]any)
|
2023-04-24 14:13:25 +03:30
|
|
|
|
|
|
|
|
|
|
// Add email='' if it is not exists
|
|
|
|
|
|
if _, ok := c["email"]; !ok {
|
|
|
|
|
|
c["email"] = ""
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2024-04-05 12:24:18 +03:00
|
|
|
|
// Convert string tgId to int64
|
|
|
|
|
|
if _, ok := c["tgId"]; ok {
|
2025-03-12 20:13:51 +01:00
|
|
|
|
var tgId any = c["tgId"]
|
2024-04-05 12:24:18 +03:00
|
|
|
|
if tgIdStr, ok2 := tgId.(string); ok2 {
|
|
|
|
|
|
tgIdInt64, err := strconv.ParseInt(strings.ReplaceAll(tgIdStr, " ", ""), 10, 64)
|
|
|
|
|
|
if err == nil {
|
|
|
|
|
|
c["tgId"] = tgIdInt64
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-04-24 14:13:25 +03:30
|
|
|
|
// Remove "flow": "xtls-rprx-direct"
|
|
|
|
|
|
if _, ok := c["flow"]; ok {
|
|
|
|
|
|
if c["flow"] == "xtls-rprx-direct" {
|
|
|
|
|
|
c["flow"] = ""
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
2026-05-07 14:44:33 +02:00
|
|
|
|
if flow, _ := c["flow"].(string); flow == "xtls-rprx-vision" {
|
|
|
|
|
|
hasVisionFlow = true
|
|
|
|
|
|
}
|
2025-08-27 21:00:49 +03:30
|
|
|
|
// Backfill created_at and updated_at
|
|
|
|
|
|
if _, ok := c["created_at"]; !ok {
|
|
|
|
|
|
c["created_at"] = time.Now().Unix() * 1000
|
|
|
|
|
|
}
|
|
|
|
|
|
c["updated_at"] = time.Now().Unix() * 1000
|
2025-03-12 20:13:51 +01:00
|
|
|
|
newClients = append(newClients, any(c))
|
2023-04-24 14:13:25 +03:30
|
|
|
|
}
|
|
|
|
|
|
settings["clients"] = newClients
|
2026-05-07 14:44:33 +02:00
|
|
|
|
|
|
|
|
|
|
// Drop orphaned testseed: VLESS-only field, only meaningful when at least
|
|
|
|
|
|
// one client uses the exact xtls-rprx-vision flow. Older versions saved it
|
|
|
|
|
|
// for any non-empty flow (including the UDP variant) or kept it after the
|
|
|
|
|
|
// flow was cleared from the client modal — clean those up here.
|
|
|
|
|
|
if inbounds[inbound_index].Protocol == model.VLESS && !hasVisionFlow {
|
|
|
|
|
|
delete(settings, "testseed")
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-04-24 14:13:25 +03:30
|
|
|
|
modifiedSettings, err := json.MarshalIndent(settings, "", " ")
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
inbounds[inbound_index].Settings = string(modifiedSettings)
|
|
|
|
|
|
}
|
2023-06-05 00:32:19 +03:30
|
|
|
|
|
2023-04-27 19:05:36 +03:30
|
|
|
|
// Add client traffic row for all clients which has email
|
2023-05-22 18:06:34 +03:30
|
|
|
|
modelClients, err := s.GetClients(inbounds[inbound_index])
|
2023-04-25 18:36:06 +03:30
|
|
|
|
if err != nil {
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
|
|
|
|
|
for _, modelClient := range modelClients {
|
|
|
|
|
|
if len(modelClient.Email) > 0 {
|
|
|
|
|
|
var count int64
|
2023-06-05 00:32:19 +03:30
|
|
|
|
tx.Model(xray.ClientTraffic{}).Where("email = ?", modelClient.Email).Count(&count)
|
2023-04-25 18:36:06 +03:30
|
|
|
|
if count == 0 {
|
2023-06-05 00:32:19 +03:30
|
|
|
|
s.AddClientStat(tx, inbounds[inbound_index].Id, &modelClient)
|
2023-04-25 18:36:06 +03:30
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
2023-04-24 14:13:25 +03:30
|
|
|
|
}
|
2023-06-05 00:32:19 +03:30
|
|
|
|
tx.Save(inbounds)
|
2023-04-27 19:05:36 +03:30
|
|
|
|
|
|
|
|
|
|
// Remove orphaned traffics
|
2023-06-05 00:32:19 +03:30
|
|
|
|
tx.Where("inbound_id = 0").Delete(xray.ClientTraffic{})
|
2023-12-08 18:45:21 +01:00
|
|
|
|
|
|
|
|
|
|
// Migrate old MultiDomain to External Proxy
|
|
|
|
|
|
var externalProxy []struct {
|
|
|
|
|
|
Id int
|
|
|
|
|
|
Port int
|
|
|
|
|
|
StreamSettings []byte
|
|
|
|
|
|
}
|
|
|
|
|
|
err = tx.Raw(`select id, port, stream_settings
|
|
|
|
|
|
from inbounds
|
|
|
|
|
|
WHERE protocol in ('vmess','vless','trojan')
|
|
|
|
|
|
AND json_extract(stream_settings, '$.security') = 'tls'
|
|
|
|
|
|
AND json_extract(stream_settings, '$.tlsSettings.settings.domains') IS NOT NULL`).Scan(&externalProxy).Error
|
|
|
|
|
|
if err != nil || len(externalProxy) == 0 {
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
for _, ep := range externalProxy {
|
2025-03-12 20:13:51 +01:00
|
|
|
|
var reverses any
|
|
|
|
|
|
var stream map[string]any
|
2023-12-08 18:45:21 +01:00
|
|
|
|
json.Unmarshal(ep.StreamSettings, &stream)
|
2025-03-12 20:13:51 +01:00
|
|
|
|
if tlsSettings, ok := stream["tlsSettings"].(map[string]any); ok {
|
|
|
|
|
|
if settings, ok := tlsSettings["settings"].(map[string]any); ok {
|
|
|
|
|
|
if domains, ok := settings["domains"].([]any); ok {
|
2023-12-08 18:45:21 +01:00
|
|
|
|
for _, domain := range domains {
|
2025-03-12 20:13:51 +01:00
|
|
|
|
if domainMap, ok := domain.(map[string]any); ok {
|
2023-12-08 18:45:21 +01:00
|
|
|
|
domainMap["forceTls"] = "same"
|
|
|
|
|
|
domainMap["port"] = ep.Port
|
|
|
|
|
|
domainMap["dest"] = domainMap["domain"].(string)
|
|
|
|
|
|
delete(domainMap, "domain")
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
reverses = settings["domains"]
|
|
|
|
|
|
delete(settings, "domains")
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
stream["externalProxy"] = reverses
|
|
|
|
|
|
newStream, _ := json.MarshalIndent(stream, " ", " ")
|
|
|
|
|
|
tx.Model(model.Inbound{}).Where("id = ?", ep.Id).Update("stream_settings", newStream)
|
|
|
|
|
|
}
|
2024-02-22 23:12:26 +03:30
|
|
|
|
|
|
|
|
|
|
err = tx.Raw(`UPDATE inbounds
|
|
|
|
|
|
SET tag = REPLACE(tag, '0.0.0.0:', '')
|
|
|
|
|
|
WHERE INSTR(tag, '0.0.0.0:') > 0;`).Error
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
2023-04-24 14:13:25 +03:30
|
|
|
|
}
|
2023-05-06 00:22:39 +04:30
|
|
|
|
|
|
|
|
|
|
// MigrateDB runs the inbound-related database migrations in a fixed order:
// the data repairs/backfills first, then removal of orphaned traffic rows.
// The order is preserved deliberately — callers rely on this sequence.
func (s *InboundService) MigrateDB() {
	s.MigrationRequirements()
	s.MigrationRemoveOrphanedTraffics()
}
|
2023-12-04 19:13:21 +01:00
|
|
|
|
|
2024-07-07 11:55:59 +02:00
|
|
|
|
func (s *InboundService) GetOnlineClients() []string {
|
2023-12-04 19:13:21 +01:00
|
|
|
|
return p.GetOnlineClients()
|
|
|
|
|
|
}
|
2025-05-06 19:57:17 +03:30
|
|
|
|
|
2026-05-09 17:38:48 +02:00
|
|
|
|
func (s *InboundService) SetNodeOnlineClients(nodeID int, emails []string) {
|
|
|
|
|
|
if p != nil {
|
|
|
|
|
|
p.SetNodeOnlineClients(nodeID, emails)
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func (s *InboundService) ClearNodeOnlineClients(nodeID int) {
|
|
|
|
|
|
if p != nil {
|
|
|
|
|
|
p.ClearNodeOnlineClients(nodeID)
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2025-08-31 20:03:50 +03:30
|
|
|
|
func (s *InboundService) GetClientsLastOnline() (map[string]int64, error) {
|
|
|
|
|
|
db := database.GetDB()
|
|
|
|
|
|
var rows []xray.ClientTraffic
|
|
|
|
|
|
err := db.Model(&xray.ClientTraffic{}).Select("email, last_online").Find(&rows).Error
|
|
|
|
|
|
if err != nil && err != gorm.ErrRecordNotFound {
|
|
|
|
|
|
return nil, err
|
|
|
|
|
|
}
|
|
|
|
|
|
result := make(map[string]int64, len(rows))
|
|
|
|
|
|
for _, r := range rows {
|
|
|
|
|
|
result[r.Email] = r.LastOnline
|
|
|
|
|
|
}
|
|
|
|
|
|
return result, nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2025-05-06 19:57:17 +03:30
|
|
|
|
func (s *InboundService) FilterAndSortClientEmails(emails []string) ([]string, []string, error) {
|
|
|
|
|
|
db := database.GetDB()
|
|
|
|
|
|
|
2026-05-05 18:27:49 +03:00
|
|
|
|
// Step 1: Get ClientTraffic records for emails in the input list.
|
|
|
|
|
|
// Chunked to stay under SQLite's bind-variable limit on huge inputs.
|
|
|
|
|
|
uniqEmails := uniqueNonEmptyStrings(emails)
|
|
|
|
|
|
clients := make([]xray.ClientTraffic, 0, len(uniqEmails))
|
|
|
|
|
|
for _, batch := range chunkStrings(uniqEmails, sqliteMaxVars) {
|
|
|
|
|
|
var page []xray.ClientTraffic
|
|
|
|
|
|
if err := db.Where("email IN ?", batch).Find(&page).Error; err != nil && err != gorm.ErrRecordNotFound {
|
|
|
|
|
|
return nil, nil, err
|
|
|
|
|
|
}
|
|
|
|
|
|
clients = append(clients, page...)
|
2025-05-06 19:57:17 +03:30
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Step 2: Sort clients by (Up + Down) descending
|
|
|
|
|
|
sort.Slice(clients, func(i, j int) bool {
|
|
|
|
|
|
return (clients[i].Up + clients[i].Down) > (clients[j].Up + clients[j].Down)
|
|
|
|
|
|
})
|
|
|
|
|
|
|
|
|
|
|
|
// Step 3: Extract sorted valid emails and track found ones
|
|
|
|
|
|
validEmails := make([]string, 0, len(clients))
|
|
|
|
|
|
found := make(map[string]bool)
|
|
|
|
|
|
for _, client := range clients {
|
|
|
|
|
|
validEmails = append(validEmails, client.Email)
|
|
|
|
|
|
found[client.Email] = true
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Step 4: Identify emails that were not found in the database
|
|
|
|
|
|
extraEmails := make([]string, 0)
|
|
|
|
|
|
for _, email := range emails {
|
|
|
|
|
|
if !found[email] {
|
|
|
|
|
|
extraEmails = append(extraEmails, email)
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
return validEmails, extraEmails, nil
|
|
|
|
|
|
}
|
2025-09-10 16:36:12 +02:00
|
|
|
|
// DelInboundClientByEmail removes the client identified by email from the
// inbound's settings JSON. When no other inbound shares the same email it
// also deletes the client's IP log and traffic stats, and — for an enabled
// client — detaches the user from the running core via the runtime layer.
// It returns whether a core restart is required to apply the change.
func (s *InboundService) DelInboundClientByEmail(inboundId int, email string) (bool, error) {
	oldInbound, err := s.GetInbound(inboundId)
	if err != nil {
		logger.Error("Load Old Data Error")
		return false, err
	}

	var settings map[string]any
	if err := json.Unmarshal([]byte(oldInbound.Settings), &settings); err != nil {
		return false, err
	}

	interfaceClients, ok := settings["clients"].([]any)
	if !ok {
		return false, common.NewError("invalid clients format in inbound settings")
	}

	var newClients []any
	needApiDel := false
	found := false

	// Rebuild the client list without the matching entry; an enabled client
	// must also be removed from the running core later (needApiDel).
	for _, client := range interfaceClients {
		c, ok := client.(map[string]any)
		if !ok {
			continue
		}
		if cEmail, ok := c["email"].(string); ok && cEmail == email {
			// matched client, drop it
			found = true
			needApiDel, _ = c["enable"].(bool)
		} else {
			newClients = append(newClients, client)
		}
	}

	if !found {
		return false, common.NewError(fmt.Sprintf("client with email %s not found", email))
	}
	// An inbound must keep at least one client; refuse to delete the last one.
	if len(newClients) == 0 {
		return false, common.NewError("no client remained in Inbound")
	}

	settings["clients"] = newClients
	newSettings, err := json.MarshalIndent(settings, "", "  ")
	if err != nil {
		return false, err
	}

	oldInbound.Settings = string(newSettings)

	db := database.GetDB()

	// Drop the row and IPs only when this was the last inbound referencing
	// the email — siblings still need the shared accounting state.
	emailShared, err := s.emailUsedByOtherInbounds(email, inboundId)
	if err != nil {
		return false, err
	}

	if !emailShared {
		if err := s.DelClientIPs(db, email); err != nil {
			logger.Error("Error in delete client IPs")
			return false, err
		}
	}

	needRestart := false

	// remove stats too
	// NOTE(review): the core-side removal below only runs when the email is
	// not shared with another inbound — confirm that is intentional.
	if len(email) > 0 && !emailShared {
		traffic, err := s.GetClientTrafficByEmail(email)
		if err != nil {
			return false, err
		}
		if traffic != nil {
			if err := s.DelClientStat(db, email); err != nil {
				logger.Error("Delete stats Data Error")
				return false, err
			}
		}

		if needApiDel {
			rt, rterr := s.runtimeFor(oldInbound)
			if rterr != nil {
				// A remote node without a runtime is fatal; for the local core
				// (NodeID == nil) fall back to requesting a restart instead.
				if oldInbound.NodeID != nil {
					return false, rterr
				}
				needRestart = true
			} else if oldInbound.NodeID == nil {
				// Local core: remove just this user through the runtime API.
				if err1 := rt.RemoveUser(context.Background(), oldInbound, email); err1 == nil {
					logger.Debug("Client deleted on", rt.Name(), ":", email)
					needRestart = false
				} else if strings.Contains(err1.Error(), fmt.Sprintf("User %s not found.", email)) {
					logger.Debug("User is already deleted. Nothing to do more...")
				} else {
					logger.Debug("Error in deleting client on", rt.Name(), ":", err1)
					needRestart = true
				}
			} else {
				// Remote node: push the whole updated inbound instead of a
				// single-user removal.
				if err1 := rt.UpdateInbound(context.Background(), oldInbound, oldInbound); err1 != nil {
					return false, err1
				}
			}
		}
	}

	return needRestart, db.Save(oldInbound).Error
}
|
2026-05-11 15:03:47 +02:00
|
|
|
|
|
|
|
|
|
|
// SubLinkProvider abstracts generation of subscription links so this service
// can expose them without importing the package that implements them.
type SubLinkProvider interface {
	// SubLinksForSubId returns every link belonging to the subscription id.
	SubLinksForSubId(host, subId string) ([]string, error)
	// LinksForClient returns the links for a single client of an inbound.
	LinksForClient(host string, inbound *model.Inbound, email string) []string
}

// registeredSubLinkProvider is the provider installed via
// RegisterSubLinkProvider; nil until registration happens.
var registeredSubLinkProvider SubLinkProvider

// RegisterSubLinkProvider installs the provider used by GetSubLinks and
// GetClientLinks. It is expected to be called once during startup.
func RegisterSubLinkProvider(p SubLinkProvider) {
	registeredSubLinkProvider = p
}
|
|
|
|
|
|
|
|
|
|
|
|
func (s *InboundService) GetSubLinks(host, subId string) ([]string, error) {
|
|
|
|
|
|
if registeredSubLinkProvider == nil {
|
|
|
|
|
|
return nil, common.NewError("sub link provider not registered")
|
|
|
|
|
|
}
|
|
|
|
|
|
return registeredSubLinkProvider.SubLinksForSubId(host, subId)
|
|
|
|
|
|
}
|
|
|
|
|
|
func (s *InboundService) GetClientLinks(host string, id int, email string) ([]string, error) {
|
|
|
|
|
|
inbound, err := s.GetInbound(id)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return nil, err
|
|
|
|
|
|
}
|
|
|
|
|
|
if registeredSubLinkProvider == nil {
|
|
|
|
|
|
return nil, common.NewError("sub link provider not registered")
|
|
|
|
|
|
}
|
|
|
|
|
|
return registeredSubLinkProvider.LinksForClient(host, inbound, email), nil
|
|
|
|
|
|
}
|