sfun committed on
Commit 1ef33a6
1 Parent(s): 75a98c8

Update app.py

Files changed (1)
  1. app.py +45 -81
app.py CHANGED
@@ -7,106 +7,65 @@ import sys
 import traceback
 from aiohttp import web
 from urllib.parse import parse_qs
+from cachetools import TTLCache
+from functools import partial
+
+# 创建一个TTL缓存,最多存储1000个项目,每个项目的有效期为30分钟
+cache = TTLCache(maxsize=1000, ttl=1800)
 
 async def fetch_url(url, session):
     async with session.get(url) as response:
         return await response.text()
 
 async def extract_and_transform_proxies(input_text):
-    try:
-        data = yaml.safe_load(input_text)
-        if isinstance(data, dict) and 'proxies' in data:
-            proxies_list = data['proxies']
-        elif isinstance(data, list):
-            proxies_list = data
-        else:
-            proxies_match = re.search(r'proxies:\s*\n((?:[-\s]*{.*\n?)*)', input_text, re.MULTILINE)
-            if proxies_match:
-                proxies_text = proxies_match.group(1)
-                proxies_list = yaml.safe_load(proxies_text)
-            else:
-                return "未找到有效的代理配置"
-    except yaml.YAMLError:
-        return "YAML解析错误"
-
-    if not proxies_list:
-        return "未找到有效的代理配置"
-
-    transformed_proxies = []
-
-    for proxy in proxies_list:
-        if proxy.get('type') == 'ss':
-            name = proxy.get('name', '').strip()
-            server = proxy.get('server', '').strip()
-            port = str(proxy.get('port', '')).strip()
-
-            ss_parts = [f"{name} = ss, {server}, {port}"]
-
-            if 'cipher' in proxy:
-                ss_parts.append(f"encrypt-method={proxy['cipher'].strip()}")
-            if 'password' in proxy:
-                ss_parts.append(f"password={proxy['password'].strip()}")
-            if 'udp' in proxy:
-                ss_parts.append(f"udp-relay={'true' if proxy['udp'] in [True, 'true', 'True'] else 'false'}")
+    # 使用正则表达式提取代理信息
+    pattern = r'([a-zA-Z0-9+/=]+)(?:@|:\/\/)([^:]+):(\d+)'
+    matches = re.findall(pattern, input_text)
 
-            transformed = ", ".join(ss_parts)
-            transformed_proxies.append(transformed)
+    proxies = []
+    for match in matches:
+        encoded_info, server, port = match
+        try:
+            decoded_info = base64.b64decode(encoded_info).decode('utf-8')
+            method, password = decoded_info.split(':')
+            proxy = {
+                'name': f'{server}:{port}',
+                'type': 'ss',
+                'server': server,
+                'port': int(port),
+                'cipher': method,
+                'password': password
+            }
+            proxies.append(proxy)
+        except:
+            continue
 
-        elif proxy.get('type') == 'trojan':
-            name = proxy.get('name', '').strip()
-            server = proxy.get('server', '').strip()
-            port = str(proxy.get('port', '')).strip()
-
-            trojan_parts = [f"{name} = trojan, {server}, {port}"]
-
-            if 'password' in proxy:
-                trojan_parts.append(f"password={proxy['password'].strip()}")
-            if 'sni' in proxy:
-                trojan_parts.append(f"sni={proxy['sni'].strip()}")
-            if 'skip-cert-verify' in proxy:
-                trojan_parts.append(f"skip-cert-verify={str(proxy['skip-cert-verify']).lower()}")
-            if 'udp' in proxy:
-                trojan_parts.append(f"udp={'true' if proxy['udp'] in [True, 'true', 'True'] else 'false'}")
-
-            transformed = ", ".join(trojan_parts)
-            transformed_proxies.append(transformed)
-
-    return "\n".join(transformed_proxies) if transformed_proxies else "未找到有效的SS或Trojan代理配置"
+    # 转换为YAML格式
+    yaml_data = yaml.dump({'proxies': proxies}, allow_unicode=True)
+    return yaml_data
 
 async def log_request(request, response):
-    timestamp = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
-    client_ip = request.remote
-    request_line = f"{request.method} {request.path}"
-    if request.query_string:
-        request_line += f"?{request.query_string}"
-    status_code = response.status
-    content_length = response.content_length
-
-    log_message = f"{timestamp} - {client_ip} - \"{request_line}\" {status_code} {content_length}"
-    print(log_message, flush=True)
+    print(f"{datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')} - "
+          f"Request: {request.method} {request.path} - "
+          f"Response: {response.status}", flush=True)
 
 @web.middleware
 async def logging_middleware(request, handler):
-    start_time = datetime.datetime.now()
-    try:
-        response = await handler(request)
-        await log_request(request, response)
-        end_time = datetime.datetime.now()
-        print(f"Request processing time: {end_time - start_time}", flush=True)
-        return response
-    except Exception as e:
-        end_time = datetime.datetime.now()
-        print(f"Error occurred: {str(e)}", flush=True)
-        print(f"Request processing time: {end_time - start_time}", flush=True)
-        print("Traceback:", flush=True)
-        traceback.print_exc()
-        return web.Response(text=f"Internal Server Error: {str(e)}", status=500)
+    response = await handler(request)
+    await log_request(request, response)
+    return response
 
 async def handle_request(request):
     if request.path == '/':
         query_params = parse_qs(request.query_string)
         if 'url' in query_params:
             url = query_params['url'][0]
+            force_refresh = 'nocache' in query_params
+
+            if not force_refresh and url in cache:
+                print(f"Cache hit for URL: {url}", flush=True)
+                return web.Response(text=cache[url], content_type='text/plain')
+
             try:
                 print(f"Fetching URL: {url}", flush=True)
                 async with aiohttp.ClientSession() as session:
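For context on the rewritten extract_and_transform_proxies in the hunk above: instead of parsing a YAML `proxies:` list and emitting `name = ss/trojan, server, port, ...` lines, it now scans the fetched text for entries of the form base64(method:password) joined to server:port by "@" or "://", then dumps the matches as a `proxies:` YAML block. A minimal standalone sketch of that decode path, with a made-up server address and credentials, might look like this:

import base64
import re

import yaml

# Hypothetical input entry: base64("method:password") followed by "@server:port".
encoded = base64.b64encode(b"aes-256-gcm:password123").decode()
sample_text = f"{encoded}@192.0.2.10:8388"

pattern = r'([a-zA-Z0-9+/=]+)(?:@|:\/\/)([^:]+):(\d+)'
proxies = []
for encoded_info, server, port in re.findall(pattern, sample_text):
    try:
        # split(':') assumes the password itself contains no colon
        method, password = base64.b64decode(encoded_info).decode('utf-8').split(':')
    except Exception:
        continue  # skip anything that is not a valid base64 "method:password" pair
    proxies.append({
        'name': f'{server}:{port}',
        'type': 'ss',
        'server': server,
        'port': int(port),
        'cipher': method,
        'password': password,
    })

print(yaml.dump({'proxies': proxies}, allow_unicode=True))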
@@ -114,6 +73,10 @@ async def handle_request(request):
                 print(f"URL content length: {len(input_text)}", flush=True)
                 result = await extract_and_transform_proxies(input_text)
                 print(f"Transformed result length: {len(result)}", flush=True)
+
+                # 将结果存入缓存
+                cache[url] = result
+
                 return web.Response(text=result, content_type='text/plain')
             except Exception as e:
                 print(f"Error processing request: {str(e)}", flush=True)
@@ -126,6 +89,7 @@ async def handle_request(request):
             <h1>代理配置转换工具</h1>
             <p>使用方法:在URL参数中提供包含代理配置的网址。</p>
             <p>示例:<code>http://localhost:8080/?url=https://example.com/path-to-proxy-config</code></p>
+            <p>强制刷新缓存:<code>http://localhost:8080/?url=https://example.com/path-to-proxy-config&nocache</code></p>
             </body>
             </html>
             """
 