Compare commits

..

17 Commits

Author SHA1 Message Date
panleicim baa316f272 request_sender.py optimization 2026-02-18 11:56:15 +01:00
panleicim 385bda4557 request_sender.py optimization 2026-02-17 08:56:57 +01:00
panleicim 945388bdb5 Merge branch 'master' of gitlab.lpaconsulting.fr:panleicim/appointment_request 2025-12-27 10:16:28 +01:00
Lei PAN ae7c6c13fb Merge branch 'master' of gitlab.lpaconsulting.fr:panleicim/appointment_request 2025-12-21 23:11:11 +01:00
Lei PAN 4e174a02aa Merge branch 'master' of git.lpaconsulting.fr:panleicim/appointment_request 2025-12-21 23:10:06 +01:00
Lei PAN 0719dbdf89 migrate 2 queues to bak queue 2025-12-21 23:09:46 +01:00
panleicim c487defeed Merge branch 'master' of gitlab.lpaconsulting.fr:panleicim/appointment_request 2025-12-21 11:11:43 +01:00
panleicim 717896d92e use contact_list_2025-11-28 contact list 2025-12-21 11:10:24 +01:00
panleicim 6bc925a06b migrate moring_data_cache_2 too 2025-12-20 20:39:34 +01:00
panleicim d7e96b0b08 Merge branch 'feature/5_1_12' 2025-12-20 11:20:56 +01:00
panleicim 2073dd2377 use mongo2 db 2025-12-17 11:18:46 +01:00
panleicim 6438e4dbd3 added script to backup db 2025-12-16 09:56:04 +01:00
panleicim c7a619503b define PROXY_TIMEOUT_S 2025-12-13 10:29:23 +01:00
panleicim f591ff14ac use 5.1.12 tags.js 2025-12-12 14:50:23 +01:00
panleicim 0a7657f716 Merge branch 'refs/heads/feature/get_ip_geo_info' 2025-12-10 23:36:55 +01:00
panleicim 9085a3c1c4 use 100 threads to read mails 2025-12-09 10:30:43 +01:00
panleicim 605bc8b252 use requests instead of curl_cffi 2025-12-01 15:52:03 +01:00
12 changed files with 236 additions and 177 deletions
+1
View File
@@ -5,3 +5,4 @@ __pycache__/
db/__pycache__/*
*.iml
venv
.env
+2 -2
View File
@@ -11,7 +11,7 @@ from models.contact_pojo import ContactPojo
from models.mail_pojo import MailAddress
from models.regisered_user_pojo import RegisteredUserPojo
MONGO_DB_URL = "mongodb://mongo.lpaconsulting.fr/?timeoutMS=100000"
MONGO_DB_URL = "mongodb://mongo2.lpaconsulting.fr/?timeoutMS=100000"
CAPTCHA_ERROR_COLLECTION_PREFIX = "CAPTCHA_ERROR_"
BLACK_LIST = "BLACK_LIST"
ACCEPTED_APPOINTMENT_LIST = "ACCEPTED_APPOINTMENT_LIST"
@@ -25,7 +25,7 @@ CONTACT_LIST_SERIAL_MAP = "CONTACT_LIST_SERIAL_MAP"
class MongoDbManager:
def __init__(self):
client = MongoClient(MONGO_DB_URL, username='appointment', password='Rdv@2022', authSource='appointment')
client = MongoClient(MONGO_DB_URL, username='appointment', password='Rdv@20222021', authSource='appointment')
self.db = client.appointment
self.logger = logging.getLogger("mongoDb")
+4 -2
View File
@@ -363,7 +363,7 @@ def find_links_to_validate_from_mail_list(mail_list: List[MailAddress], logger)
mails_messages = []
# 使用线程池处理邮件
with ThreadPoolExecutor(max_workers=20) as executor:
with ThreadPoolExecutor(max_workers=100) as executor:
futures = []
for mail in mail_list:
@@ -411,7 +411,9 @@ def find_links_to_validate_from_mail_list(mail_list: List[MailAddress], logger)
if __name__ == '__main__':
# 读取联系人列表
contact_to_book_list = read_contacts(
file_name="~/Desktop/contact_list_2025-11-06.xlsx")
file_name="~/Desktop/contact_list_2025-11-28.xlsx")
# file_name="~/Desktop/contact_list_all.xlsx")
# file_name="~/Desktop/contact_list_2025-11-06.xlsx")
# 获取目标邮箱列表
all_mail_list = MONGO_STORE_MANAGER.get_destination_emails()
+3 -3
View File
@@ -4,10 +4,10 @@ from queue_message.CookiesPublisher import MORNING_DATA_CACHE_2, MORNING_DATA_CA
from workers.MessagerTransporter import migrate_message_to_queue
if __name__ == '__main__':
# p1 = Process(target=migrate_message_to_queue, args=(MORNING_DATA_CACHE_2, MORNING_DATA_CACHE_BAK))
# p1.start()
p1 = Process(target=migrate_message_to_queue, args=(MORNING_DATA_CACHE_2, MORNING_DATA_CACHE_BAK))
p1.start()
p2 = Process(target=migrate_message_to_queue, args=(MORNING_DATA_CACHE, MORNING_DATA_CACHE_BAK))
p2.start()
# p2.join()
p2.join()
# migrate_message_to_queue(from_queue=MORNING_DATA_CACHE_2)
# migrate_message_to_queue(from_queue=MORNING_DATA_CACHE)
+74 -45
View File
@@ -1,54 +1,40 @@
import datetime
import logging
import random
import sys
from threading import Thread
from db.mongo_manager import MONGO_STORE_MANAGER
from excel_reader import read_contacts
from models.contact_pojo import ContactPojo
from queue_message.CookiesPublisher import CookiesPublisher, SHARED_OBJECT, TEST_QUEUE, MORNING_DATA_CACHE_2, \
MORNING_DATA_CACHE
from queue_message.CookiesPublisher import CookiesPublisher, SHARED_OBJECT, TEST_QUEUE, MORNING_DATA_CACHE, \
MORNING_DATA_CACHE_2, MORNING_DATA_CACHE_BAK
from queue_message.appointmentrequestsender import AppointmentRequestSender
from utiles import is_time_between
from utils.AppLogging import init_logger
from workers.proxies_constants import MOBILE_PROXY_LIST_FR
IPFIY = 'http://api.ipify.org'
NGROK_TEST = "https://bcc6-193-164-156-53.ngrok-free.app"
def is_already_sent(contact: ContactPojo) -> bool:
already_sent_contacts = MONGO_STORE_MANAGER.get_all_successful_items_for_day()
for required_contact in already_sent_contacts:
if contact.mail == required_contact.email:
return True
return False
def filter_contacts(_contact_list: list) -> list:
already_sent_contacts = MONGO_STORE_MANAGER.get_all_successful_items_for_day()
_link_to_validate_list = MONGO_STORE_MANAGER.get_links_to_validate()
# Optimization: Use sets for O(1) lookup complexity
sent_emails = {booked.email for booked in already_sent_contacts}
validate_emails = {link.email for link in _link_to_validate_list}
_contact_list_to_book = []
for contact in _contact_list:
_to_add = True
for booked in already_sent_contacts:
if contact.mail == booked.email:
_to_add = False
if contact.mail in sent_emails:
continue
# 如果已经收到链接了,就不要再请求
for link_to_validate in _link_to_validate_list:
if contact.mail == link_to_validate.email:
if contact.mail in validate_emails:
logger.info("{}: link already received".format(contact.mail))
_to_add = False
if _to_add:
continue
_contact_list_to_book.append(contact)
return _contact_list_to_book
def is_open():
return is_time_between(datetime.time(10, 30), datetime.time(19, 00))
count = 0
init_logger()
logger = logging.getLogger()
@@ -56,36 +42,79 @@ logger = logging.getLogger()
logger.addHandler(logging.StreamHandler(stream=sys.stdout))
def send_appointment_request(message_queue_name, _contact_list):
def send_appointment_request(message_queue_name, _contact_list, stop_at_hour=11, stop_at_mins=30):
global count
count = count + 1
for _contact in _contact_list:
logger.info(_contact)
_cookiesPublisher = CookiesPublisher(queue_name=message_queue_name)
_cookiesPublisher.set_up_connection()
receiver = AppointmentRequestSender(sub_contact_list=_contact_list, queue_name=message_queue_name,
cookiesPublisher=_cookiesPublisher, logger=logger)
_backUp_cookiesPublisher = CookiesPublisher(queue_name=MORNING_DATA_CACHE_BAK)
_backUp_cookiesPublisher.set_up_connection()
receiver = AppointmentRequestSender(sub_contact_list=_contact_list,
queue_name=message_queue_name,
cookiesPublisher=_cookiesPublisher,
bakeUpCookiesPublisher=_backUp_cookiesPublisher, logger=logger,
stop_at_hour=stop_at_hour, stop_at_mins=stop_at_mins)
print("count is " + str(count))
receiver.run()
def start_send_requests():
def start_send_requests(thread_number, contact_list, data_queue_name=MORNING_DATA_CACHE, stop_at_hour=14,
stop_at_mins=56):
print("start send requests")
contacts_file_path = '~/Desktop/06_05_to_test.xlsx'
_contact_list = read_contacts(contacts_file_path)[:1]
_contact_list_to_book = filter_contacts(_contact_list)
_segment_number = 1
logger.info("{} contacts to book".format(len(_contact_list_to_book)))
last_thread = None
for i in range(0, _segment_number):
_contact_list_to_book = filter_contacts(contact_list)
_segment_number = thread_number
total_contacts = len(_contact_list_to_book)
logger.info("{} contacts to book".format(total_contacts))
if total_contacts == 0:
return
# Optimization: Better distribution of contacts among threads
thread_list = []
chunk_size = total_contacts // _segment_number
remainder = total_contacts % _segment_number
start_index = 0
for i in range(_segment_number):
# If we have more threads than contacts, some threads will get empty lists, which is fine
if start_index >= total_contacts:
break
logger.info("segment is {}".format(i))
_step = int(len(_contact_list_to_book) / _segment_number)
_sublist = _contact_list_to_book[i * _step:_step * (i + 1)]
_thread1 = Thread(target=send_appointment_request, args=(MORNING_DATA_CACHE, _sublist))
last_thread = _thread1
# Distribute remainder to the first few threads
current_chunk_size = chunk_size + (1 if i < remainder else 0)
end_index = start_index + current_chunk_size
_sublist = _contact_list_to_book[start_index:end_index]
start_index = end_index
if _sublist:
_thread1 = Thread(target=send_appointment_request, args=(data_queue_name, _sublist, stop_at_hour, stop_at_mins))
thread_list.append(_thread1)
_thread1.start()
last_thread.join()
for _thread in thread_list:
_thread.join()
def send_request_for_file_list(file_list: list, thread_number: int = 20, data_queue_name=MORNING_DATA_CACHE,
stop_at_hour=11, stop_at_mins=30):
logger.info("stop_at_hour is " + str(stop_at_hour) + " stop_at_mins is " + str(stop_at_mins))
for _file_path in file_list:
logger.info("send request for file: " + _file_path)
_contact_list = read_contacts(_file_path)
random.shuffle(_contact_list)
start_send_requests(thread_number=thread_number, contact_list=_contact_list,
data_queue_name=data_queue_name, stop_at_hour=stop_at_hour, stop_at_mins=stop_at_mins)
if __name__ == '__main__':
start_send_requests()
# file_list = ['~/Desktop/contact_list_2024-05-23.xlsx',
# '~/Desktop/contact_list_2024-05-21.xlsx',
# file_list = ['~/Desktop/contact_list_2025-10-30.xlsx']
file_list = ['~/Desktop/contact_list_2025-11-28.xlsx']
send_request_for_file_list(file_list=file_list, thread_number=10,
data_queue_name=MORNING_DATA_CACHE_2, stop_at_hour=19, stop_at_mins=50)
-109
View File
@@ -1,109 +0,0 @@
import datetime
import logging
import random
import sys
from threading import Thread
from db.mongo_manager import MONGO_STORE_MANAGER
from excel_reader import read_contacts
from models.contact_pojo import ContactPojo
from queue_message.CookiesPublisher import CookiesPublisher, SHARED_OBJECT, TEST_QUEUE, MORNING_DATA_CACHE, \
MORNING_DATA_CACHE_2, MORNING_DATA_CACHE_BAK
from queue_message.appointmentrequestsender import AppointmentRequestSender
from utils.AppLogging import init_logger
def is_already_sent(contact: ContactPojo) -> bool:
    """Return True if *contact*'s mail address already has a successful
    appointment request recorded for today in the Mongo store."""
    already_sent_contacts = MONGO_STORE_MANAGER.get_all_successful_items_for_day()
    # any() short-circuits on the first match instead of a manual flag loop.
    return any(contact.mail == sent.email for sent in already_sent_contacts)
def filter_contacts(_contact_list: list) -> list:
    """Drop contacts that already got a successful request today, or that
    already received a validation link.

    Returns the remaining contacts with their original order preserved.
    """
    already_sent_contacts = MONGO_STORE_MANAGER.get_all_successful_items_for_day()
    _link_to_validate_list = MONGO_STORE_MANAGER.get_links_to_validate()
    # Build lookup sets once: O(1) membership tests instead of rescanning
    # both result lists for every contact (was O(n*m)).
    sent_emails = {booked.email for booked in already_sent_contacts}
    validate_emails = {link.email for link in _link_to_validate_list}
    _contact_list_to_book = []
    for contact in _contact_list:
        if contact.mail in sent_emails:
            continue
        # Skip contacts whose validation link was already received.
        if contact.mail in validate_emails:
            logger.info("{}: link already received".format(contact.mail))
            continue
        _contact_list_to_book.append(contact)
    return _contact_list_to_book
# Number of sender threads started so far (incremented in
# send_appointment_request; diagnostic only).
count = 0
init_logger()
logger = logging.getLogger()
# Mirror log output to stdout in addition to the configured handlers.
logger.addHandler(logging.StreamHandler(stream=sys.stdout))
def send_appointment_request(message_queue_name, _contact_list, stop_at_hour=11, stop_at_mins=30):
    """Thread worker: send appointment requests for *_contact_list*.

    Sets up a publisher on *message_queue_name* plus a backup publisher on
    MORNING_DATA_CACHE_BAK, then runs an AppointmentRequestSender over the
    contacts. stop_at_hour/stop_at_mins are forwarded to the sender —
    presumably a wall-clock cutoff enforced inside it (confirm in
    AppointmentRequestSender).
    """
    global count
    # Count how many worker threads have been started (diagnostic only).
    count = count + 1
    for _contact in _contact_list:
        logger.info(_contact)
    _cookiesPublisher = CookiesPublisher(queue_name=message_queue_name)
    _cookiesPublisher.set_up_connection()
    _backUp_cookiesPublisher = CookiesPublisher(queue_name=MORNING_DATA_CACHE_BAK)
    _backUp_cookiesPublisher.set_up_connection()
    receiver = AppointmentRequestSender(sub_contact_list=_contact_list,
                                        queue_name=message_queue_name,
                                        cookiesPublisher=_cookiesPublisher,
                                        bakeUpCookiesPublisher=_backUp_cookiesPublisher, logger=logger,
                                        stop_at_hour=stop_at_hour, stop_at_mins=stop_at_mins)
    print("count is " + str(count))
    receiver.run()
def start_send_requests(thread_number, contact_list, data_queue_name=MORNING_DATA_CACHE, stop_at_hour=14,
                        stop_at_mins=56):
    """Filter *contact_list* and fan the remaining contacts out over up to
    *thread_number* sender threads, then wait for all of them to finish.

    Bug fix: the previous `int(len / segments)` slicing silently dropped the
    last `len % thread_number` contacts, and when there were fewer contacts
    than threads the step was 0 so NO contacts were processed at all. The
    remainder is now spread over the first threads instead.
    """
    print("start send requests")
    _contact_list_to_book = filter_contacts(contact_list)
    total_contacts = len(_contact_list_to_book)
    logger.info("{} contacts to book".format(total_contacts))
    if total_contacts == 0:
        return
    chunk_size, remainder = divmod(total_contacts, thread_number)
    thread_list = []
    start_index = 0
    for i in range(thread_number):
        # Stop once every contact has been assigned (more threads than contacts).
        if start_index >= total_contacts:
            break
        logger.info("segment is {}".format(i))
        # The first `remainder` threads each take one extra contact.
        end_index = start_index + chunk_size + (1 if i < remainder else 0)
        _sublist = _contact_list_to_book[start_index:end_index]
        start_index = end_index
        if _sublist:
            _thread1 = Thread(target=send_appointment_request,
                              args=(data_queue_name, _sublist, stop_at_hour, stop_at_mins))
            thread_list.append(_thread1)
            _thread1.start()
    # Wait for every started worker, not only the last one.
    for _thread in thread_list:
        _thread.join()
def send_request_for_file_list(file_list: list, thread_number: int = 20, data_queue_name=MORNING_DATA_CACHE,
                               stop_at_hour=11, stop_at_mins=30):
    """Read each contacts spreadsheet in *file_list* and run the threaded
    sender (start_send_requests) over its rows.

    stop_at_hour/stop_at_mins are forwarded down to the sender threads.
    """
    logger.info("stop_at_hour is " + str(stop_at_hour) + " stop_at_mins is " + str(stop_at_mins))
    for _file_path in file_list:
        logger.info("send request for file: " + _file_path)
        _contact_list = read_contacts(_file_path)
        # Shuffle the contacts — presumably so repeated runs don't always hit
        # the same contacts first; confirm intent with the author.
        random.shuffle(_contact_list)
        start_send_requests(thread_number=thread_number, contact_list=_contact_list,
                            data_queue_name=data_queue_name, stop_at_hour=stop_at_hour, stop_at_mins=stop_at_mins)
if __name__ == '__main__':
    # Earlier input files kept commented out for reference.
    # file_list = ['~/Desktop/contact_list_2024-05-23.xlsx',
    #              '~/Desktop/contact_list_2024-05-21.xlsx',
    #              '~/Desktop/15_05_to_test.xlsx']
    # file_list = ['~/Desktop/contact_list_2025-10-30.xlsx']
    file_list = ['~/Desktop/contact_list_2025-11-06.xlsx']
    # file_list = ['~/Desktop/contact_list_all.xlsx']
    # file_list = ['~/Desktop/contact_list_2025-09-08.xlsx']
    # file_list = ['~/Desktop/real_name_contacts_100_aol_17_04.xlsx']
    send_request_for_file_list(file_list=file_list, thread_number=30,
                               data_queue_name=MORNING_DATA_CACHE, stop_at_hour=19, stop_at_mins=50)
+2 -2
View File
@@ -5,8 +5,8 @@ from link_validator_executor import start_link_validation
def start_check_results_job(sched):
sched.add_job(start_link_validation, 'cron', day_of_week='mon-sat', hour='13',
minute='48',
sched.add_job(start_link_validation, 'cron', day_of_week='mon-sun', hour='14',
minute='10',
misfire_grace_time=10,
second='10', timezone='Europe/Paris', max_instances=1, args=[])
+3 -3
View File
@@ -7,13 +7,13 @@ from request_sender_test import send_request_for_file_list
def start_book_appointment():
# file_list = ['~/Desktop/contact_list_2025-09-08.xlsx']
file_list = ['~/Desktop/contact_list_2025-11-06.xlsx']
send_request_for_file_list(file_list=file_list, thread_number=70,
file_list = ['~/Desktop/contact_list_2025-11-28.xlsx']
send_request_for_file_list(file_list=file_list, thread_number=73,
data_queue_name=MORNING_DATA_CACHE, stop_at_hour=11, stop_at_mins=10)
def start_check_results_job(sched):
sched.add_job(start_book_appointment, 'cron', day_of_week='mon-sat', hour='10',
sched.add_job(start_book_appointment, 'cron', day_of_week='mon-sun', hour='10',
minute='30',
misfire_grace_time=10,
second='10', timezone='Europe/Paris', max_instances=1, args=[])
+134
View File
@@ -0,0 +1,134 @@
import subprocess
import os
import datetime
import sys
from db.mongo_manager import MONGO_DB_URL
# ================= Configuration =================
# Database connection settings.
# NOTE(security): credentials used to be hardcoded literals in this file.
# They now fall back to the same values (backward compatible) but can be
# overridden via environment variables so the password need not live in git.
MONGO_HOST = os.environ.get("MONGO_HOST", "mongo.lpaconsulting.fr")
MONGO_PORT = os.environ.get("MONGO_PORT", "27017")
MONGO_DB_NAME = os.environ.get("MONGO_DB_NAME", "appointment")  # database to back up / restore
MONGO_USER = os.environ.get("MONGO_USER", "appointment")  # keep "" if auth is disabled
MONGO_PASS = os.environ.get("MONGO_PASS", "Rdv@2022")  # keep "" if auth is disabled
# Root directory under which timestamped backup folders are created.
BACKUP_DIR_ROOT = os.environ.get("BACKUP_DIR_ROOT", "./mongo_backups")
# ===========================================
def get_auth_args():
    """Return the CLI authentication arguments for the mongo tools.

    Yields an empty list when no username/password is configured, so the
    result can always be appended to a command unconditionally.
    """
    if not (MONGO_USER and MONGO_PASS):
        return []
    return [
        "--username", MONGO_USER,
        "--password", MONGO_PASS,
        "--authenticationDatabase", "appointment",
    ]
def backup_mongo():
    """Dump MONGO_DB_NAME with mongodump into a timestamped folder.

    Returns:
        The backup directory path on success, or None when mongodump fails.
    """
    # 1. Create a backup folder named after the current timestamp.
    timestamp = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    backup_path = os.path.join(BACKUP_DIR_ROOT, timestamp)
    if not os.path.exists(backup_path):
        os.makedirs(backup_path)
    print(f"[*] 开始备份数据库: {MONGO_DB_NAME}{backup_path} ...")
    # 2. Build the mongodump command.
    # Format: mongodump --host <host> --port <port> --db <db> --out <path> [auth]
    cmd = [
        "mongodump",
        "--host", MONGO_HOST,
        "--port", MONGO_PORT,
        "--db", MONGO_DB_NAME,
        "--out", backup_path
    ]
    # Append authentication arguments (empty list when auth is not configured).
    cmd.extend(get_auth_args())
    try:
        # 3. Run the external tool; check=True raises on non-zero exit.
        result = subprocess.run(cmd, check=True, text=True, capture_output=True)
        print(f"[+] 备份成功!")
        print(f" 存储路径: {backup_path}")
        print(f" 日志: {result.stderr}")  # mongodump normally writes its progress to stderr
        return backup_path
    except subprocess.CalledProcessError as e:
        print(f"[-] 备份失败: {e}")
        print(f" 错误信息: {e.stderr}")
        return None
def restore_mongo(backup_source_path):
    """Restore MONGO_DB_NAME from a previous mongodump backup.

    Args:
        backup_source_path: path of one timestamped backup folder
            (e.g. ./mongo_backups/2023-10-27_10-00-00).
    """
    # mongodump's output layout is backup_dir/db_name/collection.bson, so we
    # either point at the database subfolder or pass the parent with --db.
    target_dir = os.path.join(backup_source_path, MONGO_DB_NAME)
    if not os.path.exists(target_dir):
        print(f"[-] 错误: 在路径 {backup_source_path} 下找不到数据库 {MONGO_DB_NAME} 的备份文件。")
        return
    print(f"[*] 开始恢复数据库: {MONGO_DB_NAME}{target_dir} ...")
    # Build the mongorestore command.
    # Format: mongorestore --host <host> --port <port> --db <db> <path_to_bson_files> [auth]
    cmd = [
        "mongorestore",
        "--host", MONGO_HOST,
        "--port", MONGO_PORT,
        "--db", MONGO_DB_NAME,
        "--drop",  # WARNING: drops existing collections before restoring; remove if undesired.
        target_dir
    ]
    cmd.extend(get_auth_args())
    try:
        result = subprocess.run(cmd, check=True, text=True, capture_output=True)
        print(f"[+] 恢复成功!")
        print(f" 日志: {result.stderr}")
    except subprocess.CalledProcessError as e:
        print(f"[-] 恢复失败: {e}")
        print(f" 错误信息: {e.stderr}")
# ================= Entry point =================
if __name__ == "__main__":
    # Minimal interactive menu: 1 = backup, 2 = restore.
    print("请选择操作:")
    print("1. 备份数据库 (Backup)")
    print("2. 恢复数据库 (Restore)")
    choice = input("请输入数字 (1/2): ").strip()
    if choice == "1":
        backup_mongo()
    elif choice == "2":
        # List every existing backup and let the user pick one by number.
        if not os.path.exists(BACKUP_DIR_ROOT):
            print("[-] 没有找到备份目录。")
        else:
            backups = sorted(os.listdir(BACKUP_DIR_ROOT))
            if not backups:
                print("[-] 目录为空。")
            else:
                print("\n可用备份:")
                for idx, name in enumerate(backups):
                    print(f"{idx + 1}. {name}")
                try:
                    # Menu is 1-based; convert back to a list index.
                    idx_choice = int(input("\n请选择要恢复的备份编号: ")) - 1
                    if 0 <= idx_choice < len(backups):
                        selected_backup = os.path.join(BACKUP_DIR_ROOT, backups[idx_choice])
                        restore_mongo(selected_backup)
                    else:
                        print("[-] 无效的选择。")
                except ValueError:
                    print("[-] 请输入数字。")
    else:
        print("[-] 无效输入,退出。")
+6 -6
View File
@@ -14,13 +14,13 @@ from models.jsdata_pojo import JsDataPojo
from models.result_pojo import RequestResult
from utils.get_only_datadome_cookies import get_datadome_cookies, get_app_cookies, get_lang_cookies, \
retain_only_dataome_cookies
from workers.proxy_constants import PROXY_TIMEOUT_S
API_KEY = "d66aaf490d8aa424a5175e1fbd1aadea"
HOST_ADDRESS = "https://api.2captcha.com/createTask"
HERMES_REGISTER = "https://rendezvousparis.hermes.com/client/register"
class CaptchaResultGetter:
def __init__(self):
@@ -51,7 +51,7 @@ class CaptchaResultGetter:
print(proxy_to_use)
try:
response = requests.get(url=HERMES_REGISTER, headers=headers, verify=False, proxies=proxy_to_use,
timeout=15)
timeout=PROXY_TIMEOUT_S)
print(response.status_code)
if response.status_code == 200:
print(response.text)
@@ -94,7 +94,7 @@ class CaptchaResultGetter:
raw_data = self.get_le_type_raw_data(old_valid_cookie=old_valid_cookie, js_le_type_data=js_le_type_data)
response = requests.post(url="https://d.digital.hermes/js/", headers=headers, verify=False,
data=raw_data,
proxies=proxy_to_use, timeout=15)
proxies=proxy_to_use, timeout=PROXY_TIMEOUT_S)
# print(response.status_code)
if response.status_code == 200:
print(response.text)
@@ -109,7 +109,7 @@ class CaptchaResultGetter:
return None
def get_ch_raw_data_from_js_data(self, js_data: JsDataPojo, old_valid_cookie) -> str:
_tag_version = "5.1.9"
_tag_version = "5.1.12"
_jspl = encrpte_to_jspl(js_data.to_url_encoded_json())
_raw_data = "jspl={}&eventCounters=%5B%5D&jsType=ch&cid={}&ddk=789361B674144528D0B7EE76B35826&Referer=https%253A%252F%252Frendezvousparis.hermes.com%252Fclient%252Fregister&request=%252Fclient%252Fregister&responsePage=origin&ddv={}".format(
_jspl, old_valid_cookie, _tag_version)
@@ -149,7 +149,7 @@ class CaptchaResultGetter:
response = requests.post(url="https://d.digital.hermes/js/", headers=headers, verify=False,
data=self.get_ch_raw_data_from_js_data(js_data=js_data,
old_valid_cookie=_cookies_to_use),
proxies=proxy_to_use, timeout=15)
proxies=proxy_to_use, timeout=PROXY_TIMEOUT_S)
print(response.status_code)
if response.status_code == 200:
print(response.text)
@@ -180,7 +180,7 @@ class CaptchaResultGetter:
# old_valid_cookie=old_valid_cookie)
_cid = get_datadome_cookies(old_valid_cookie)
_jspl = encrpte_to_jspl(js_le_type_data.to_url_encoded_json())
_raw_data = "jsData={}&eventCounters=%7B%22mousemove%22%3A{}%2C%22click%22%3A{}%2C%22scroll%22%3A{}%2C%22touchstart%22%3A{}%2C%22touchend%22%3A{}%2C%22touchmove%22%3A{}%2C%22keydown%22%3A{}%2C%22keyup%22%3A{}%7D&jsType=le&cid={}&ddk=789361B674144528D0B7EE76B35826&Referer=https%253A%252F%252Frendezvousparis.hermes.com%252Fclient%252Fregister&request=%252Fclient%252Fregister&responsePage=origin&ddv=5.1.9".format(
_raw_data = "jsData={}&eventCounters=%7B%22mousemove%22%3A{}%2C%22click%22%3A{}%2C%22scroll%22%3A{}%2C%22touchstart%22%3A{}%2C%22touchend%22%3A{}%2C%22touchmove%22%3A{}%2C%22keydown%22%3A{}%2C%22keyup%22%3A{}%7D&jsType=le&cid={}&ddk=789361B674144528D0B7EE76B35826&Referer=https%253A%252F%252Frendezvousparis.hermes.com%252Fclient%252Fregister&request=%252Fclient%252Fregister&responsePage=origin&ddv=5.1.12".format(
_jspl, mousemove_count, click_count, scroll_count, touch_count, touch_count,
touch_move,
key_count,
+1
View File
@@ -0,0 +1 @@
# Timeout in seconds applied to HTTP requests sent through the proxies
# (used as requests' `timeout=` argument by the workers in this compare).
PROXY_TIMEOUT_S = 20
+2 -1
View File
@@ -13,6 +13,7 @@ from models.result_pojo import RequestResult
from queue_message.CookiesPublisher import CookiesPublisher
from utils.address_ip import get_address_ip
from workers.captcha_result_getter import CaptchaResultGetter
from workers.proxy_constants import PROXY_TIMEOUT_S
def get_chrome_version_from_ua(ua):
@@ -206,7 +207,7 @@ class Sender:
proxy_to_use = self.proxy_to_use
# print(proxy_to_use)
response = requests.post(url=url, proxies=proxy_to_use, verify=False, headers=headers, data=data,
timeout=15, allow_redirects=False, impersonate="chrome99_android")
timeout=PROXY_TIMEOUT_S, allow_redirects=False, impersonate="chrome99_android")
self.logger.info(response.status_code)
if response.status_code == 302:
# add to mongodb