import requests
import warnings

# Suppress the InsecureRequestWarning that requests/urllib3 emits because
# verify=False is used below (TLS verification is disabled so traffic can be
# routed through an intercepting proxy such as Burp).
warnings.filterwarnings("ignore")
class HTBClient:
    def __init__(self, password):
        # `password` holds the HTB API token, sent as a Bearer token on every request
        self.password = password
        self.base_url = 'https://labs.hackthebox.com/api/v4'
        self.proxies = {
            #"http": "http://127.0.0.1:8080",   # Burp proxy for HTTP traffic
            #"https": "http://127.0.0.1:8080"   # Burp proxy for HTTPS traffic
        }
        # Fetch user info
        self.user = self.get_user_info()
        # Fetch user stats and store them in self.user
        user_owns, root_owns, respects = self.get_user_stats(self.user['id'])
        self.user['user_owns'] = user_owns
        self.user['root_owns'] = root_owns
        self.user['respects'] = respects
    def get_user_info(self):
        headers = {
            "Authorization": f"Bearer {self.password}",
            "User-Agent": None  # Explicitly remove User-Agent
        }
        response = requests.get(
            f'{self.base_url}/user/info',
            headers=headers,
            proxies=self.proxies,
            verify=False  # Disable SSL verification
        )
        if response.status_code != 200:
            raise Exception(f"Error fetching user info from {self.base_url}/user/info: {response.status_code}, {response.text}")
        # Return the user info as a dictionary
        data = response.json().get('info')
        return {'id': data['id'], 'name': data['name'], 'email': data['email']}
    def get_user_stats(self, user_id):
        headers = {
            "Authorization": f"Bearer {self.password}",
            "User-Agent": None  # Explicitly remove User-Agent
        }
        response = requests.get(
            f'{self.base_url}/user/profile/basic/{user_id}',
            headers=headers,
            proxies=self.proxies,
            verify=False  # Disable SSL verification
        )
        if response.status_code != 200:
            raise Exception(f"Error fetching user stats: {response.status_code}, {response.text}")
        # Extract user statistics from the response
        data = response.json().get('profile')
        user_owns = data['user_owns']
        root_owns = data['system_owns']  # the API calls root owns 'system_owns'
        respects = data['respects']
        return user_owns, root_owns, respects
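    # The two methods above build an identical headers dict. A shared helper
    # such as this sketch (a suggested consolidation; the original methods in
    # this class do not call it) would keep the Bearer token handling in one place.
    def _auth_headers(self):
        return {
            "Authorization": f"Bearer {self.password}",
            "User-Agent": None  # Explicitly remove User-Agent
        }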
    def get_active_machines(self):
        machines = []
        seen_ids = set()    # Track unique machine IDs
        seen_names = set()  # Track unique machine names
        page = 1
        while True:
            response = requests.get(
                f'{self.base_url}/machine/paginated?per_page=100&page={page}',
                headers={
                    "Authorization": f"Bearer {self.password}",
                    "User-Agent": None  # Explicitly remove User-Agent
                },
                proxies=self.proxies,
                verify=False  # Disable SSL verification
            )
            if response.status_code != 200:
                raise Exception(f"Error fetching active machines: {response.status_code}, {response.text}")
            data = response.json()
            for machine in data['data']:
                if machine['id'] not in seen_ids and machine['name'] not in seen_names:
                    machines.append(machine)
                    seen_ids.add(machine['id'])
                    seen_names.add(machine['name'])
            # Check for pagination
            if page >= data['meta']['last_page']:
                break
            page += 1
        return machines
    def get_retired_machines(self):
        machines = []
        seen_ids = set()    # Track unique machine IDs
        seen_names = set()  # Track unique machine names
        page = 1
        while True:
            response = requests.get(
                f'{self.base_url}/machine/list/retired/paginated?per_page=100&page={page}',
                headers={
                    "Authorization": f"Bearer {self.password}",
                    "User-Agent": None  # Explicitly remove User-Agent
                },
                proxies=self.proxies,
                verify=False  # Disable SSL verification
            )
            if response.status_code != 200:
                raise Exception(f"Error fetching retired machines: {response.status_code}, {response.text}")
            data = response.json()
            for machine in data['data']:
                if machine['id'] not in seen_ids and machine['name'] not in seen_names:
                    machines.append(machine)
                    seen_ids.add(machine['id'])
                    seen_names.add(machine['name'])
            # Check for pagination
            if page >= data['meta']['last_page']:
                break
            page += 1
        return machines
    def get_all_machines(self):
        # Combine active and retired machines, ensuring no duplicates
        active_machines = self.get_active_machines()
        retired_machines = self.get_retired_machines()
        all_machines = active_machines + retired_machines
        seen_ids = set()    # Track unique machine IDs
        seen_names = set()  # Track unique machine names
        unique_machines = []
        for machine in all_machines:
            if machine['id'] not in seen_ids and machine['name'] not in seen_names:
                unique_machines.append(machine)
                seen_ids.add(machine['id'])
                seen_names.add(machine['name'])
        return unique_machines
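    # get_active_machines(), get_retired_machines(), and the challenge and
    # Sherlock methods below all repeat the same pagination loop. The helper
    # below is an illustrative consolidation only (the original methods do not
    # call it): `path` would be e.g. 'machine/paginated' or 'challenges', and
    # `label` is used only in the error message.
    def _fetch_paginated(self, path, label):
        items = []
        seen_ids = set()    # Track unique item IDs
        seen_names = set()  # Track unique item names
        page = 1
        while True:
            response = requests.get(
                f'{self.base_url}/{path}?per_page=100&page={page}',
                headers={
                    "Authorization": f"Bearer {self.password}",
                    "User-Agent": None  # Explicitly remove User-Agent
                },
                proxies=self.proxies,
                verify=False  # Disable SSL verification
            )
            if response.status_code != 200:
                raise Exception(f"Error fetching {label}: {response.status_code}, {response.text}")
            data = response.json()
            for item in data['data']:
                if item['id'] not in seen_ids and item['name'] not in seen_names:
                    items.append(item)
                    seen_ids.add(item['id'])
                    seen_names.add(item['name'])
            # Check for pagination
            if page >= data['meta']['last_page']:
                break
            page += 1
        return items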
    def get_all_challenges(self):
        challenges = []
        seen_ids = set()    # Track unique challenge IDs
        seen_names = set()  # Track unique challenge names
        page = 1
        while True:
            response = requests.get(
                f'{self.base_url}/challenges?per_page=100&page={page}',
                headers={
                    "Authorization": f"Bearer {self.password}",
                    "User-Agent": None  # Explicitly remove User-Agent
                },
                proxies=self.proxies,
                verify=False  # Disable SSL verification
            )
            if response.status_code != 200:
                raise Exception(f"Error fetching challenges: {response.status_code}, {response.text}")
            data = response.json()
            for challenge in data['data']:
                if challenge['id'] not in seen_ids and challenge['name'] not in seen_names:
                    challenges.append(challenge)
                    seen_ids.add(challenge['id'])
                    seen_names.add(challenge['name'])
            # Check for pagination
            if page >= data['meta']['last_page']:
                break
            page += 1
        return challenges
    def get_all_sherlocks(self):
        sherlocks = []
        seen_ids = set()    # Track unique Sherlock IDs
        seen_names = set()  # Track unique Sherlock names
        page = 1
        while True:
            response = requests.get(
                f'{self.base_url}/sherlocks?per_page=100&page={page}',
                headers={
                    "Authorization": f"Bearer {self.password}",
                    "User-Agent": None  # Explicitly remove User-Agent
                },
                proxies=self.proxies,
                verify=False  # Disable SSL verification
            )
            if response.status_code != 200:
                raise Exception(f"Error fetching sherlocks: {response.status_code}, {response.text}")
            data = response.json()
            for sherlock in data['data']:
                if sherlock['id'] not in seen_ids and sherlock['name'] not in seen_names:
                    sherlocks.append(sherlock)
                    seen_ids.add(sherlock['id'])
                    seen_names.add(sherlock['name'])
            # Check for pagination
            if page >= data['meta']['last_page']:
                break
            page += 1
        return sherlocks
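# Minimal usage sketch. It assumes the HTB API token is supplied via an
# HTB_TOKEN environment variable; that variable name is an assumption for this
# example, the class itself only needs the token string.
if __name__ == "__main__":
    import os

    client = HTBClient(os.environ["HTB_TOKEN"])  # HTB_TOKEN is a hypothetical env var name
    print(f"Authenticated as {client.user['name']} "
          f"({client.user['user_owns']} user owns, {client.user['root_owns']} root owns)")
    machines = client.get_all_machines()
    print(f"Fetched {len(machines)} unique machines")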