Mirror of https://github.com/mxrch/GHunt.git (synced 2025-12-22 13:47:07 +00:00)

Commit: Spiderdal !
@@ -67,6 +67,7 @@ Positional Arguments:
  gaia        Get information on a Gaia ID.
  drive       Get information on a Drive file or folder.
  geolocate   Geolocate a BSSID.
  spiderdal   Find assets using Digital Assets Links.

Options:
  -h, --help  show this help message and exit
@@ -115,7 +116,7 @@ This project is under [AGPL Licence](https://choosealicense.com/licenses/agpl-3.0/).

Thanks to these awesome people for supporting me!
<!-- sponsors --><a href="https://github.com/BlWasp"><img src="https://github.com/BlWasp.png" width="50px" alt="BlWasp" /></a> <a href="https://github.com/C3n7ral051nt4g3ncy"><img src="https://github.com/C3n7ral051nt4g3ncy.png" width="50px" alt="C3n7ral051nt4g3ncy" /></a> <a href="https://github.com/im-hanzou"><img src="https://github.com/im-hanzou.png" width="50px" alt="im-hanzou" /></a> <a href="https://github.com/gingeleski"><img src="https://github.com/gingeleski.png" width="50px" alt="gingeleski" /></a> <a href="https://github.com/ItsMalware"><img src="https://github.com/ItsMalware.png" width="50px" alt="ItsMalware" /></a> <!-- sponsors -->
<!-- sponsors --><a href="https://github.com/BlWasp"><img src="https://github.com/BlWasp.png" width="50px" alt="BlWasp" /></a> <a href="https://github.com/C3n7ral051nt4g3ncy"><img src="https://github.com/C3n7ral051nt4g3ncy.png" width="50px" alt="C3n7ral051nt4g3ncy" /></a> <a href="https://github.com/gingeleski"><img src="https://github.com/gingeleski.png" width="50px" alt="gingeleski" /></a> <!-- sponsors -->
\
You like my work?\
@@ -1,7 +1,7 @@
|
||||
from ghunt.objects.base import GHuntCreds
|
||||
from ghunt.errors import *
|
||||
import ghunt.globals as gb
|
||||
from ghunt.objects.apis import GAPI
|
||||
from ghunt.objects.apis import GAPI, EndpointConfig
|
||||
|
||||
import httpx
|
||||
|
||||
@@ -36,19 +36,23 @@ class Accounts(GAPI):
|
||||
self._load_api(creds, headers)
|
||||
|
||||
async def OAuthLogin(self, as_client: httpx.AsyncClient) -> str:
|
||||
endpoint_name = inspect.currentframe().f_code.co_name
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "GET",
|
||||
data_type = None, # json, data or None
|
||||
authentication_mode = "oauth", # sapisidhash, cookies_only, oauth or None
|
||||
require_key = None, # key name, or None
|
||||
key_origin = None
|
||||
)
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
verb = "GET"
|
||||
base_url = f"/OAuthLogin"
|
||||
data_type = None # json, data or None
|
||||
|
||||
params = {
|
||||
"source": "ChromiumBrowser",
|
||||
"issueuberauth": 1
|
||||
}
|
||||
|
||||
self._load_endpoint(endpoint_name)
|
||||
req = await self._query(as_client, verb, endpoint_name, base_url, params, None, data_type)
|
||||
req = await self._query(endpoint.name, as_client, base_url, params)
|
||||
|
||||
# Parsing
|
||||
uber_auth = req.text
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from ghunt.objects.base import GHuntCreds
|
||||
from ghunt.errors import *
|
||||
import ghunt.globals as gb
|
||||
from ghunt.objects.apis import GAPI
|
||||
from ghunt.objects.apis import GAPI, EndpointConfig
|
||||
from ghunt.parsers.calendar import Calendar, CalendarEvents
|
||||
|
||||
import httpx
|
||||
@@ -26,20 +26,21 @@ class CalendarHttp(GAPI):
|
||||
self.hostname = "clients6.google.com"
|
||||
self.scheme = "https"
|
||||
|
||||
self.authentication_mode = "sapisidhash" # sapisidhash, cookies_only, oauth or None
|
||||
self.require_key = "calendar" # key name, or None
|
||||
|
||||
self._load_api(creds, headers)
|
||||
|
||||
async def get_calendar(self, as_client: httpx.AsyncClient, calendar_id: str) -> Tuple[bool, Calendar]:
|
||||
endpoint_name = inspect.currentframe().f_code.co_name
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "GET",
|
||||
data_type = None, # json, data or None
|
||||
authentication_mode = "sapisidhash", # sapisidhash, cookies_only, oauth or None
|
||||
require_key = "calendar", # key name, or None
|
||||
)
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
verb = "GET"
|
||||
base_url = f"/calendar/v3/calendars/{calendar_id}"
|
||||
data_type = None # json, data or None
|
||||
|
||||
self._load_endpoint(endpoint_name)
|
||||
req = await self._query(as_client, verb, endpoint_name, base_url, None, None, data_type)
|
||||
req = await self._query(endpoint.name, as_client, base_url)
|
||||
|
||||
# Parsing
|
||||
data = json.loads(req.text)
|
||||
@@ -54,11 +55,17 @@ class CalendarHttp(GAPI):
|
||||
|
||||
async def get_events(self, as_client: httpx.AsyncClient, calendar_id: str, params_template="next_events",
|
||||
time_min=datetime.today().replace(tzinfo=timezone.utc).isoformat(), max_results=250, page_token="") -> Tuple[bool, CalendarEvents]:
|
||||
endpoint_name = inspect.currentframe().f_code.co_name
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "GET",
|
||||
data_type = None, # json, data or None
|
||||
authentication_mode = "sapisidhash", # sapisidhash, cookies_only, oauth or None
|
||||
require_key = "calendar", # key name, or None
|
||||
)
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
verb = "GET"
|
||||
base_url = f"/calendar/v3/calendars/{calendar_id}/events"
|
||||
data_type = None # json, data or None
|
||||
|
||||
params_templates = {
|
||||
"next_events": {
|
||||
"calendarId": calendar_id,
|
||||
@@ -82,14 +89,13 @@ class CalendarHttp(GAPI):
|
||||
}
|
||||
|
||||
if not params_templates.get(params_template):
|
||||
raise GHuntParamsTemplateError(f"The asked template {params_template} for the endpoint {endpoint_name} wasn't recognized by GHunt.")
|
||||
raise GHuntParamsTemplateError(f"The asked template {params_template} for the endpoint {endpoint.name} wasn't recognized by GHunt.")
|
||||
|
||||
params = params_templates[params_template]
|
||||
if page_token:
|
||||
params["pageToken"] = page_token
|
||||
|
||||
self._load_endpoint(endpoint_name)
|
||||
req = await self._query(as_client, verb, endpoint_name, base_url, params, None, data_type)
|
||||
req = await self._query(endpoint.name, as_client, base_url, params=params)
|
||||
|
||||
# Parsing
|
||||
data = json.loads(req.text)
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from ghunt.objects.base import GHuntCreds
|
||||
from ghunt.errors import *
|
||||
import ghunt.globals as gb
|
||||
from ghunt.objects.apis import GAPI
|
||||
from ghunt.objects.apis import GAPI, EndpointConfig
|
||||
from ghunt.parsers.clientauthconfig import CacBrand
|
||||
|
||||
import httpx
|
||||
@@ -25,25 +25,26 @@ class ClientAuthConfigHttp(GAPI):
|
||||
self.hostname = "clientauthconfig.googleapis.com"
|
||||
self.scheme = "https"
|
||||
|
||||
self.authentication_mode = None # sapisidhash, cookies_only, oauth or None
|
||||
self.require_key = "pantheon" # key name, or None
|
||||
|
||||
self._load_api(creds, headers)
|
||||
|
||||
async def get_brand(self, as_client: httpx.AsyncClient, project_number: int) -> Tuple[bool, CacBrand]:
|
||||
endpoint_name = inspect.currentframe().f_code.co_name
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "GET",
|
||||
data_type = None, # json, data or None
|
||||
authentication_mode = None, # sapisidhash, cookies_only, oauth or None
|
||||
require_key = "pantheon", # key name, or None
|
||||
)
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
verb = "GET"
|
||||
base_url = f"/v1/brands/lookupkey/brand/{project_number}"
|
||||
data_type = None # json, data or None
|
||||
|
||||
params = {
|
||||
"readMask": "*",
|
||||
"$outputDefaults": True
|
||||
}
|
||||
|
||||
self._load_endpoint(endpoint_name)
|
||||
req = await self._query(as_client, verb, endpoint_name, base_url, params, None, data_type)
|
||||
req = await self._query(endpoint.name, as_client, base_url, params=params)
|
||||
|
||||
# Parsing
|
||||
data = json.loads(req.text)
|
||||
|
||||
ghunt/apis/digitalassetslinks.py (new file, 71 lines)
@@ -0,0 +1,71 @@
from ghunt.objects.base import GHuntCreds
from ghunt.errors import *
import ghunt.globals as gb
from ghunt.objects.apis import GAPI, EndpointConfig
from ghunt.parsers.digitalassetslinks import DalStatements

import httpx

from typing import *
import inspect
import json


class DigitalAssetsLinksHttp(GAPI):
    def __init__(self, creds: GHuntCreds, headers: Dict[str, str] = {}):
        super().__init__()

        if not headers:
            headers = gb.config.headers

        base_headers = {}

        headers = {**headers, **base_headers}

        self.hostname = "digitalassetlinks.googleapis.com"
        self.scheme = "https"

        self._load_api(creds, headers)

    async def list_statements(self, as_client: httpx.AsyncClient, website: str="",
                        android_package_name: str="", android_cert_fingerprint: str="") -> Tuple[bool, DalStatements]:
        endpoint = EndpointConfig(
            name = inspect.currentframe().f_code.co_name,
            verb = "GET",
            data_type = None, # json, data or None
            authentication_mode = None, # sapisidhash, cookies_only, oauth or None
            require_key = None, # key name, or None
        )
        self._load_endpoint(endpoint)

        base_url = "/v1/statements:list"

        # Inputs checks
        if website and (android_package_name or android_cert_fingerprint):
            raise GHuntParamsInputError(f"[DigitalAssetsLinks API list statements] website and {android_package_name if android_package_name else android_cert_fingerprint} can't both be used at the same time.")
        elif not (website or android_package_name or android_cert_fingerprint):
            raise GHuntParamsInputError("[DigitalAssetsLinks API list statements] Please choose at least one parameter between website, android_package_name and android_cert_fingerprint.")
        elif not website and not (android_package_name and android_cert_fingerprint):
            raise GHuntParamsInputError("[DigitalAssetsLinks API list statements] Please provide both android_package_name and android_cert_fingerprint.")

        params = {}
        if website:
            params["source.web.site"] = website
        if android_package_name:
            params["source.androidApp.packageName"] = android_package_name
        if android_cert_fingerprint:
            params["source.androidApp.certificate.sha256Fingerprint"] = android_cert_fingerprint

        req = await self._query(endpoint.name, as_client, base_url, params=params)

        # Parsing
        data = json.loads(req.text)

        statements = DalStatements()
        if "error" in data:
            return False, statements

        statements._scrape(data)

        found = bool(statements.statements)
        return found, statements
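For illustration, here is a minimal usage sketch of the new wrapper. It assumes GHunt credentials have already been generated on disk and reuses get_httpx_client from ghunt.helpers.utils, the same way the spiderdal module added later in this commit drives it; the target website is just an example value.

    import asyncio

    from ghunt.objects.base import GHuntCreds
    from ghunt.helpers.utils import get_httpx_client
    from ghunt.apis.digitalassetslinks import DigitalAssetsLinksHttp

    async def demo():
        creds = GHuntCreds()
        creds.load_creds()  # requires an existing GHunt session
        as_client = get_httpx_client()

        dal = DigitalAssetsLinksHttp(creds)
        found, statements = await dal.list_statements(as_client, website="https://cash.app")
        if found:
            for statement in statements.statements:
                # Each statement points to either a related web site or an Android app
                print(statement.target.web.site, statement.target.android_app.package_name)

        await as_client.aclose()

    asyncio.run(demo())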
@@ -1,7 +1,7 @@
|
||||
from ghunt.objects.base import GHuntCreds
|
||||
from ghunt.errors import *
|
||||
import ghunt.globals as gb
|
||||
from ghunt.objects.apis import GAPI
|
||||
from ghunt.objects.apis import GAPI, EndpointConfig
|
||||
from ghunt.parsers.drive import DriveCommentList, DriveFile, DriveChildList
|
||||
from ghunt.knowledge import drive as drive_knowledge
|
||||
|
||||
@@ -34,25 +34,26 @@ class DriveHttp(GAPI):
|
||||
self.hostname = "www.googleapis.com"
|
||||
self.scheme = "https"
|
||||
|
||||
self.authentication_mode = "oauth" # sapisidhash, cookies_only, oauth or None
|
||||
self.require_key = None # key name, or None
|
||||
|
||||
self._load_api(creds, headers)
|
||||
|
||||
async def get_file(self, as_client: httpx.AsyncClient, file_id: str) -> Tuple[bool, DriveFile]:
|
||||
endpoint_name = inspect.currentframe().f_code.co_name
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "GET",
|
||||
data_type = None, # json, data or None
|
||||
authentication_mode = "oauth", # sapisidhash, cookies_only, oauth or None
|
||||
require_key = None, # key name, or None
|
||||
)
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
verb = "GET"
|
||||
base_url = f"/drive/v2internal/files/{file_id}"
|
||||
data_type = None # json, data or None
|
||||
|
||||
params = {
|
||||
"fields": ','.join(drive_knowledge.request_fields),
|
||||
"supportsAllDrives": True
|
||||
}
|
||||
|
||||
self._load_endpoint(endpoint_name)
|
||||
req = await self._query(as_client, verb, endpoint_name, base_url, params, None, data_type)
|
||||
req = await self._query(endpoint.name, as_client, base_url, params=params)
|
||||
|
||||
# Parsing
|
||||
data = json.loads(req.text)
|
||||
@@ -65,11 +66,16 @@ class DriveHttp(GAPI):
|
||||
return True, drive_file
|
||||
|
||||
async def get_comments(self, as_client: httpx.AsyncClient, file_id: str, page_token: str="") -> Tuple[bool, str, DriveCommentList]:
|
||||
endpoint_name = inspect.currentframe().f_code.co_name
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "GET",
|
||||
data_type = None, # json, data or None
|
||||
authentication_mode = "oauth", # sapisidhash, cookies_only, oauth or None
|
||||
require_key = None, # key name, or None
|
||||
)
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
verb = "GET"
|
||||
base_url = f"/drive/v2internal/files/{file_id}/comments"
|
||||
data_type = None # json, data or None
|
||||
|
||||
params = {
|
||||
"supportsAllDrives": True,
|
||||
@@ -79,8 +85,7 @@ class DriveHttp(GAPI):
|
||||
if page_token:
|
||||
params["pageToken"] = page_token
|
||||
|
||||
self._load_endpoint(endpoint_name)
|
||||
req = await self._query(as_client, verb, endpoint_name, base_url, params, None, data_type)
|
||||
req = await self._query(endpoint.name, as_client, base_url, params=params)
|
||||
|
||||
# Parsing
|
||||
data = json.loads(req.text)
|
||||
@@ -95,11 +100,16 @@ class DriveHttp(GAPI):
|
||||
return True, next_page_token, drive_comments
|
||||
|
||||
async def get_childs(self, as_client: httpx.AsyncClient, file_id: str, page_token: str="") -> Tuple[bool, str, DriveChildList]:
|
||||
endpoint_name = inspect.currentframe().f_code.co_name
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "GET",
|
||||
data_type = None, # json, data or None
|
||||
authentication_mode = "oauth", # sapisidhash, cookies_only, oauth or None
|
||||
require_key = None, # key name, or None
|
||||
)
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
verb = "GET"
|
||||
base_url = f"/drive/v2internal/files/{file_id}/children"
|
||||
data_type = None # json, data or None
|
||||
|
||||
params = {
|
||||
"supportsAllDrives": True,
|
||||
@@ -109,8 +119,7 @@ class DriveHttp(GAPI):
|
||||
if page_token:
|
||||
params["pageToken"] = page_token
|
||||
|
||||
self._load_endpoint(endpoint_name)
|
||||
req = await self._query(as_client, verb, endpoint_name, base_url, params, None, data_type)
|
||||
req = await self._query(endpoint.name, as_client, base_url, params=params)
|
||||
|
||||
# Parsing
|
||||
data = json.loads(req.text)
|
||||
|
||||
ghunt/apis/fireconsolepa.py (new file, 57 lines)
@@ -0,0 +1,57 @@
|
||||
from ghunt.objects.base import GHuntCreds
|
||||
from ghunt.errors import *
|
||||
import ghunt.globals as gb
|
||||
from ghunt.objects.apis import GAPI, EndpointConfig
|
||||
from ghunt.parsers.clientauthconfig import CacBrand
|
||||
|
||||
import httpx
|
||||
|
||||
from typing import *
|
||||
import inspect
|
||||
import json
|
||||
|
||||
|
||||
class FireconsolePaHttp(GAPI):
|
||||
def __init__(self, creds: GHuntCreds, headers: Dict[str, str] = {}):
|
||||
super().__init__()
|
||||
|
||||
if not headers:
|
||||
headers = gb.config.headers
|
||||
|
||||
base_headers = {}
|
||||
|
||||
headers = {**headers, **base_headers}
|
||||
|
||||
self.hostname = "fireconsole-pa.clients6.google.com"
|
||||
self.scheme = "https"
|
||||
|
||||
self._load_api(creds, headers)
|
||||
|
||||
async def is_project_valid(self, as_client: httpx.AsyncClient, project_identifier: str) -> bool:
|
||||
"""
|
||||
Returns if the given project identifier is valid.
|
||||
The project identifier can be a project ID or a project number.
|
||||
"""
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "POST",
|
||||
data_type = "json", # json, data or None
|
||||
authentication_mode = "sapisidhash", # sapisidhash, cookies_only, oauth or None
|
||||
require_key = "firebase_console", # key name, or None
|
||||
)
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
base_url = "/v1/analytics:checkAccess"
|
||||
|
||||
params = {
|
||||
"alt": "json"
|
||||
}
|
||||
|
||||
post_data = {
|
||||
"entityKey": {},
|
||||
"firebaseProjectId": project_identifier
|
||||
}
|
||||
|
||||
req = await self._query(endpoint.name, as_client, base_url, params=params, data=post_data)
|
||||
|
||||
return req.status_code != 404
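A short sketch of how this check might be called, assuming ghunt_creds is a loaded GHuntCreds, as_client is an httpx.AsyncClient, and the project identifier is a placeholder:

    fireconsole = FireconsolePaHttp(ghunt_creds)
    exists = await fireconsole.is_project_valid(as_client, "my-firebase-project")  # project ID or number
    print("Valid project" if exists else "Unknown project")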
@@ -1,7 +1,7 @@
|
||||
from ghunt.objects.base import GHuntCreds
|
||||
from ghunt.errors import *
|
||||
import ghunt.globals as gb
|
||||
from ghunt.objects.apis import GAPI
|
||||
from ghunt.objects.apis import GAPI, EndpointConfig
|
||||
from ghunt.parsers.geolocate import GeolocationResponse
|
||||
|
||||
import httpx
|
||||
@@ -31,11 +31,16 @@ class GeolocationHttp(GAPI):
|
||||
self._load_api(creds, headers)
|
||||
|
||||
async def geolocate(self, as_client: httpx.AsyncClient, bssid: str, body: dict) -> Tuple[bool, GeolocationResponse]:
|
||||
endpoint_name = inspect.currentframe().f_code.co_name
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "POST",
|
||||
data_type = "json", # json, data or None
|
||||
authentication_mode = None, # sapisidhash, cookies_only, oauth or None
|
||||
require_key = "geolocation", # key name, or None
|
||||
)
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
verb = "POST"
|
||||
base_url = f"/geolocation/v1/geolocate"
|
||||
data_type = "json" # json, data or None
|
||||
|
||||
if bssid:
|
||||
payload = {
|
||||
@@ -52,8 +57,7 @@ class GeolocationHttp(GAPI):
|
||||
else:
|
||||
payload = body
|
||||
|
||||
self._load_endpoint(endpoint_name)
|
||||
req = await self._query(as_client, verb, endpoint_name, base_url, None, payload, data_type)
|
||||
req = await self._query(endpoint.name, as_client, base_url, data=payload)
|
||||
|
||||
# Parsing
|
||||
data = json.loads(req.text)
|
||||
|
||||
ghunt/apis/identitytoolkit.py (new file, 57 lines)
@@ -0,0 +1,57 @@
|
||||
from ghunt.objects.base import GHuntCreds
|
||||
from ghunt.errors import *
|
||||
import ghunt.globals as gb
|
||||
from ghunt.objects.apis import GAPI, EndpointConfig
|
||||
from ghunt.parsers.identitytoolkit import ITKProjectConfig
|
||||
|
||||
import httpx
|
||||
|
||||
from typing import *
|
||||
import inspect
|
||||
import json
|
||||
|
||||
|
||||
class IdentityToolkitHttp(GAPI):
|
||||
def __init__(self, creds: GHuntCreds, headers: Dict[str, str] = {}):
|
||||
super().__init__()
|
||||
|
||||
if not headers:
|
||||
headers = gb.config.headers
|
||||
|
||||
base_headers = {}
|
||||
|
||||
headers = {**headers, **base_headers}
|
||||
|
||||
self.hostname = "www.googleapis.com"
|
||||
self.scheme = "https"
|
||||
|
||||
self._load_api(creds, headers)
|
||||
|
||||
async def get_project_config(self, as_client: httpx.AsyncClient, api_key: str) -> Tuple[bool, ITKProjectConfig]:
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "GET",
|
||||
data_type = None, # json, data or None
|
||||
authentication_mode = None, # sapisidhash, cookies_only, oauth or None
|
||||
require_key = None, # key name, or None
|
||||
)
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
base_url = "/identitytoolkit/v3/relyingparty/getProjectConfig"
|
||||
|
||||
params = {
|
||||
"key": api_key
|
||||
}
|
||||
|
||||
req = await self._query(endpoint.name, as_client, base_url, params=params)
|
||||
|
||||
# Parsing
|
||||
data = json.loads(req.text)
|
||||
|
||||
project_config = ITKProjectConfig()
|
||||
if "error" in data:
|
||||
return False, project_config
|
||||
|
||||
project_config._scrape(data)
|
||||
|
||||
return True, project_config
|
||||
ghunt/apis/mobilesdk.py (new file, 100 lines)
@@ -0,0 +1,100 @@
|
||||
from ghunt.objects.base import GHuntCreds
|
||||
from ghunt.errors import *
|
||||
import ghunt.globals as gb
|
||||
from ghunt.objects.apis import GAPI, EndpointConfig
|
||||
from ghunt.parsers.mobilesdk import MobileSDKDynamicConfig
|
||||
|
||||
import httpx
|
||||
|
||||
from typing import *
|
||||
import inspect
|
||||
import json
|
||||
|
||||
|
||||
class MobileSDKPaHttp(GAPI):
|
||||
def __init__(self, creds: GHuntCreds, headers: Dict[str, str] = {}):
|
||||
super().__init__()
|
||||
|
||||
if not headers:
|
||||
headers = gb.config.headers
|
||||
|
||||
base_headers = {}
|
||||
|
||||
headers = {**headers, **base_headers}
|
||||
|
||||
self.hostname = "mobilesdk-pa.clients6.google.com"
|
||||
self.scheme = "https"
|
||||
|
||||
self._load_api(creds, headers)
|
||||
|
||||
async def test_iam_permissions(self, as_client: httpx.AsyncClient, project_identifier: str, permissions: List[str]) -> Tuple[bool, List[str]]:
|
||||
"""
|
||||
Returns the permissions you have against a project.
|
||||
The project identifier can be a project ID or a project number.
|
||||
"""
|
||||
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "POST",
|
||||
data_type = "json", # json, data or None
|
||||
authentication_mode = "sapisidhash", # sapisidhash, cookies_only, oauth or None
|
||||
require_key = "firebase_console", # key name, or None
|
||||
)
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
base_url = f"/v1/projects/{project_identifier}:testIamPermissions"
|
||||
|
||||
post_data = {
|
||||
"permissions": permissions
|
||||
}
|
||||
|
||||
req = await self._query(endpoint.name, as_client, base_url, data=post_data)
|
||||
|
||||
# Parsing
|
||||
data = json.loads(req.text)
|
||||
|
||||
if "error" in data:
|
||||
return False, []
|
||||
|
||||
return True, data.get("permissions", [])
|
||||
|
||||
async def get_webapp_dynamic_config(self, as_client: httpx.AsyncClient, app_id: str) -> Tuple[bool, MobileSDKDynamicConfig]:
|
||||
"""
|
||||
Returns the dynamic config of a web app.
|
||||
|
||||
:param app_id: The app id
|
||||
"""
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "GET",
|
||||
data_type = None, # json, data or None
|
||||
authentication_mode = "sapisidhash", # sapisidhash, cookies_only, oauth or None,
|
||||
key_origin="firebase_console", # key name, or None
|
||||
# require_key = "firebase_console", # key name, or None
|
||||
)
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
# Android OAuth fields
|
||||
self.api_name = "mobilesdk"
|
||||
self.package_name = "com.android.chrome"
|
||||
self.scopes = [
|
||||
"https://www.googleapis.com/auth/cloud-platform",
|
||||
"https://www.googleapis.com/auth/cloud-platform.read-only",
|
||||
"https://www.googleapis.com/auth/firebase",
|
||||
"https://www.googleapis.com/auth/firebase.readonly"
|
||||
]
|
||||
|
||||
base_url = f"/v1/config/webApps/{app_id}/dynamicConfig"
|
||||
|
||||
req = await self._query(endpoint.name, as_client, base_url)
|
||||
|
||||
# Parsing
|
||||
data = json.loads(req.text)
|
||||
|
||||
dynamic_config = MobileSDKDynamicConfig()
|
||||
if "error" in data:
|
||||
return False, dynamic_config
|
||||
|
||||
dynamic_config._scrape(data)
|
||||
|
||||
return True, dynamic_config
|
||||
@@ -1,7 +1,7 @@
|
||||
from ghunt.objects.base import GHuntCreds
|
||||
from ghunt.errors import *
|
||||
import ghunt.globals as gb
|
||||
from ghunt.objects.apis import GAPI
|
||||
from ghunt.objects.apis import GAPI, EndpointConfig
|
||||
from ghunt.parsers.people import Person
|
||||
|
||||
import httpx
|
||||
@@ -18,44 +18,54 @@ class PeoplePaHttp(GAPI):
|
||||
if not headers:
|
||||
headers = gb.config.headers
|
||||
|
||||
base_headers = {}
|
||||
base_headers = {
|
||||
"Host": "people-pa.clients6.google.com",
|
||||
}
|
||||
|
||||
headers = {**headers, **base_headers}
|
||||
|
||||
self.hostname = "people-pa.clients6.google.com"
|
||||
self.hostname = "googleapis.com"
|
||||
self.scheme = "https"
|
||||
|
||||
self.authentication_mode = "sapisidhash" # sapisidhash, cookies_only, oauth or None
|
||||
self.require_key = "photos" # key name, or None
|
||||
|
||||
self._load_api(creds, headers)
|
||||
|
||||
async def people_lookup(self, as_client: httpx.AsyncClient, email: str, params_template="just_gaia_id") -> Tuple[bool, Person]:
|
||||
endpoint_name = inspect.currentframe().f_code.co_name
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "GET",
|
||||
data_type = None, # json, data or None
|
||||
authentication_mode = "sapisidhash", # sapisidhash, cookies_only, oauth or None
|
||||
require_key = "photos", # key name, or None
|
||||
# key_origin="photos"
|
||||
)
|
||||
|
||||
# Android OAuth fields
|
||||
self.api_name = "people"
|
||||
self.package_name = "com.google.android.gms"
|
||||
self.scopes = [
|
||||
"https://www.googleapis.com/auth/profile.agerange.read",
|
||||
"https://www.googleapis.com/auth/profile.language.read",
|
||||
"https://www.googleapis.com/auth/contacts",
|
||||
"https://www.googleapis.com/auth/peopleapi.legacy.readwrite"
|
||||
|
||||
]
|
||||
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
verb = "GET"
|
||||
base_url = "/v2/people/lookup"
|
||||
data_type = None # json, data or None
|
||||
|
||||
params_templates = {
|
||||
"just_gaia_id": {
|
||||
"id": email,
|
||||
"type": "EMAIL",
|
||||
"match_type": "EXACT",
|
||||
"request_mask.include_field.paths": "person.metadata",
|
||||
"request_mask.include_container": [
|
||||
"PROFILE",
|
||||
"DOMAIN_PROFILE",
|
||||
],
|
||||
"matchType": "EXACT",
|
||||
"requestMask.includeField.paths": "person.metadata"
|
||||
},
|
||||
"just_name": {
|
||||
"id": email,
|
||||
"type": "EMAIL",
|
||||
"match_type": "EXACT",
|
||||
"request_mask.include_field.paths": "person.name",
|
||||
"request_mask.include_container": [
|
||||
"PROFILE",
|
||||
"DOMAIN_PROFILE",
|
||||
],
|
||||
"matchType": "EXACT",
|
||||
"requestMask.includeField.paths": "person.name",
|
||||
"core_id_params.enable_private_names": True
|
||||
},
|
||||
"max_details": {
|
||||
@@ -64,8 +74,7 @@ class PeoplePaHttp(GAPI):
|
||||
"match_type": "EXACT",
|
||||
"extension_set.extension_names": [
|
||||
"DYNAMITE_ADDITIONAL_DATA",
|
||||
"DYNAMITE_ORGANIZATION_INFO",
|
||||
# "GPLUS_ADDITIONAL_DATA"
|
||||
"DYNAMITE_ORGANIZATION_INFO"
|
||||
],
|
||||
"request_mask.include_field.paths": [
|
||||
"person.metadata.best_display_name",
|
||||
@@ -74,7 +83,7 @@ class PeoplePaHttp(GAPI):
|
||||
"person.interaction_settings",
|
||||
"person.legacy_fields",
|
||||
"person.metadata",
|
||||
# "person.in_app_reachability",
|
||||
"person.in_app_reachability",
|
||||
"person.name",
|
||||
"person.read_only_profile_info",
|
||||
"person.sort_keys",
|
||||
@@ -97,10 +106,10 @@ class PeoplePaHttp(GAPI):
|
||||
}
|
||||
|
||||
if not params_templates.get(params_template):
|
||||
raise GHuntParamsTemplateError(f"The asked template {params_template} for the endpoint {endpoint_name} wasn't recognized by GHunt.")
|
||||
raise GHuntParamsTemplateError(f"The asked template {params_template} for the endpoint {endpoint.name} wasn't recognized by GHunt.")
|
||||
params = params_templates[params_template]
|
||||
|
||||
self._load_endpoint(endpoint_name)
|
||||
req = await self._query(as_client, verb, endpoint_name, base_url, params_templates[params_template], None, data_type)
|
||||
req = await self._query(endpoint.name, as_client, base_url, params=params)
|
||||
|
||||
# Parsing
|
||||
data = json.loads(req.text)
|
||||
@@ -114,27 +123,40 @@ class PeoplePaHttp(GAPI):
|
||||
return True, person
|
||||
|
||||
async def people(self, as_client: httpx.AsyncClient, gaia_id: str, params_template="just_name") -> Tuple[bool, Person]:
|
||||
endpoint_name = inspect.currentframe().f_code.co_name
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "GET",
|
||||
data_type = None, # json, data or None
|
||||
authentication_mode = "sapisidhash", # sapisidhash, cookies_only, oauth or None
|
||||
require_key = "photos", # key name, or None
|
||||
# key_origin="photos"
|
||||
)
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
# Android OAuth fields
|
||||
self.api_name = "people"
|
||||
self.package_name = "com.google.android.gms"
|
||||
self.scopes = [
|
||||
"https://www.googleapis.com/auth/profile.agerange.read",
|
||||
"https://www.googleapis.com/auth/profile.language.read",
|
||||
"https://www.googleapis.com/auth/contacts",
|
||||
"https://www.googleapis.com/auth/peopleapi.legacy.readwrite"
|
||||
|
||||
]
|
||||
|
||||
verb = "GET"
|
||||
base_url = "/v2/people"
|
||||
data_type = None # json, data or None
|
||||
|
||||
params_templates = {
|
||||
"just_name": {
|
||||
"person_id": gaia_id,
|
||||
"request_mask.include_field.paths": "person.name",
|
||||
"request_mask.include_container": [
|
||||
"PROFILE",
|
||||
"DOMAIN_PROFILE",
|
||||
],
|
||||
"requestMask.includeField.paths": "person.name",
|
||||
"core_id_params.enable_private_names": True
|
||||
},
|
||||
"max_details": {
|
||||
"person_id": gaia_id,
|
||||
"extension_set.extension_names": [
|
||||
"DYNAMITE_ADDITIONAL_DATA",
|
||||
"DYNAMITE_ORGANIZATION_INFO",
|
||||
# "GPLUS_ADDITIONAL_DATA"
|
||||
"DYNAMITE_ORGANIZATION_INFO"
|
||||
],
|
||||
"request_mask.include_field.paths": [
|
||||
"person.metadata.best_display_name",
|
||||
@@ -143,7 +165,7 @@ class PeoplePaHttp(GAPI):
|
||||
"person.interaction_settings",
|
||||
"person.legacy_fields",
|
||||
"person.metadata",
|
||||
# "person.in_app_reachability",
|
||||
"person.in_app_reachability",
|
||||
"person.name",
|
||||
"person.read_only_profile_info",
|
||||
"person.sort_keys",
|
||||
@@ -166,10 +188,10 @@ class PeoplePaHttp(GAPI):
|
||||
}
|
||||
|
||||
if not params_templates.get(params_template):
|
||||
raise GHuntParamsTemplateError(f"The asked template {params_template} for the endpoint {endpoint_name} wasn't recognized by GHunt.")
|
||||
raise GHuntParamsTemplateError(f"The asked template {params_template} for the endpoint {endpoint.name} wasn't recognized by GHunt.")
|
||||
params = params_templates[params_template]
|
||||
|
||||
self._load_endpoint(endpoint_name)
|
||||
req = await self._query(as_client, verb, endpoint_name, base_url, params_templates[params_template], None, data_type)
|
||||
req = await self._query(endpoint.name, as_client, base_url, params=params)
|
||||
|
||||
# Parsing
|
||||
data = json.loads(req.text)
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from ghunt.objects.base import GHuntCreds
|
||||
from ghunt.errors import *
|
||||
import ghunt.globals as gb
|
||||
from ghunt.objects.apis import GAPI
|
||||
from ghunt.objects.apis import GAPI, EndpointConfig
|
||||
from ghunt.parsers.playgames import PlayedGames, PlayerAchievements, PlayerProfile
|
||||
|
||||
import httpx
|
||||
@@ -33,20 +33,21 @@ class PlayGames(GAPI):
|
||||
self.hostname = "www.googleapis.com"
|
||||
self.scheme = "https"
|
||||
|
||||
self.authentication_mode = "oauth" # sapisidhash, cookies_only, oauth or None
|
||||
self.require_key = None # key name, or None
|
||||
|
||||
self._load_api(creds, headers)
|
||||
|
||||
async def get_profile(self, as_client: httpx.AsyncClient, player_id: str) -> Tuple[bool, PlayerProfile]:
|
||||
endpoint_name = inspect.currentframe().f_code.co_name
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "GET",
|
||||
data_type = None, # json, data or None
|
||||
authentication_mode = "oauth", # sapisidhash, cookies_only, oauth or None
|
||||
require_key = None, # key name, or None
|
||||
)
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
verb = "GET"
|
||||
base_url = f"/games/v1whitelisted/players/{player_id}"
|
||||
data_type = None # json, data or None
|
||||
|
||||
self._load_endpoint(endpoint_name)
|
||||
req = await self._query(as_client, verb, endpoint_name, base_url, None, None, data_type)
|
||||
req = await self._query(endpoint.name, as_client, base_url)
|
||||
|
||||
# Parsing
|
||||
data = json.loads(req.text)
|
||||
@@ -60,25 +61,27 @@ class PlayGames(GAPI):
|
||||
return True, player_profile
|
||||
|
||||
async def get_played_games(self, as_client: httpx.AsyncClient, player_id: str, page_token: str="") -> Tuple[bool, str, PlayedGames]:
|
||||
endpoint_name = inspect.currentframe().f_code.co_name
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "GET",
|
||||
data_type = None, # json, data or None
|
||||
authentication_mode = "oauth", # sapisidhash, cookies_only, oauth or None
|
||||
require_key = None, # key name, or None
|
||||
)
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
verb = "GET"
|
||||
base_url = f"/games/v1whitelisted/players/{player_id}/applications/played"
|
||||
data_type = None # json, data or None
|
||||
|
||||
params = {}
|
||||
if page_token:
|
||||
params = {"pageToken": page_token}
|
||||
|
||||
self._load_endpoint(endpoint_name)
|
||||
req = await self._query(as_client, verb, endpoint_name, base_url, params, None, data_type)
|
||||
req = await self._query(endpoint.name, as_client, base_url, params=params)
|
||||
|
||||
# Parsing
|
||||
data = json.loads(req.text)
|
||||
played_games = PlayedGames()
|
||||
if not "items" in data:
|
||||
print(req)
|
||||
print(req.text)
|
||||
return False, "", played_games
|
||||
|
||||
next_page_token = data.get("nextPageToken", "")
|
||||
@@ -88,11 +91,16 @@ class PlayGames(GAPI):
|
||||
return True, next_page_token, played_games
|
||||
|
||||
async def get_achievements(self, as_client: httpx.AsyncClient, player_id: str, page_token: str="") -> Tuple[bool, str, PlayerAchievements]:
|
||||
endpoint_name = inspect.currentframe().f_code.co_name
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "POST",
|
||||
data_type = "json", # json, data or None
|
||||
authentication_mode = "oauth", # sapisidhash, cookies_only, oauth or None
|
||||
require_key = None, # key name, or None
|
||||
)
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
verb = "POST"
|
||||
base_url = f"/games/v1whitelisted/players/{player_id}/achievements"
|
||||
data_type = "json" # json, data or None
|
||||
|
||||
params = {
|
||||
"state": "UNLOCKED",
|
||||
@@ -100,20 +108,15 @@ class PlayGames(GAPI):
|
||||
"sortOrder": "RECENT_FIRST"
|
||||
}
|
||||
|
||||
data = {}
|
||||
|
||||
if page_token:
|
||||
params["pageToken"] = page_token
|
||||
|
||||
self._load_endpoint(endpoint_name)
|
||||
req = await self._query(as_client, verb, endpoint_name, base_url, params, data, data_type)
|
||||
req = await self._query(endpoint.name, as_client, base_url, params=params)
|
||||
|
||||
# Parsing
|
||||
data = json.loads(req.text)
|
||||
achievements = PlayerAchievements()
|
||||
if not "items" in data:
|
||||
print(req)
|
||||
print(req.text)
|
||||
return False, "", achievements
|
||||
|
||||
next_page_token = ""
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from ghunt.objects.apis import GAPI
|
||||
from ghunt.objects.apis import GAPI, EndpointConfig
|
||||
from ghunt.objects.base import GHuntCreds
|
||||
from ghunt import globals as gb
|
||||
from ghunt.protos.playgatewaypa.search_player_pb2 import PlayerSearchProto
|
||||
@@ -29,7 +29,6 @@ class PlayGatewayPaGrpc(GAPI):
|
||||
|
||||
if not headers:
|
||||
headers = gb.config.android_headers
|
||||
headers["User-Agent"] = headers["User-Agent"].format(self.package_name)
|
||||
|
||||
headers = {**headers, **{
|
||||
"Content-Type": "application/grpc",
|
||||
@@ -41,24 +40,25 @@ class PlayGatewayPaGrpc(GAPI):
|
||||
self.hostname = "playgateway-pa.googleapis.com"
|
||||
self.scheme = "https"
|
||||
|
||||
self.authentication_mode = "oauth" # sapisidhash, cookies_only, oauth or None
|
||||
self.require_key = None # key name, or None
|
||||
|
||||
self._load_api(creds, headers)
|
||||
|
||||
async def search_player(self, as_client: httpx.AsyncClient, query: str) -> PlayerSearchResults:
|
||||
endpoint_name = inspect.currentframe().f_code.co_name
|
||||
|
||||
verb = "POST"
|
||||
base_url = "/play.gateway.adapter.interplay.v1.PlayGatewayInterplayService/GetPage"
|
||||
data_type = "data"
|
||||
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "POST",
|
||||
data_type = "data", # json, data or None
|
||||
authentication_mode = "oauth", # sapisidhash, cookies_only, oauth or None
|
||||
require_key = None, # key name, or None
|
||||
ext_metadata = {
|
||||
"bin": {
|
||||
"158709649": "CggaBgj22K2aARo4EgoI+aKnlZf996E/GhcQHhoPUkQyQS4yMTEwMDEuMDAyIgIxMToICgZJZ0pHVWdCB1BpeGVsIDU",
|
||||
"173715354": "CgEx"
|
||||
}
|
||||
}
|
||||
)
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
base_url = "/play.gateway.adapter.interplay.v1.PlayGatewayInterplayService/GetPage"
|
||||
|
||||
player_search = PlayerSearchProto()
|
||||
player_search.search_form.query.text = query
|
||||
@@ -67,8 +67,7 @@ class PlayGatewayPaGrpc(GAPI):
|
||||
prefix = bytes(1) + pack(">i", len(payload))
|
||||
data = prefix + payload
|
||||
|
||||
self._load_endpoint(endpoint_name, {}, ext_metadata)
|
||||
req = await self._query(as_client, verb, endpoint_name, base_url, None, data, data_type)
|
||||
req = await self._query(endpoint.name, as_client, base_url, data=data)
|
||||
|
||||
# Parsing
|
||||
player_search_results = PlayerSearchResultsProto()
|
||||
@@ -85,18 +84,22 @@ class PlayGatewayPaGrpc(GAPI):
|
||||
To get all the details about a player, please use get_player method of PlayGames (HTTP API).
|
||||
"""
|
||||
|
||||
endpoint_name = inspect.currentframe().f_code.co_name
|
||||
|
||||
verb = "POST"
|
||||
base_url = "/play.gateway.adapter.interplay.v1.PlayGatewayInterplayService/GetPage"
|
||||
data_type = "data"
|
||||
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "POST",
|
||||
data_type = "data", # json, data or None
|
||||
authentication_mode = "oauth", # sapisidhash, cookies_only, oauth or None
|
||||
require_key = None, # key name, or None
|
||||
ext_metadata = {
|
||||
"bin": {
|
||||
"158709649": "CggaBgj22K2aARo4EgoI+aKnlZf996E/GhcQHhoPUkQyQS4yMTEwMDEuMDAyIgIxMToICgZJZ0pHVWdCB1BpeGVsIDU",
|
||||
"173715354": "CgEx"
|
||||
}
|
||||
}
|
||||
)
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
base_url = "/play.gateway.adapter.interplay.v1.PlayGatewayInterplayService/GetPage"
|
||||
|
||||
player_profile = GetPlayerProto()
|
||||
player_profile.form.query.id = player_id
|
||||
@@ -105,8 +108,7 @@ class PlayGatewayPaGrpc(GAPI):
|
||||
prefix = bytes(1) + pack(">i", len(payload))
|
||||
data = prefix + payload
|
||||
|
||||
self._load_endpoint(endpoint_name, {}, ext_metadata)
|
||||
req = await self._query(as_client, verb, endpoint_name, base_url, None, data, data_type)
|
||||
req = await self._query(endpoint.name, as_client, base_url, data=data)
|
||||
|
||||
# Parsing
|
||||
player_profile = GetPlayerResponseProto()
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from ghunt.objects.base import GHuntCreds
|
||||
from ghunt.errors import *
|
||||
import ghunt.globals as gb
|
||||
from ghunt.objects.apis import GAPI
|
||||
from ghunt.objects.apis import GAPI, EndpointConfig
|
||||
from ghunt.parsers.vision import VisionFaceDetection
|
||||
|
||||
import httpx
|
||||
@@ -27,23 +27,26 @@ class VisionHttp(GAPI):
|
||||
self.hostname = "content-vision.googleapis.com"
|
||||
self.scheme = "https"
|
||||
|
||||
self.authentication_mode = None # sapisidhash, cookies_only, oauth or None
|
||||
self.require_key = "apis_explorer" # key name, or None
|
||||
self.key_origin = "https://content-vision.googleapis.com"
|
||||
|
||||
self._load_api(creds, headers)
|
||||
|
||||
async def detect_faces(self, as_client: httpx.AsyncClient, image_url: str = "", image_content: str = "",
|
||||
params_template="default") -> Tuple[bool, bool, VisionFaceDetection]:
|
||||
endpoint_name = inspect.currentframe().f_code.co_name
|
||||
data_template="default") -> Tuple[bool, bool, VisionFaceDetection]:
|
||||
endpoint = EndpointConfig(
|
||||
name = inspect.currentframe().f_code.co_name,
|
||||
verb = "POST",
|
||||
data_type = "json", # json, data or None
|
||||
authentication_mode = None, # sapisidhash, cookies_only, oauth or None
|
||||
require_key = "apis_explorer", # key name, or None
|
||||
key_origin = "https://content-vision.googleapis.com"
|
||||
)
|
||||
self._load_endpoint(endpoint)
|
||||
|
||||
base_url = "/v1/images:annotate"
|
||||
|
||||
# image_url can cause errors with vision_api, so we prefer using image_content
|
||||
# See => https://cloud.google.com/vision/docs/detecting-faces?#detect_faces_in_a_remote_image
|
||||
|
||||
verb = "POST"
|
||||
base_url = "/v1/images:annotate"
|
||||
data_type = "json" # json, data or None
|
||||
params_templates = {
|
||||
data_templates = {
|
||||
"default": {
|
||||
"requests":[
|
||||
{
|
||||
@@ -59,8 +62,8 @@ class VisionHttp(GAPI):
|
||||
}
|
||||
}
|
||||
|
||||
if not params_templates.get(params_template):
|
||||
raise GHuntParamsTemplateError(f"The asked template {params_template} for the endpoint {endpoint_name} wasn't recognized by GHunt.")
|
||||
if not data_templates.get(data_template):
|
||||
raise GHuntParamsTemplateError(f"The asked template {data_template} for the endpoint {endpoint.name} wasn't recognized by GHunt.")
|
||||
|
||||
# Inputs checks
|
||||
if image_url and image_content:
|
||||
@@ -68,19 +71,21 @@ class VisionHttp(GAPI):
|
||||
elif not image_url and not image_content:
|
||||
raise GHuntParamsInputError("[Vision API faces detection] Please choose at least one parameter between image_url and image_content.")
|
||||
|
||||
if data_template == "default":
|
||||
if image_url:
|
||||
params_templates["default"]["requests"][0]["image"] = {
|
||||
data_templates["default"]["requests"][0]["image"] = {
|
||||
"source": {
|
||||
"imageUri": image_url
|
||||
}
|
||||
}
|
||||
elif image_content:
|
||||
params_templates["default"]["requests"][0]["image"] = {
|
||||
data_templates["default"]["requests"][0]["image"] = {
|
||||
"content": image_content
|
||||
}
|
||||
|
||||
self._load_endpoint(endpoint_name)
|
||||
req = await self._query(as_client, verb, endpoint_name, base_url, None, params_templates[params_template], data_type)
|
||||
data = data_templates[data_template]
|
||||
req = await self._query(endpoint.name, as_client, base_url, data=data)
|
||||
|
||||
rate_limited = req.status_code == 429 # API Explorer sometimes rate-limit because they set their DefaultRequestsPerMinutePerProject to 1800
|
||||
|
||||
vision_face_detection = VisionFaceDetection()
|
||||
|
||||
ghunt/cli.py (15 changed lines)
@@ -42,12 +42,20 @@ def parse_and_run():
    geolocate_group.add_argument("-f", "--file", type=Path, help="File containing a raw request body, useful to put many BSSIDs. ([italic light_steel_blue1][link=https://developers.google.com/maps/documentation/geolocation/requests-geolocation?#sample-requests]Reference format[/link][/italic light_steel_blue1])")
    parser_geolocate.add_argument('--json', type=Path, help="File to write the JSON output to.")

    ### Spiderdal module
    parser_spiderdal = subparsers.add_parser('spiderdal', help="Find assets using Digital Assets Links.", formatter_class=RichHelpFormatter)
    parser_spiderdal.add_argument("-p", "--package", help="Example: com.squareup.cash")
    parser_spiderdal.add_argument("-f", "--fingerprint", help="Example: 21:A7:46:75:96:C1:68:65:0F:D7:B6:31:B6:54:22:EB:56:3E:1D:21:AF:F2:2D:DE:73:89:BA:0D:5D:73:87:48")
    parser_spiderdal.add_argument("-u", "--url", help="Example: https://cash.app. If a domain is given, it will convert it to a URL, and also try the \"www\" subdomain.")
    parser_spiderdal.add_argument("-s", "--strict", action='store_true', help="Don't attempt to convert the domain to a URL, and don't try the \"www\" subdomain.")
    parser_spiderdal.add_argument('--json', type=Path, help="File to write the JSON output to.")

    ### Parsing
    args = None
    if not sys.argv[1:]:
        parser.parse_args(["--help"])
    else:
        for mod in ["email", "gaia", "drive", "geolocate"]:
        for mod in ["email", "gaia", "drive", "geolocate", "spiderdal"]:
            if sys.argv[1] == mod and not sys.argv[2:]:
                parser.parse_args([mod, "--help"])

@@ -72,3 +80,8 @@ def process_args(args: argparse.Namespace):
        case "geolocate":
            from ghunt.modules import geolocate
            asyncio.run(geolocate.main(None, args.bssid, args.file, args.json))
        case "spiderdal":
            if any([args.package, args.fingerprint]) and not all([args.package, args.fingerprint]):
                exit("[!] You must provide both a package name and a certificate fingerprint.")
            from ghunt.modules import spiderdal
            asyncio.run(spiderdal.main(args.url, args.package, args.fingerprint, args.strict, args.json))
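For reference, a minimal sketch of invoking the new module directly, mirroring what the spiderdal dispatch above does; the argument values are hypothetical, and the same call is what a command line such as ghunt spiderdal -u https://cash.app would end up running (assuming the usual ghunt console entry point):

    import asyncio
    from ghunt.modules import spiderdal

    # Arguments follow the argparse options above: url, package, fingerprint, strict, json_file
    asyncio.run(spiderdal.main("https://cash.app", None, None, False, None))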
@@ -1,50 +1,50 @@
|
||||
class GHuntKnowledgeError(BaseException):
|
||||
class GHuntKnowledgeError(Exception):
|
||||
pass
|
||||
|
||||
class GHuntCorruptedHeadersError(BaseException):
|
||||
class GHuntCorruptedHeadersError(Exception):
|
||||
pass
|
||||
|
||||
class GHuntUnknownVerbError(BaseException):
|
||||
class GHuntUnknownVerbError(Exception):
|
||||
pass
|
||||
|
||||
class GHuntUnknownRequestDataTypeError(BaseException):
|
||||
class GHuntUnknownRequestDataTypeError(Exception):
|
||||
pass
|
||||
|
||||
class GHuntInsufficientCreds(BaseException):
|
||||
class GHuntInsufficientCreds(Exception):
|
||||
pass
|
||||
|
||||
class GHuntParamsTemplateError(BaseException):
|
||||
class GHuntParamsTemplateError(Exception):
|
||||
pass
|
||||
|
||||
class GHuntParamsInputError(BaseException):
|
||||
class GHuntParamsInputError(Exception):
|
||||
pass
|
||||
|
||||
class GHuntAPIResponseParsingError(BaseException):
|
||||
class GHuntAPIResponseParsingError(Exception):
|
||||
pass
|
||||
|
||||
class GHuntObjectsMergingError(BaseException):
|
||||
class GHuntObjectsMergingError(Exception):
|
||||
pass
|
||||
|
||||
class GHuntAndroidMasterAuthError(BaseException):
|
||||
class GHuntAndroidMasterAuthError(Exception):
|
||||
pass
|
||||
|
||||
class GHuntAndroidAppOAuth2Error(BaseException):
|
||||
class GHuntAndroidAppOAuth2Error(Exception):
|
||||
pass
|
||||
|
||||
class GHuntOSIDAuthError(BaseException):
|
||||
class GHuntOSIDAuthError(Exception):
|
||||
pass
|
||||
|
||||
class GHuntCredsNotLoaded(BaseException):
|
||||
class GHuntCredsNotLoaded(Exception):
|
||||
pass
|
||||
|
||||
class GHuntInvalidSession(BaseException):
|
||||
class GHuntInvalidSession(Exception):
|
||||
pass
|
||||
|
||||
class GHuntNotAuthenticated(BaseException):
|
||||
class GHuntNotAuthenticated(Exception):
|
||||
pass
|
||||
|
||||
class GHuntInvalidTarget(BaseException):
|
||||
class GHuntInvalidTarget(Exception):
|
||||
pass
|
||||
|
||||
class GHuntLoginError(BaseException):
|
||||
class GHuntLoginError(Exception):
|
||||
pass
|
||||
ghunt/helpers/gcp.py (new file, 29 lines)
@@ -0,0 +1,29 @@
|
||||
import dns.message
|
||||
import dns.asyncquery
|
||||
import httpx
|
||||
|
||||
from ghunt.objects.base import GHuntCreds
|
||||
from ghunt.apis.identitytoolkit import IdentityToolkitHttp
|
||||
|
||||
|
||||
async def is_cloud_functions_panel_existing(project_id: str):
|
||||
q = dns.message.make_query(f"endpoints.{project_id}.cloud.goog", "A")
|
||||
r = await dns.asyncquery.tcp(q, "8.8.8.8")
|
||||
return bool(r.answer)
|
||||
|
||||
async def project_nb_from_key(as_client: httpx.AsyncClient, ghunt_creds: GHuntCreds, api_key: str, fallback=True) -> str|None:
|
||||
identitytoolkit_api = IdentityToolkitHttp(ghunt_creds)
|
||||
found, project_config = await identitytoolkit_api.get_project_config(as_client, api_key)
|
||||
if found:
|
||||
return project_config.project_id
|
||||
if fallback:
|
||||
# Fallback on fetching the project number by producing an error
|
||||
import json
|
||||
import re
|
||||
req = await as_client.get("https://blobcomments-pa.clients6.google.com/$discovery/rest", params={"key": api_key})
|
||||
try:
|
||||
data = json.loads(req.text)
|
||||
return re.findall(r'\d{12}', data["error"]["message"])[0]
|
||||
except Exception:
|
||||
pass
|
||||
return None
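As a rough sketch of how these two helpers could be combined, assuming ghunt_creds is a loaded GHuntCreds, as_client is an httpx.AsyncClient, and the API key and project ID are placeholders:

    project_ref = await project_nb_from_key(as_client, ghunt_creds, "AIzaPLACEHOLDER")
    panel_exists = await is_cloud_functions_panel_existing("my-project-id")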
ghunt/helpers/iam.py (new file, 29 lines)
@@ -0,0 +1,29 @@
|
||||
import httpx
|
||||
import asyncio
|
||||
|
||||
from ghunt.objects.base import GHuntCreds
|
||||
from ghunt.apis.mobilesdk import MobileSDKPaHttp
|
||||
from ghunt.knowledge import iam
|
||||
from ghunt.helpers.utils import chunkify
|
||||
|
||||
from typing import *
|
||||
|
||||
|
||||
async def test_all_permissions(as_client: httpx.AsyncClient, ghunt_creds: GHuntCreds, project_identifier: str):
|
||||
|
||||
async def test_permission(as_client: httpx.AsyncClient, mobilesdk_api: MobileSDKPaHttp, limiter: asyncio.Semaphore,
|
||||
project_identifier: str, permissions: List[str], results: List[str]):
|
||||
async with limiter:
|
||||
_, perms = await mobilesdk_api.test_iam_permissions(as_client, project_identifier, permissions)
|
||||
results.extend(perms)
|
||||
|
||||
mobilesdk_api = MobileSDKPaHttp(ghunt_creds)
|
||||
results: List[str] = []
|
||||
limiter = asyncio.Semaphore(20)
|
||||
tasks = []
|
||||
for perms_chunk in chunkify(iam.permissions, 100): # Max 100 permissions per request
|
||||
tasks.append(test_permission(as_client, mobilesdk_api, limiter, project_identifier, perms_chunk, results))
|
||||
await asyncio.gather(*tasks)
|
||||
|
||||
results = list(set(results))
|
||||
print(results)
|
||||
ghunt/helpers/playstore.py (new file, 9 lines)
@@ -0,0 +1,9 @@
import httpx


async def app_exists(as_client: httpx.AsyncClient, package: str) -> bool:
    params = {
        "id": package
    }
    req = await as_client.head("https://play.google.com/store/apps/details", params=params)
    return req.status_code == 200
@@ -31,6 +31,14 @@ def oprint(obj: any) -> str:
    pretty_output = json.dumps(json.loads(serialized), indent=2)
    print(pretty_output)

def chunkify(lst, n):
    """
    Split a given list into n roughly equal chunks.
    """
    k, m = divmod(len(lst), n)
    for i in range(n):
        yield lst[i*k+min(i, m):(i+1)*k+min(i+1, m)]

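A quick illustration of the helper's behaviour: it yields n chunks of roughly equal size rather than chunks of n items, so the caller added in ghunt/helpers/iam.py (which passes n=100 next to a "Max 100 permissions per request" comment) stays under that limit only because each of its 100 chunks ends up well below 100 permissions:

    from ghunt.helpers.utils import chunkify

    print(list(chunkify(list(range(10)), 3)))
    # [[0, 1, 2, 3], [4, 5, 6], [7, 8, 9]]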
def within_docker() -> bool:
    return Path('/.dockerenv').is_file()
ghunt/knowledge/iam.py (new file, 6834 lines): file diff suppressed because it is too large.
@@ -1,6 +1,9 @@
|
||||
sigs = {
|
||||
"com.google.android.play.games": "38918a453d07199354f8b19af05ec6562ced5788",
|
||||
"com.google.android.apps.docs": "38918a453d07199354f8b19af05ec6562ced5788",
|
||||
"com.android.vending": "38918a453d07199354f8b19af05ec6562ced5788",
|
||||
"com.google.android.youtube": "24bb24c05e47e0aefa68a58a766179d9b613a600",
|
||||
"com.android.chrome": "38918a453d07199354f8b19af05ec6562ced5788"
|
||||
"com.google.android.apps.photos": "38918a453d07199354f8b19af05ec6562ced5788",
|
||||
"com.google.android.gms": "38918a453d07199354f8b19af05ec6562ced5788",
|
||||
"com.android.chrome": "38918a453d07199354f8b19af05ec6562ced5788",
|
||||
}
|
||||
ghunt/modules/spiderdal.py (new file, 195 lines)
@@ -0,0 +1,195 @@
|
||||
import asyncio
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
|
||||
from ghunt import globals as gb
|
||||
from ghunt.objects.base import GHuntCreds
|
||||
from ghunt.objects.utils import TMPrinter
|
||||
from ghunt.helpers.utils import get_httpx_client
|
||||
from ghunt.apis.digitalassetslinks import DigitalAssetsLinksHttp
|
||||
from ghunt.helpers.playstore import app_exists
|
||||
|
||||
import httpx
|
||||
|
||||
|
||||
@dataclass
|
||||
class Asset:
|
||||
site: str
|
||||
package_name: str
|
||||
certificate: str
|
||||
|
||||
async def identify_public_pkgs(as_client: httpx.AsyncClient, pkg_name: str, pkgs: dict[str, str], limiter: asyncio.Semaphore):
|
||||
async with limiter:
|
||||
if await app_exists(as_client, pkg_name):
|
||||
pkgs[pkg_name] = "public"
|
||||
else:
|
||||
pkgs[pkg_name] = "private"
|
||||
|
||||
async def analyze_single(as_client: httpx.AsyncClient, dal: DigitalAssetsLinksHttp, current_target: Asset, sites: dict[str, dict], pkgs: dict[str, dict], visited: set, limiter: asyncio.Semaphore):
|
||||
short_pkg_name = f"{current_target.package_name}${current_target.certificate}"
|
||||
|
||||
async with limiter:
|
||||
if current_target.site:
|
||||
_, res = await dal.list_statements(as_client, website=current_target.site)
|
||||
elif current_target.package_name:
|
||||
            _, res = await dal.list_statements(as_client, android_package_name=current_target.package_name, android_cert_fingerprint=current_target.certificate)

        for item in res.statements:
            if item.target.web.site:
                clean_site = item.target.web.site.strip('.')
                if clean_site not in sites:
                    sites[clean_site] = {
                        "asset": Asset(site=clean_site, package_name=None, certificate=None),
                        "first_origin": current_target,
                        "origins": set(),
                    }
                sites[clean_site]["origins"].add(current_target.site if current_target.site else short_pkg_name)

            if item.target.android_app.package_name:
                temp_name = f"{item.target.android_app.package_name}${item.target.android_app.certificate.sha_fingerprint}"

                if temp_name not in pkgs:
                    pkgs[temp_name] = {
                        "asset": Asset(site=None, package_name=item.target.android_app.package_name, certificate=item.target.android_app.certificate.sha_fingerprint),
                        "first_origin": current_target,
                        "origins": set(),
                    }
                pkgs[temp_name]["origins"].add(current_target.site if current_target.site else short_pkg_name)

        if current_target.site:
            visited.add(current_target.site)
            if res.statements and current_target.site not in sites:
                sites[current_target.site] = {
                    "asset": current_target,
                    "first_origin": None,
                    "origins": set(),
                }
        if current_target.package_name:
            visited.add(short_pkg_name)
            if res.statements and short_pkg_name not in pkgs:
                pkgs[short_pkg_name] = {
                    "asset": current_target,
                    "first_origin": None,
                    "origins": set(),
                }


async def main(url: str, package: str, fingerprint: str, strict: bool, json_file: Path):
    ghunt_creds = GHuntCreds()
    ghunt_creds.load_creds()

    as_client = get_httpx_client()
    digitalassetslink = DigitalAssetsLinksHttp(ghunt_creds)

    tmprinter = TMPrinter()

    sites: dict = {}
    pkgs: dict = {}
    visited = set()

    limiter = asyncio.Semaphore(10)

    current_targets: list[Asset] = []

    if url:
        http = False
        if url.startswith("http"):
            http = True

        if url.startswith(("http://", "https://")):
            domain = url.split("//")[1]
        else:
            domain = url

        temp_targets = []
        temp_targets.append(f"https://{domain}")
        if http:
            temp_targets.append(f"http://{domain}")
        if not strict:
            temp_targets.append(f"https://www.{domain}")
            if http:
                temp_targets.append(f"http://www.{domain}")

        for target in temp_targets:
            current_targets.append(Asset(site=target, package_name=None, certificate=None))

    if package and fingerprint:
        current_targets.append(Asset(site=None, package_name=package, certificate=fingerprint))

    round = 0
    total_scanned = 0
    print()
    while current_targets:
        round += 1
        total_scanned += len(current_targets)

        tmprinter.out(f"🕷️ [R{round}]: Investigating {len(current_targets)} targets...", style="bold magenta")

        await asyncio.gather(
            *[
                analyze_single(as_client, digitalassetslink, target, sites, pkgs, visited, limiter)
                for target in current_targets
            ]
        )

        # Next candidates
        next_sites = [site["asset"] for name,site in sites.items() if not name in visited]
        next_pkgs = [pkg["asset"] for name,pkg in pkgs.items() if not name in visited]
        current_targets = next_sites + next_pkgs

    tmprinter.clear()
    gb.rc.print(f"🕷️ [R{round}]: Investigation done ! {total_scanned} assets scanned.", style="bold magenta")

    # Sort
    pkgs_names = {x:None for x in set([x["asset"].package_name for x in pkgs.values()])}
    await asyncio.gather(
        *[
            identify_public_pkgs(as_client, pkg_name, pkgs_names, limiter)
            for pkg_name in pkgs_names
        ]
    )

    # Print results
    if sites:
        gb.rc.print(f"\n🌐 {len(sites)} site{'s' if len(sites) > 1 else ''} found !", style="white")
        for site_url, site in sites.items():
            if site["first_origin"]:
                if site["first_origin"].site:
                    gb.rc.print(f"- [deep_sky_blue1][link={site_url}]{site_url}[/link][/deep_sky_blue1] [steel_blue italic](leaked by : {site['first_origin'].site})[/steel_blue italic]")
                else:
                    gb.rc.print(f"- [deep_sky_blue1][link={site_url}]{site_url}[/link][/deep_sky_blue1] [steel_blue italic](leaked by : {site['first_origin'].package_name})[/steel_blue italic]")
            else:
                gb.rc.print(f"- [deep_sky_blue1][link={site_url}]{site_url}[/link][/deep_sky_blue1]")
    else:
        gb.rc.print("\nNo sites found.", style="italic bright_black")

    if pkgs:
        gb.rc.print(f"\n📦 {len(pkgs_names)} Android package{'s' if len(pkgs_names) > 1 else ''} found !", style="white")
        for pkg_name, state in pkgs_names.items():
            if state == "public":
                gb.rc.print(f"- 🏪 {pkg_name}", style="light_steel_blue")
            else:
                gb.rc.print(f"- 🥷 {pkg_name}", style="light_steel_blue")
            gb.rc.print("\tFingerprints (SHA256) :", style="steel_blue")
            fingerprints_cache = set()
            for pkg in pkgs.values():
                if pkg["asset"].package_name == pkg_name:
                    if pkg["asset"].certificate not in fingerprints_cache:
                        if pkg["first_origin"].site:
                            gb.rc.print(f"\t\t- {pkg['asset'].certificate} (leaked by : {pkg['first_origin'].site})", style="steel_blue italic", emoji=False)
                        else:
                            gb.rc.print(f"\t\t- {pkg['asset'].certificate} (leaked by : {pkg['first_origin'].package_name})", style="steel_blue italic", emoji=False)
                        fingerprints_cache.add(pkg["asset"].certificate)
    else:
        gb.rc.print("\nNo packages found.", style="bright_black italic")

    if json_file:
        import json
        from ghunt.objects.encoders import GHuntEncoder
        with open(json_file, "w", encoding="utf-8") as f:
            f.write(json.dumps({
                "sites": sites,
                "packages": pkgs
            }, cls=GHuntEncoder, indent=4))
        gb.rc.print(f"\n[+] JSON output written to {json_file} !\n", style="italic")
    else:
        print()
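
# --- Illustrative sketch (not part of this commit) ---------------------------
# The crawl above walks Digital Asset Links statements through GHunt's API
# wrapper. The same declarations are served publicly by participating sites at
# /.well-known/assetlinks.json, so one hop of the spider can be reproduced
# standalone like this. "example.com" is a placeholder target.

import httpx

def fetch_assetlinks(domain: str) -> list[dict]:
    """Return the raw Digital Asset Links statements declared by a domain."""
    resp = httpx.get(f"https://{domain}/.well-known/assetlinks.json", timeout=10)
    resp.raise_for_status()
    return resp.json()

for statement in fetch_assetlinks("example.com"):
    target = statement.get("target", {})
    # Each target is either another web site or an Android app (package name +
    # certificate fingerprints), exactly what analyze_single() feeds back into
    # the next crawl round.
    print(target.get("site") or target.get("package_name"))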

@@ -6,30 +6,55 @@ from ghunt.errors import *
from ghunt.helpers.auth import *

import httpx
import asyncio

from datetime import datetime, timezone
from typing import *
import asyncio
from dataclasses import dataclass


# APIs objects

@dataclass
class EndpointConfig(SmartObj):
    def __init__(self, headers: Dict[str, str], cookies: str):
    def __init__(self,
                 name: str="",
                 headers: Dict[str, str] = {},
                 cookies: Dict[str, str] = {},
                 ext_metadata: Dict[str, Dict[str, str]] = {},
                 verb: str = "",
                 data_type: str|None = None,
                 authentication_mode: str|None = None,
                 require_key: str | None = None,
                 key_origin: str | None = None,
                 _computed_headers: Dict[str, str] = {},
                 _computed_cookies: Dict[str, str] = {}
                 ):
        self.name = name
        self.headers = headers
        self.cookies = cookies
        self.ext_metadata = ext_metadata
        self.verb = verb
        self.data_type = data_type
        self.authentication_mode = authentication_mode
        self.require_key = require_key
        self.key_origin = key_origin
        self._computed_headers = _computed_headers
        self._computed_cookies = _computed_cookies

class GAPI(SmartObj):
    def __init__(self):
        self.api_name: str = ""
        self.package_name: str = ""
        self.scopes: List[str] = []

        self.hostname: str = ""
        self.scheme: str = ""
        self.loaded_endpoints: Dict[str, EndpointConfig] = {}
        self.creds: GHuntCreds = None
        self.headers: Dict[str, str] = {}
        self.cookies: Dict[str, str] = {}
        self.gen_token_lock: asyncio.Semaphore = None

        self.authentication_mode: str = ""
        self.require_key: str = ""
        self.key_origin: str = ""
        self.gen_token_lock: asyncio.Lock = None

    def _load_api(self, creds: GHuntCreds, headers: Dict[str, str]):
        if not creds.are_creds_loaded():
@@ -38,47 +63,47 @@ class GAPI(SmartObj):
        if not is_headers_syntax_good(headers):
            raise GHuntCorruptedHeadersError(f"The provided headers when loading the endpoint seems corrupted, please check it : {headers}")

        if self.authentication_mode == "oauth":
            self.gen_token_lock = asyncio.Semaphore(1)

        cookies = {}
        if self.authentication_mode in ["sapisidhash", "cookies_only"]:
            if not (cookies := creds.cookies):
                raise GHuntInsufficientCreds(f"This endpoint requires the cookies in the GHuntCreds object, but they aren't loaded.")

        if (key_name := self.require_key):
            if not (api_key := get_api_key(key_name)):
                raise GHuntInsufficientCreds(f"This API requires the {key_name} API key in the GHuntCreds object, but it isn't loaded.")
            if not self.key_origin:
                self.key_origin = get_origin_of_key(key_name)
            headers = {**headers, "X-Goog-Api-Key": api_key, **headers, "Origin": self.key_origin, "Referer": self.key_origin}

        if self.authentication_mode == "sapisidhash":
            if not (sapisidhash := creds.cookies.get("SAPISID")):
                raise GHuntInsufficientCreds(f"This endpoint requires the SAPISID cookie in the GHuntCreds object, but it isn't loaded.")

            headers = {**headers, "Authorization": f"SAPISIDHASH {gen_sapisidhash(sapisidhash, self.key_origin)}"}

        self.creds = creds
        self.headers = headers
        self.cookies = cookies

    def _load_endpoint(self, endpoint_name: str,
                       headers: Dict[str, str]={}, ext_metadata: Dict[str, str]={}):
        if endpoint_name in self.loaded_endpoints:
    def _load_endpoint(self, endpoint: EndpointConfig):
        if endpoint.name in self.loaded_endpoints:
            return

        headers = {**headers, **self.headers}
        headers = {**endpoint.headers, **self.headers}

        if endpoint.authentication_mode == "oauth":
            self.gen_token_lock = asyncio.Lock()

        cookies = {}
        if endpoint.authentication_mode in ["sapisidhash", "cookies_only"]:
            if not (cookies := self.creds.cookies):
                raise GHuntInsufficientCreds(f"This endpoint requires the cookies in the GHuntCreds object, but they aren't loaded.")

        if (key_name := endpoint.require_key):
            if not (api_key := get_api_key(key_name)):
                raise GHuntInsufficientCreds(f"This endpoint requires the {key_name} API key in the GHuntCreds object, but it isn't loaded.")
            if not endpoint.key_origin:
                endpoint.key_origin = get_origin_of_key(key_name)
            headers = {**headers, "X-Goog-Api-Key": api_key, **headers, "Origin": endpoint.key_origin, "Referer": endpoint.key_origin}

        if endpoint.authentication_mode == "sapisidhash":
            if not (sapisidhash := cookies.get("SAPISID")):
                raise GHuntInsufficientCreds(f"This endpoint requires the SAPISID cookie in the GHuntCreds object, but it isn't loaded.")

            headers = {**headers, "Authorization": f"SAPISIDHASH {gen_sapisidhash(sapisidhash, endpoint.key_origin)}"}

        # https://github.com/googleapis/googleapis/blob/f8a290120b3a67e652742a221f73778626dc3081/google/api/context.proto#L43
        for ext_type,ext_value in ext_metadata.items():
        for ext_type,ext_value in endpoint.ext_metadata.items():
            ext_bin_headers = {f"X-Goog-Ext-{k}-{ext_type.title()}":v for k,v in ext_value.items()}
            headers = {**headers, **ext_bin_headers}

        if not is_headers_syntax_good(headers):
            raise GHuntCorruptedHeadersError(f"The provided headers when loading the endpoint seems corrupted, please check it : {headers}")

        self.loaded_endpoints[endpoint_name] = EndpointConfig(headers, self.cookies)
        endpoint._computed_headers = headers
        endpoint._computed_cookies = cookies
        self.loaded_endpoints[endpoint.name] = endpoint

    async def _check_and_gen_authorization_token(self, as_client: httpx.AsyncClient, creds: GHuntCreds):
        async with self.gen_token_lock:
@@ -99,54 +124,35 @@ class GAPI(SmartObj):
            gb.rc.print(f"\n[+] New token for {self.api_name} has been generated", style="italic")
            return token

    async def _query(self, as_client: httpx.AsyncClient, verb: str, endpoint_name: str, base_url: str, params: Dict[str, Any], data: Any, data_type: str) -> httpx.Response:
    async def _query(self, endpoint_name: str, as_client: httpx.AsyncClient, base_url: str, params: Dict[str, Any]={}, data: Any=None) -> httpx.Response:
        endpoint = self.loaded_endpoints[endpoint_name]
        headers = endpoint.headers
        if self.authentication_mode == "oauth":
        headers = endpoint._computed_headers
        if endpoint.authentication_mode == "oauth":
            token = await self._check_and_gen_authorization_token(as_client, self.creds)
            headers = {**headers, "Authorization": f"OAuth {token}"}

        if verb == "GET":
        if endpoint.verb == "GET":
            req = await as_client.get(f"{self.scheme}://{self.hostname}{base_url}",
                params=params, headers=headers, cookies=endpoint.cookies)
        elif verb == "POST":
            if data_type == "data":
                params=params, headers=headers, cookies=endpoint._computed_cookies)
        elif endpoint.verb == "POST":
            if endpoint.data_type == "data":
                req = await as_client.post(f"{self.scheme}://{self.hostname}{base_url}",
                    params=params, data=data, headers=headers, cookies=endpoint.cookies)
            elif data_type == "json":
                    params=params, data=data, headers=headers, cookies=endpoint._computed_cookies)
            elif endpoint.data_type == "json":
                req = await as_client.post(f"{self.scheme}://{self.hostname}{base_url}",
                    params=params, json=data, headers=headers, cookies=endpoint.cookies)
                    params=params, json=data, headers=headers, cookies=endpoint._computed_cookies)
            else:
                raise GHuntUnknownRequestDataTypeError(f"The provided data type {data_type} wasn't recognized by GHunt.")
                raise GHuntUnknownRequestDataTypeError(f"The provided data type {endpoint.data_type} wasn't recognized by GHunt.")
        else:
            raise GHuntUnknownVerbError(f"The provided verb {verb} wasn't recognized by GHunt.")
            raise GHuntUnknownVerbError(f"The provided verb {endpoint.verb} wasn't recognized by GHunt.")

        return req


# Others

class Parser(SmartObj):
    def _merge(self, obj) -> any:
        """Merging two objects of the same class."""

        def recursive_merge(obj1, obj2, module_name: str) -> any:
            directions = [(obj1, obj2), (obj2, obj1)]
            for direction in directions:
                from_obj, target_obj = direction
                for attr_name, attr_value in from_obj.__dict__.items():
                    class_name = get_class_name(attr_value)
                    if class_name.startswith(module_name) and attr_name in target_obj.__dict__:
                        merged_obj = recursive_merge(attr_value, target_obj.__dict__[attr_name], module_name)
                        target_obj.__dict__[attr_name] = merged_obj

                    elif not attr_name in target_obj.__dict__ or \
                        (attr_value and not target_obj.__dict__.get(attr_name)):
                        target_obj.__dict__[attr_name] = attr_value
            return obj1

        class_name = get_class_name(self)
        module_name = self.__module__
        if not get_class_name(obj).startswith(class_name):
            raise GHuntObjectsMergingError("The two objects being merged aren't from the same class.")

        self = recursive_merge(self, obj, module_name)
    """
    The class that is used to initialize every parser class.
    It will automatically manage the __slots__ attribute.
    """
    pass
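
# --- Illustrative sketch (not part of this commit) ---------------------------
# How a GAPI subclass is expected to consume the reworked EndpointConfig /
# _load_endpoint() / _query() flow: build the config once, register it, then
# query by name. "ExampleHttp", "/v1/example" and "item_id" are made-up
# placeholders; only the EndpointConfig fields and the new _query() signature
# come from the code above. It assumes _load_api() was already called (e.g. in
# __init__) so creds, hostname and scheme are set.

import httpx

class ExampleHttp(GAPI):
    async def get_example(self, as_client: httpx.AsyncClient, item_id: str) -> httpx.Response:
        endpoint = EndpointConfig(
            name="get_example",
            verb="GET",
            data_type=None,            # no request body on a GET
            authentication_mode=None,  # or "sapisidhash", "cookies_only", "oauth"
            require_key=None,          # name of an API key if the endpoint needs one
            key_origin=None
        )
        self._load_endpoint(endpoint)

        params = {"id": item_id}
        return await self._query(endpoint.name, as_client, "/v1/example", params)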

@@ -6,11 +6,12 @@ from datetime import datetime
import base64

from autoslot import Slots
import httpx

from ghunt.errors import GHuntInvalidSession

# class SmartObj(Slots): # Not Python 3.13 compatible
# class SmartObj(Slots): # Not Python 3.13 compatible, so dropped
#     pass

class SmartObj():

20
ghunt/objects/session.py
Normal file
@@ -0,0 +1,20 @@
from typing import *

import httpx
from ghunt.helpers.utils import get_httpx_client
from ghunt.helpers import auth
from ghunt.objects.base import GHuntCreds, SmartObj


# class Session(SmartObj):
#     def __init__(self, client: httpx.AsyncClient = None) -> None:
#         self.creds: GHuntCreds = None
#         self.client: httpx.AsyncClient = client or get_httpx_client()

#     @staticmethod
#     async def new(client: httpx.AsyncClient = None, authentify=False) -> "Session":
#         cls = Session(client=client)

#         if authentify:
#             cls.creds = await auth.load_and_auth(client)
#         return cls
@@ -5,16 +5,21 @@ from ghunt.objects.base import SmartObj
from typing import *


class TMPrinter(SmartObj):
    def __init__(self):
        self.max_len = 0
from rich.console import Console

    def out(self, text: str):
class TMPrinter():
    """
    Print temporary text, on the same line.
    """
    def __init__(self, rc: Console=Console(highlight=False)):
        self.max_len = 0
        self.rc = rc

    def out(self, text: str, style: str=""):
        if len(text) > self.max_len:
            self.max_len = len(text)
        else:
            text += (" " * (self.max_len - len(text)))
        print(text, end='\r')

        self.rc.print(text, end='\r', style=style)
    def clear(self):
        print(" " * self.max_len, end="\r")
        self.rc.print(" " * self.max_len, end="\r")
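
# --- Illustrative sketch (not part of this commit) ---------------------------
# Minimal use of the TMPrinter class defined above: out() rewrites the same
# terminal line (padding shorter messages so leftovers get erased) and clear()
# blanks it before normal printing resumes. The loop is a stand-in for real work.

import time

tmprinter = TMPrinter()
for step in range(1, 4):
    tmprinter.out(f"Scanning target {step}/3...", style="bold magenta")
    time.sleep(1)
tmprinter.clear()
print("Done.")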

74
ghunt/parsers/digitalassetslinks.py
Normal file
@@ -0,0 +1,74 @@
from typing import *
from ghunt.objects.apis import Parser


class DalStatements(Parser):
    def __init__(self):
        self.statements: List[DalStatement] = []
        self.max_age: str = ""
        self.debug_string: str = ""

    def _scrape(self, digital_assets_links_base_model_data: Dict[str, any]):
        if (statements_data := digital_assets_links_base_model_data.get('statements')):
            for statements_data_item in statements_data:
                statements_item = DalStatement()
                statements_item._scrape(statements_data_item)
                self.statements.append(statements_item)
        self.max_age = digital_assets_links_base_model_data.get('maxAge')
        self.debug_string = digital_assets_links_base_model_data.get('debugString')

class DalStatement(Parser):
    def __init__(self):
        self.source: DalSource = DalSource()
        self.relation: str = ""
        self.target: DalTarget = DalTarget()

    def _scrape(self, digital_assets_links_unknown_model1_data: Dict[str, any]):
        if (source_data := digital_assets_links_unknown_model1_data.get('source')):
            self.source._scrape(source_data)
        self.relation = digital_assets_links_unknown_model1_data.get('relation')
        if (target_data := digital_assets_links_unknown_model1_data.get('target')):
            self.target._scrape(target_data)

class DalSource(Parser):
    def __init__(self):
        self.web: DalWeb = DalWeb()

    def _scrape(self, digital_assets_links_source_data: Dict[str, any]):
        if (web_data := digital_assets_links_source_data.get('web')):
            self.web._scrape(web_data)

class DalWeb(Parser):
    def __init__(self):
        self.site: str = ""

    def _scrape(self, digital_assets_links_web_data: Dict[str, str]):
        self.site = digital_assets_links_web_data.get('site')

class DalTarget(Parser):
    def __init__(self):
        self.android_app: DalAndroidApp = DalAndroidApp()
        self.web: DalWeb = DalWeb()

    def _scrape(self, digital_assets_links_target_data: Dict[str, any]):
        if (android_app_data := digital_assets_links_target_data.get('androidApp')):
            self.android_app._scrape(android_app_data)
        if (web_data := digital_assets_links_target_data.get('web')):
            self.web._scrape(web_data)

class DalAndroidApp(Parser):
    def __init__(self):
        self.package_name: str = ""
        self.certificate: DalCertificate = DalCertificate()

    def _scrape(self, digital_assets_links_android_app_data: Dict[str, any]):
        self.package_name = digital_assets_links_android_app_data.get('packageName')
        if (certificate_data := digital_assets_links_android_app_data.get('certificate')):
            self.certificate._scrape(certificate_data)

class DalCertificate(Parser):
    def __init__(self):
        self.sha_fingerprint: str = ""

    def _scrape(self, digital_assets_links_certificate_data: Dict[str, str]):
        self.sha_fingerprint = digital_assets_links_certificate_data.get('sha256Fingerprint')
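
# --- Illustrative sketch (not part of this commit) ---------------------------
# Feeding a Digital Asset Links "statements list"-shaped payload into the
# parsers above. The dict is made-up sample data; only the key names it uses
# ('statements', 'source', 'relation', 'target', 'androidApp', ...) match what
# the _scrape() methods read.

sample = {
    "statements": [
        {
            "source": {"web": {"site": "https://example.com."}},
            "relation": "delegate_permission/common.handle_all_urls",
            "target": {
                "androidApp": {
                    "packageName": "com.example.app",
                    "certificate": {"sha256Fingerprint": "AA:BB:CC"},
                }
            },
        }
    ],
    "maxAge": "3600s",
    "debugString": "",
}

statements = DalStatements()
statements._scrape(sample)
for st in statements.statements:
    print(st.source.web.site, "->", st.target.android_app.package_name)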

94
ghunt/parsers/identitytoolkit.py
Normal file
@@ -0,0 +1,94 @@
from typing import *
from ghunt.objects.apis import Parser


class ITKProjectConfig(Parser):
    def __init__(self):
        self.project_id: str = ""
        self.authorized_domains: List[str] = []

    def _scrape(self, itk_project_config_data: Dict[str, any]):
        self.project_id = itk_project_config_data.get('projectId')
        self.authorized_domains = itk_project_config_data.get('authorizedDomains')

class ITKPublicKeys(Parser):
    def __init__(self):
        self.sk_ib_ng: str = ""
        self.t_xew: str = ""
        self.p_r_ww: str = ""
        self.t_bma: str = ""
        self.tl_gyha: str = ""

    def _scrape(self, itk_public_keys_data: Dict[str, str]):
        self.sk_ib_ng = itk_public_keys_data.get('skIBNg')
        self.t_xew = itk_public_keys_data.get('7TX2ew')
        self.p_r_ww = itk_public_keys_data.get('0pR3Ww')
        self.t_bma = itk_public_keys_data.get('tB0M2A')
        self.tl_gyha = itk_public_keys_data.get('tlGYHA')

class ITKSessionCookiePublicKeys(Parser):
    def __init__(self):
        self.keys: List[ITKSessionCookiePublicKey] = []

    def _scrape(self, itk_session_cookie_public_keys_data: Dict[str, list]):
        if (keys_data := itk_session_cookie_public_keys_data.get('keys')):
            for keys_data_item in keys_data:
                keys_item = ITKSessionCookiePublicKey()
                keys_item._scrape(keys_data_item)
                self.keys.append(keys_item)

class ITKSessionCookiePublicKey(Parser):
    def __init__(self):
        self.kty: str = ""
        self.alg: str = ""
        self.use: str = ""
        self.kid: str = ""
        self.n: str = ""
        self.e: str = ""

    def _scrape(self, itk_session_cookie_public_key_data: Dict[str, str]):
        self.kty = itk_session_cookie_public_key_data.get('kty')
        self.alg = itk_session_cookie_public_key_data.get('alg')
        self.use = itk_session_cookie_public_key_data.get('use')
        self.kid = itk_session_cookie_public_key_data.get('kid')
        self.n = itk_session_cookie_public_key_data.get('n')
        self.e = itk_session_cookie_public_key_data.get('e')

class ITKSignupNewUser(Parser):
    def __init__(self):
        self.kind: str = ""
        self.id_token: str = ""
        self.email: str = ""
        self.refresh_token: str = ""
        self.expires_in: str = ""
        self.local_id: str = ""

    def _scrape(self, itk_signup_data: Dict[str, str]):
        self.kind = itk_signup_data.get('kind')
        self.id_token = itk_signup_data.get('idToken')
        self.email = itk_signup_data.get('email')
        self.refresh_token = itk_signup_data.get('refreshToken')
        self.expires_in = itk_signup_data.get('expiresIn')
        self.local_id = itk_signup_data.get('localId')

class ITKVerifyPassword(Parser):
    def __init__(self):
        self.kind: str = ""
        self.local_id: str = ""
        self.email: str = ""
        self.display_name: str = ""
        self.id_token: str = ""
        self.registered: bool = False
        self.refresh_token: str = ""
        self.expires_in: str = ""

    def _scrape(self, itk_verify_password_data: Dict[str, any]):
        self.kind = itk_verify_password_data.get('kind')
        self.local_id = itk_verify_password_data.get('localId')
        self.email = itk_verify_password_data.get('email')
        self.display_name = itk_verify_password_data.get('displayName')
        self.id_token = itk_verify_password_data.get('idToken')
        self.registered = itk_verify_password_data.get('registered')
        self.refresh_token = itk_verify_password_data.get('refreshToken')
        self.expires_in = itk_verify_password_data.get('expiresIn')
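
# --- Illustrative sketch (not part of this commit) ---------------------------
# Same parser pattern for the Identity Toolkit models above: _scrape() maps
# camelCase JSON keys onto snake_case attributes. The dict is sample data.

config = ITKProjectConfig()
config._scrape({
    "projectId": "example-project",
    "authorizedDomains": ["example.com", "example-project.firebaseapp.com"],
})
print(config.project_id, config.authorized_domains)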

19
ghunt/parsers/mobilesdk.py
Normal file
@@ -0,0 +1,19 @@
from typing import *
from ghunt.objects.apis import Parser


class MobileSDKDynamicConfig(Parser):
    def __init__(self):
        self.database_url: str = ""
        self.storage_bucket: str = ""
        self.auth_domain: str = ""
        self.messaging_sender_id: str = ""
        self.project_id: str = ""

    def _scrape(self, dynamic_config_base_model_data: Dict[str, str]):
        self.database_url = dynamic_config_base_model_data.get('databaseURL')
        self.storage_bucket = dynamic_config_base_model_data.get('storageBucket')
        self.auth_domain = dynamic_config_base_model_data.get('authDomain')
        self.messaging_sender_id = dynamic_config_base_model_data.get('messagingSenderId')
        self.project_id = dynamic_config_base_model_data.get('projectId')

@@ -1,4 +1,4 @@
metadata = {
    "version": "2.2.1",
    "version": "2.3.2",
    "name": "Wardriving Edition"
}
22
poetry.lock
generated
@@ -110,6 +110,26 @@ files = [
    {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
]

[[package]]
name = "dnspython"
version = "2.7.0"
description = "DNS toolkit"
optional = false
python-versions = ">=3.9"
files = [
    {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"},
    {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"},
]

[package.extras]
dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"]
dnssec = ["cryptography (>=43)"]
doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"]
doq = ["aioquic (>=1.0.0)"]
idna = ["idna (>=3.7)"]
trio = ["trio (>=0.23)"]
wmi = ["wmi (>=1.5.1)"]

[[package]]
name = "geographiclib"
version = "2.0"
@@ -757,4 +777,4 @@ files = [
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "26f590db442e3544788dd091345a919c04a85ddda542bbf1f8b18b5fb246cd4c"
content-hash = "d3fb4f6ff1e00b0769adaf4de91bfd9e50e1d41104d601c44fb970f3a7736ef6"

@@ -1,6 +1,6 @@
[project]
name = "ghunt"
version = "2.2.1"
version = "2.3.2"
authors = [
    {name = "mxrch", email = "mxrch.dev@pm.me"},
]
@@ -31,7 +31,7 @@ include = ["ghunt", "ghunt.*"]

[tool.poetry]
name = "ghunt"
version = "2.2.1"
version = "2.3.2"
description = "An offensive Google framework."
authors = ["mxrch <mxrch.dev@pm.me>"]
license = "AGPL-3.0"
@@ -55,6 +55,7 @@ inflection = "^0.5.1"
jsonpickle = "^3.3.0"
packaging = "^24.1"
rich-argparse = "^1.5.2"
dnspython = "^2.7.0"


[build-system]

@@ -1,17 +0,0 @@
geopy==2.3.0
httpx[http2]==0.23.1
imagehash==4.3.1
pillow==9.3.0
python-dateutil==2.8.2
rich==12.6.0
beautifultable==1.1.0
beautifulsoup4==4.11.1
alive-progress==2.4.1
protobuf==4.21.9
autoslot==2021.10.1
humanize==4.4.0
inflection==0.5.1
jsonpickle==2.2.0
packaging==23.0
jsonpickle==2.2.0
rich_argparse==1.5.0