From c536e9cf91c9dfae3a34cee0bb6791362a4227c4 Mon Sep 17 00:00:00 2001
From: theSoberSobber <109434814+theSoberSobber@users.noreply.github.com>
Date: Thu, 26 Sep 2024 00:24:24 +0530
Subject: [PATCH 1/3] Added Decorator Update

---
 src/search.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/search.py b/src/search.py
index 50558a7..f7f7894 100644
--- a/src/search.py
+++ b/src/search.py
@@ -5,6 +5,7 @@ from time import sleep
 from botasaurus import *
 from .utils import default_request_options
+from botasaurus.request_decorator import request
 import requests
 
 FAILED_DUE_TO_CREDITS_EXHAUSTED = "FAILED_DUE_TO_CREDITS_EXHAUSTED"

From e30bb19acba9b9c5e8e69ca4b9e8885c95b64899 Mon Sep 17 00:00:00 2001
From: theSoberSobber <109434814+theSoberSobber@users.noreply.github.com>
Date: Thu, 26 Sep 2024 00:24:50 +0530
Subject: [PATCH 2/3] Updated Imports

---
 src/write_output.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/write_output.py b/src/write_output.py
index 6546bba..2cf95e6 100644
--- a/src/write_output.py
+++ b/src/write_output.py
@@ -1,5 +1,5 @@
 from botasaurus import bt
-from botasaurus.decorators import print_filenames
+from botasaurus.decorators_common import print_filenames
 from .write_output_utils import kebab_case, make_folders
 

From 2f10564e14aaf08c9712ef24a6889ddf8b1e2e9f Mon Sep 17 00:00:00 2001
From: theSoberSobber <109434814+theSoberSobber@users.noreply.github.com>
Date: Thu, 26 Sep 2024 00:28:53 +0530
Subject: [PATCH 3/3] Global Config

---
 src/search.py | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/src/search.py b/src/search.py
index f7f7894..830453e 100644
--- a/src/search.py
+++ b/src/search.py
@@ -13,6 +13,12 @@ FAILED_DUE_TO_NO_KEY = "FAILED_DUE_TO_NO_KEY"
 FAILED_DUE_TO_UNKNOWN_ERROR = "FAILED_DUE_TO_UNKNOWN_ERROR"
 
+PROTOCOL = "https"
+
+HOST = "google-scraper.p.rapidapi.com"
+API_PATH = "search/"
+HOST_URL = f"{PROTOCOL}://{HOST}/{API_PATH}"
+
 def update_credits():
     credits_used = bt.LocalStorage.get_item("credits_used", 0)
     bt.LocalStorage.set_item("credits_used", credits_used + 1)
@@ -37,7 +43,7 @@ def do_request(data, retry_count=3):
     headers = {
         "X-RapidAPI-Key": key,
-        "X-RapidAPI-Host": "google-scraper.p.rapidapi.com"
+        "X-RapidAPI-Host": HOST
     }
 
@@ -100,7 +106,7 @@ def search(_, data, metadata):
             "error":FAILED_DUE_TO_NO_KEY
         })
     max_items = data['max']
-    url = "https://google-scraper.p.rapidapi.com/search/"
+    url = HOST_URL
    qp = {"query": data['query']}
     params = {**qp, 'link':cl.join_link(url, query_params=qp)}
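
For reference, a minimal sketch of how the constants introduced in PATCH 3/3 could be exercised against the RapidAPI endpoint. The `fetch_results` helper, its signature, and the error handling are illustrative assumptions, not code from this patch series; only the constants, header names, and the query parameter mirror what the diff shows in `src/search.py`.

```python
import requests

# Module-level config as introduced in PATCH 3/3 (src/search.py).
PROTOCOL = "https"
HOST = "google-scraper.p.rapidapi.com"
API_PATH = "search/"
HOST_URL = f"{PROTOCOL}://{HOST}/{API_PATH}"


def fetch_results(key: str, query: str) -> dict:
    """Hypothetical helper mirroring the request built in do_request/search (assumption)."""
    headers = {
        "X-RapidAPI-Key": key,    # RapidAPI credential, supplied by the caller
        "X-RapidAPI-Host": HOST,  # reuses the HOST constant instead of a string literal
    }
    # HOST_URL is composed from PROTOCOL, HOST and API_PATH, matching `url = HOST_URL`
    # in search(); the search term is sent as a query-string parameter.
    response = requests.get(HOST_URL, headers=headers, params={"query": query})
    response.raise_for_status()
    return response.json()
```

Centralizing PROTOCOL, HOST, API_PATH, and HOST_URL at module level keeps the RapidAPI host out of the request-building code, so both the `X-RapidAPI-Host` header in `do_request` and the URL in `search` stay in sync from a single place.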