From 1d5283d3828ddc6624e9e09157d07c1abe8bb291 Mon Sep 17 00:00:00 2001
From: iwonder
Date: Tue, 18 Aug 2020 21:49:19 +0000
Subject: [PATCH] New URL with robots.txt

---
 utils.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/utils.py b/utils.py
index 6e4df2b..c38f61a 100644
--- a/utils.py
+++ b/utils.py
@@ -125,13 +125,13 @@ def get_fileio(date: DTDate, card_type: List[str] = CARD_VISA) -> BinaryIO:  # py
     # ua = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:64.0) Gecko/20100101 Firefox/64.0'
     # b.set_header('User-Agent', ua)
     # Ignore robots.txt
-    # b.set_handle_robots(False)
+    b.set_handle_robots(False)
     # Debugging flags
     # b.set_debug_http(True)
     # b.set_debug_redirects(True)
     # b.set_debug_responses(True)
     # PDF URL
-    b.open('https://misc.firstdata.eu/CurrencyCalculator/fremdwaehrungskurse/pdf')
+    b.open('https://online.firstdata.com/CurrencyCalculator/fremdwaehrungskurse/pdf')
     fm = b.forms()[0]
     # This must be done because I can't change the options otherwise
     fm.set_all_readonly(False)
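
Note: the hunk above sits inside get_fileio(), which drives a mechanize-style browser object `b`. Enabling set_handle_robots(False) matters because mechanize's default behaviour is to fetch robots.txt first and refuse disallowed paths; presumably the new online.firstdata.com host blocks the path, which is why the previously commented-out call is now active. Below is a minimal sketch of how the surrounding flow might look, assuming `b` is a mechanize.Browser. The function name, the commented-out field names, and the BytesIO return are illustrative assumptions, not taken from utils.py, and the documented select_form(nr=0) call stands in for the original forms()[0] indexing.

    # Sketch only -- see the assumptions listed above.
    import io
    from typing import BinaryIO

    import mechanize


    def fetch_rates_pdf() -> BinaryIO:
        b = mechanize.Browser()

        # With the default handle_robots=True, mechanize downloads robots.txt
        # first and raises mechanize.RobotExclusionError for disallowed paths;
        # disabling it lets the request through (as the patch does).
        b.set_handle_robots(False)

        # New endpoint introduced by this patch.
        b.open('https://online.firstdata.com/CurrencyCalculator/fremdwaehrungskurse/pdf')

        # Pick the page's first form; its fields are read-only in the HTML,
        # so they must be made writable before they can be changed.
        b.select_form(nr=0)
        b.form.set_all_readonly(False)

        # Hypothetical field names -- the real ones depend on the page's HTML:
        # b.form['datum'] = '18.08.2020'
        # b.form['cardType'] = ['VISA']

        # Submit the form and hand the PDF bytes back as a file-like object.
        response = b.submit()
        return io.BytesIO(response.read())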