@@ -264,14 +264,14 @@ def upload_sample_from_file(self, file_source, custom_filename=None, archive_pas
264264
265265 return response
266266
267- def upload_sample_from_url (self , file_url , crawler = None , archive_password = None , rl_cloud_sandbox_platform = None ):
268- """Accepts a file url and returns a response .
267+ def submit_url_for_analysis (self , url_string , crawler = None , archive_password = None , rl_cloud_sandbox_platform = None ):
268+ """Sends a URL for analysis on A1000 .
269269 Additional parameters can be provided.
270- :param file_url : URL from which the appliance should download the data
271- :type file_url : str
270+ :param url_string : URL to analyze
271+ :type url_string : str
272272 :param crawler: crawler method (local or cloud)
273273 :type crawler: str
274- :param archive_password: password, if file is a password-protected archive
274+ :param archive_password: password, if it is a password-protected archive
275275 :type archive_password: str
276276 :param rl_cloud_sandbox_platform: Cloud Sandbox platform (windows7, windows10 or macos_11)
277277 :type rl_cloud_sandbox_platform: str
@@ -283,7 +283,7 @@ def upload_sample_from_url(self, file_url, crawler=None, archive_password=None,
283283 crawler = crawler ,
284284 archive_password = archive_password ,
285285 rl_cloud_sandbox_platform = rl_cloud_sandbox_platform ,
286- file_url = file_url
286+ url_string = url_string
287287 )
288288
289289 url = self ._url .format (endpoint = self .__UPLOAD_ENDPOINT )
@@ -381,30 +381,30 @@ def get_submitted_url_report(self, task_id, retry):
381381 raise RequestTimeoutError ("Report fetching attempts finished - The analysis report is still not ready "
382382 "or the sample does not exist on the appliance." )
383383
384- def upload_sample_from_url_and_get_report (self , file_url , retry = True , crawler = "local" , archive_password = None ,
385- rl_cloud_sandbox_platform = None ):
386- """Accepts a file url for file upload and returns a report response .
387- This method combines uploading a sample from url and obtaining the summary analysis report.
384+ def submit_url_for_analysis_and_get_report (self , url_string , retry = True , crawler = "local" , archive_password = None ,
385+ rl_cloud_sandbox_platform = None ):
386+ """Sends a URL for analysis on A1000 .
387+ This method combines submitting a URL for analysis and obtaining the summary analysis report.
388388 Additional fields can be provided.
389389 The result fetching action of this method utilizes the set number of retries and wait time in seconds to time
390390 out if the analysis results are not ready.
391- :param file_url : URL from which the appliance should download the data
392- :type file_url : str
391+ :param url_string : URL to analyze
392+ :type url_string : str
393393 :param retry: if set to False there will only be one try at obtaining the analysis report
394394 :type retry: bool
395395 :param crawler: crawler method (local or cloud)
396396 :type crawler: string
397- :param archive_password: password, if file is a password-protected archive
397+ :param archive_password: password, if it is a password-protected archive
398398 :type archive_password: str
399399 :param rl_cloud_sandbox_platform: Cloud Sandbox platform (windows7, windows10 or macos_11)
400400 :type rl_cloud_sandbox_platform: str
401401 :return: :class:`Response <Response>` object
402402 :rtype: requests.Response
403403 """
404404
405- upload_response = self .upload_sample_from_url ( file_url = file_url , crawler = crawler ,
406- archive_password = archive_password ,
407- rl_cloud_sandbox_platform = rl_cloud_sandbox_platform )
405+ upload_response = self .submit_url_for_analysis ( url_string = url_string , crawler = crawler ,
406+ archive_password = archive_password ,
407+ rl_cloud_sandbox_platform = rl_cloud_sandbox_platform )
408408
409409 response_detail = upload_response .json ().get ("detail" )
410410 task_id = response_detail .get ("id" )
@@ -2287,15 +2287,15 @@ def __get_token(self, username, password):
22872287 return token
22882288
22892289 @staticmethod
2290- def __create_post_payload (custom_filename = None , file_url = None , crawler = None , archive_password = None ,
2290+ def __create_post_payload (custom_filename = None , url_string = None , crawler = None , archive_password = None ,
22912291 rl_cloud_sandbox_platform = None , tags = None , comment = None , cloud_analysis = True ,
22922292 classification = None , risk_score = None , threat_platform = None , threat_type = None ,
22932293 threat_name = None , name = None , content = None , publish = None , ticloud = None ):
22942294 """Accepts optional fields and returns a formed dictionary of those fields.
22952295 :param custom_filename: custom file name for upload
22962296 :type custom_filename: str
2297- :param file_url : URL from which the appliance should download the data
2298- :type file_url : str
2297+ :param url_string : URL to analyze
2298+ :type url_string : str
22992299 :param crawler: crawler method (local or cloud)
23002300 :type crawler: str
23012301 :param archive_password: password, if file is a password-protected archive
@@ -2340,10 +2340,10 @@ def __create_post_payload(custom_filename=None, file_url=None, crawler=None, ar
23402340 if tags and not isinstance (tags , str ):
23412341 raise WrongInputError ("tags parameter must be string." )
23422342
2343- if file_url :
2344- if not isinstance (file_url , str ):
2343+ if url_string :
2344+ if not isinstance (url_string , str ):
2345- raise WrongInputError ("file_url parameter must be string." )
2345+ raise WrongInputError ("url_string parameter must be string." )
2346- if not file_url .startswith (("http://" , "https://" )):
2346+ if not url_string .startswith (("http://" , "https://" )):
2347- raise WrongInputError ("Supported file_url protocols are HTTP and HTTPS." )
2347+ raise WrongInputError ("Supported url_string protocols are HTTP and HTTPS." )
23482348
23492349 if crawler and crawler not in ("cloud" , "local" ):
@@ -2417,8 +2417,8 @@ def __create_post_payload(custom_filename=None, file_url=None, crawler=None, ar
24172417 if cloud_analysis :
24182418 data ["analysis" ] = "cloud"
24192419
2420- if file_url :
2421- data ["url" ] = file_url
2420+ if url_string :
2421+ data ["url" ] = url_string
24222422
24232423 if classification :
24242424 data ['classification' ] = classification
0 commit comments