It marks the specific file below as "retry later" and eventually ends up in the status shown below. All other crawls seem to complete fine, but because it stops here, it never even gets around to downloading. I did take the recommendation of restricting the number of workers, but that didn't help either.
2020-04-08 19:10:26,601 - 3 - [Failed] priority=C, ttl=3. crawl_supplement: course=decision-making, supplement=types-of-analytics
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/dl_coursera/lib/TaskScheduler.py", line 167, in _func_work
task.run()
File "/usr/local/lib/python3.7/site-packages/dl_coursera/lib/TaskScheduler.py", line 44, in run
self.go()
File "/usr/local/lib/python3.7/site-packages/dl_coursera/lib/TaskScheduler.py", line 75, in go
self._func(**self._kwargs)
File "/usr/local/lib/python3.7/site-packages/dl_coursera/Crawler.py", line 264, in crawl_supplement
assets += crawl_assets(assetIDs)
File "/usr/local/lib/python3.7/site-packages/dl_coursera/Crawler.py", line 287, in crawl_assets
assert len(assets) == len(ids)
AssertionError
Traceback (most recent call last):
File "/usr/local/bin/dl_coursera", line 8, in
sys.exit(main())
File "/usr/local/lib/python3.7/site-packages/dl_coursera_run.py", line 178, in main
soc = crawl(args['cookies'], args['slug'], args['isSpec'], args['outdir'], args['n_worker'])
File "/usr/local/lib/python3.7/site-packages/dl_coursera_run.py", line 71, in crawl
soc = crawler.crawl(slug=slug, isSpec=isSpec)
File "/usr/local/lib/python3.7/site-packages/dl_coursera/Crawler.py", line 313, in crawl
assert len(failures) == 0
AssertionError
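For what it's worth, here is a rough idea of how the failing check could be turned into a diagnostic instead of a hard stop. This is only a sketch, not dl_coursera's actual code: it assumes the assets returned by crawl_assets are dicts with an 'id' key, which may not match the real structure in Crawler.py. The point is just to find out which asset IDs of the types-of-analytics supplement never come back, rather than dying on `assert len(assets) == len(ids)`.

```python
# Minimal diagnostic sketch (NOT dl_coursera's actual code): compare the
# requested asset IDs against whatever the asset lookup returned and report
# which IDs are missing, instead of failing on an assertion.
# Assumption: each returned asset is a dict with an 'id' key; the real
# structure inside Crawler.py may differ.

def report_missing_assets(ids, assets):
    """Return the asset IDs that were requested but not returned."""
    returned = {a['id'] for a in assets}
    missing = [i for i in ids if i not in returned]
    if missing:
        print('%d of %d assets missing: %s' % (len(missing), len(ids), missing))
    return missing


if __name__ == '__main__':
    # Example with made-up IDs, just to show the output format.
    requested = ['asset-1', 'asset-2', 'asset-3']
    returned = [{'id': 'asset-1'}, {'id': 'asset-3'}]
    report_missing_assets(requested, returned)
```

Something like this dropped into crawl_assets (or run by hand against the supplement's asset IDs) would at least show whether one particular asset consistently fails to resolve, which would explain why the retries never succeed.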