You signed in with another tab or window. Reload to refresh your session. You signed out in another tab or window. Reload to refresh your session. You switched accounts on another tab or window. Reload to refresh your session. Dismiss alert
E:\Python-projects\tap4-ai-crawler\venv\Scripts\python.exe E:\Python-projects\tap4-ai-crawler\main_api.py
None of PyTorch, TensorFlow >= 2.0, or Flax have been found. Models won't be available and only tokenizers, configuration and file/data utilities can be used.
Traceback (most recent call last):
File "E:\Python-projects\tap4-ai-crawler\main_api.py", line 10, in
from website_crawler import WebsitCrawler
File "E:\Python-projects\tap4-ai-crawler\website_crawler.py", line 8, in
from util.llm_util import LLMUtil
File "E:\Python-projects\tap4-ai-crawler\util\llm_util.py", line 18, in
tokenizer = LlamaTokenizer.from_pretrained("huggyllama/llama-65b")
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\transformers\tokenization_utils_base.py", line 1951, in from_pretrained
resolved_config_file = cached_file(
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\transformers\utils\hub.py", line 403, in cached_file
resolved_file = hf_hub_download(
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\huggingface_hub\utils_validators.py", line 114, in _inner_fn
return fn(*args, **kwargs)
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\huggingface_hub\file_download.py", line 862, in hf_hub_download
return _hf_hub_download_to_cache_dir(
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\huggingface_hub\file_download.py", line 925, in _hf_hub_download_to_cache_dir
(url_to_download, etag, commit_hash, expected_size, head_call_error) = _get_metadata_or_catch_error(
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\huggingface_hub\file_download.py", line 1376, in _get_metadata_or_catch_error
metadata = get_hf_file_metadata(
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\huggingface_hub\utils_validators.py", line 114, in _inner_fn
return fn(*args, **kwargs)
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\huggingface_hub\file_download.py", line 1296, in get_hf_file_metadata
r = _request_wrapper(
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\huggingface_hub\file_download.py", line 277, in _request_wrapper
response = _request_wrapper(
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\huggingface_hub\file_download.py", line 300, in _request_wrapper
response = get_session().request(method=method, url=url, **params)
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\requests\sessions.py", line 529, in request
resp = self.send(prep, **send_kwargs)
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\requests\sessions.py", line 645, in send
r = adapter.send(request, **kwargs)
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\huggingface_hub\utils_http.py", line 93, in send
return super().send(request, *args, **kwargs)
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\requests\adapters.py", line 440, in send
resp = conn.urlopen(
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\urllib3\connectionpool.py", line 696, in urlopen
self._prepare_proxy(conn)
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\urllib3\connectionpool.py", line 964, in _prepare_proxy
conn.connect()
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\urllib3\connection.py", line 359, in connect
conn = self._connect_tls_proxy(hostname, conn)
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\urllib3\connection.py", line 500, in connect_tls_proxy
return ssl_wrap_socket(
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\urllib3\util\ssl.py", line 453, in ssl_wrap_socket
ssl_sock = ssl_wrap_socket_impl(sock, context, tls_in_tls)
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\urllib3\util\ssl.py", line 495, in _ssl_wrap_socket_impl
return ssl_context.wrap_socket(sock)
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python310\lib\ssl.py", line 512, in wrap_socket
return self.sslsocket_class._create(
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python310\lib\ssl.py", line 1027, in _create
raise ValueError("check_hostname requires server_hostname")
ValueError: check_hostname requires server_hostname
Process finished with exit code 1
The text was updated successfully, but these errors were encountered:
E:\Python-projects\tap4-ai-crawler\venv\Scripts\python.exe E:\Python-projects\tap4-ai-crawler\main_api.py
None of PyTorch, TensorFlow >= 2.0, or Flax have been found. Models won't be available and only tokenizers, configuration and file/data utilities can be used.
Traceback (most recent call last):
File "E:\Python-projects\tap4-ai-crawler\main_api.py", line 10, in
from website_crawler import WebsitCrawler
File "E:\Python-projects\tap4-ai-crawler\website_crawler.py", line 8, in
from util.llm_util import LLMUtil
File "E:\Python-projects\tap4-ai-crawler\util\llm_util.py", line 18, in
tokenizer = LlamaTokenizer.from_pretrained("huggyllama/llama-65b")
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\transformers\tokenization_utils_base.py", line 1951, in from_pretrained
resolved_config_file = cached_file(
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\transformers\utils\hub.py", line 403, in cached_file
resolved_file = hf_hub_download(
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\huggingface_hub\utils_validators.py", line 114, in _inner_fn
return fn(*args, **kwargs)
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\huggingface_hub\file_download.py", line 862, in hf_hub_download
return _hf_hub_download_to_cache_dir(
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\huggingface_hub\file_download.py", line 925, in _hf_hub_download_to_cache_dir
(url_to_download, etag, commit_hash, expected_size, head_call_error) = _get_metadata_or_catch_error(
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\huggingface_hub\file_download.py", line 1376, in _get_metadata_or_catch_error
metadata = get_hf_file_metadata(
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\huggingface_hub\utils_validators.py", line 114, in _inner_fn
return fn(*args, **kwargs)
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\huggingface_hub\file_download.py", line 1296, in get_hf_file_metadata
r = _request_wrapper(
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\huggingface_hub\file_download.py", line 277, in _request_wrapper
response = _request_wrapper(
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\huggingface_hub\file_download.py", line 300, in _request_wrapper
response = get_session().request(method=method, url=url, **params)
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\requests\sessions.py", line 529, in request
resp = self.send(prep, **send_kwargs)
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\requests\sessions.py", line 645, in send
r = adapter.send(request, **kwargs)
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\huggingface_hub\utils_http.py", line 93, in send
return super().send(request, *args, **kwargs)
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\requests\adapters.py", line 440, in send
resp = conn.urlopen(
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\urllib3\connectionpool.py", line 696, in urlopen
self._prepare_proxy(conn)
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\urllib3\connectionpool.py", line 964, in _prepare_proxy
conn.connect()
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\urllib3\connection.py", line 359, in connect
conn = self._connect_tls_proxy(hostname, conn)
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\urllib3\connection.py", line 500, in connect_tls_proxy
return ssl_wrap_socket(
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\urllib3\util\ssl.py", line 453, in ssl_wrap_socket
ssl_sock = ssl_wrap_socket_impl(sock, context, tls_in_tls)
File "E:\Python-projects\tap4-ai-crawler\venv\lib\site-packages\urllib3\util\ssl.py", line 495, in _ssl_wrap_socket_impl
return ssl_context.wrap_socket(sock)
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python310\lib\ssl.py", line 512, in wrap_socket
return self.sslsocket_class._create(
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python310\lib\ssl.py", line 1027, in _create
raise ValueError("check_hostname requires server_hostname")
ValueError: check_hostname requires server_hostname
Process finished with exit code 1
The text was updated successfully, but these errors were encountered: