Merged

vc ssr #1208

Changes from 1 commit

fix create_commit login (#1210)
tastelikefeet authored Feb 6, 2025
commit 1cf7f4ff525e8f711d31462986069410fb6023a6
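A minimal sketch of the pattern this commit settles on, assuming login() falls back to the cached credential when it receives None (which is what the warning removed below implied); the class, method, and parameter names are taken from the diff:

from modelscope.hub.api import HubApi

api = HubApi()
token = None  # or an explicit ModelScope access token string
# After this commit, callers no longer branch on `token`; login() is always
# called and is assumed to resolve a missing token from the cached credential.
api.login(access_token=token)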
91 changes: 52 additions & 39 deletions modelscope/hub/api.py
@@ -497,7 +497,7 @@ def list_models(self,
             raise_for_http_status(r)
         return None

-    def _check_cookie(self, use_cookies: Union[bool, CookieJar] = False) -> CookieJar: # noqa
+    def _check_cookie(self, use_cookies: Union[bool, CookieJar] = False) -> CookieJar:  # noqa
         cookies = None
         if isinstance(use_cookies, CookieJar):
             cookies = use_cookies
@@ -1212,10 +1212,7 @@ def create_repo(
         if not repo_id:
             raise ValueError('Repo id cannot be empty!')

-        if token:
-            self.login(access_token=token)
-        else:
-            logger.warning('No token provided, will use the cached token.')
+        self.login(access_token=token)

         repo_id_list = repo_id.split('/')
         if len(repo_id_list) != 2:
@@ -1287,8 +1284,7 @@ def create_commit(
         commit_message = commit_message or f'Commit to {repo_id}'
         commit_description = commit_description or ''

-        if token:
-            self.login(access_token=token)
+        self.login(access_token=token)

         # Construct payload
         payload = self._prepare_commit_payload(
@@ -1361,8 +1357,7 @@ def upload_file(
             repo_type=repo_type,
         )

-        if token:
-            self.login(access_token=token)
+        self.login(access_token=token)

         commit_message = (
             commit_message if commit_message is not None else f'Upload {path_in_repo} to ModelScope hub'
@@ -1414,7 +1409,7 @@ def upload_folder(
         self,
         *,
         repo_id: str,
-        folder_path: Union[str, Path],
+        folder_path: Union[str, Path, List[str], List[Path]] = None,
         path_in_repo: Optional[str] = '',
         commit_message: Optional[str] = None,
         commit_description: Optional[str] = None,
@@ -1423,41 +1418,38 @@ def upload_folder(
         allow_patterns: Optional[Union[List[str], str]] = None,
         ignore_patterns: Optional[Union[List[str], str]] = None,
         max_workers: int = DEFAULT_MAX_WORKERS,
+        revision: Optional[str] = DEFAULT_REPOSITORY_REVISION,
     ) -> CommitInfo:

         if repo_type not in REPO_TYPE_SUPPORT:
             raise ValueError(f'Invalid repo type: {repo_type}, supported repos: {REPO_TYPE_SUPPORT}')

         allow_patterns = allow_patterns if allow_patterns else None
         ignore_patterns = ignore_patterns if ignore_patterns else None

-        self.upload_checker.check_folder(folder_path)
-
         # Ignore .git folder
         if ignore_patterns is None:
             ignore_patterns = []
         elif isinstance(ignore_patterns, str):
             ignore_patterns = [ignore_patterns]
         ignore_patterns += DEFAULT_IGNORE_PATTERNS

-        if token:
-            self.login(access_token=token)
+        self.login(access_token=token)

         commit_message = (
-            commit_message if commit_message is not None else f'Upload folder to {repo_id} on ModelScope hub'
+            commit_message if commit_message is not None else f'Upload to {repo_id} on ModelScope hub'
         )
-        commit_description = commit_description or 'Uploading folder'
+        commit_description = commit_description or 'Uploading files'

         # Get the list of files to upload, e.g. [('data/abc.png', '/path/to/abc.png'), ...]
-        prepared_repo_objects = HubApi._prepare_upload_folder(
-            folder_path=folder_path,
+        prepared_repo_objects = self._prepare_upload_folder(
+            folder_path_or_files=folder_path,
             path_in_repo=path_in_repo,
             allow_patterns=allow_patterns,
             ignore_patterns=ignore_patterns,
         )

         self.upload_checker.check_normal_files(
-            file_path_list = [item for _, item in prepared_repo_objects],
+            file_path_list=[item for _, item in prepared_repo_objects],
             repo_type=repo_type,
         )
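Given the widened folder_path annotation above, upload_folder can now take either a directory or a list of individual files; a hedged usage sketch with hypothetical paths and repo id (arguments are keyword-only, per the signature above):

from modelscope.hub.api import HubApi

api = HubApi()
# New in this commit: a list of files is accepted in place of a directory.
api.upload_folder(
    repo_id='my-org/my-model',                                   # hypothetical
    folder_path=['/tmp/config.json', '/tmp/model.safetensors'],  # hypothetical files
    commit_message='upload selected files',
)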

@@ -1526,6 +1518,7 @@ def _upload_items(item_pair, **kwargs):
             commit_description=commit_description,
             token=token,
             repo_type=repo_type,
+            revision=revision,
         )

         return commit_info
@@ -1668,7 +1661,7 @@ def _validate_blob(
         resp = response.json()
         raise_on_error(resp)

-        upload_objects = [] # list of objects to upload, [{'url': 'xxx', 'oid': 'xxx'}, ...]
+        upload_objects = []  # list of objects to upload, [{'url': 'xxx', 'oid': 'xxx'}, ...]
         resp_objects = resp['Data']['objects']
         for obj in resp_objects:
             upload_objects.append(
@@ -1678,24 +1671,44 @@ def _validate_blob(

         return upload_objects

-    @staticmethod
     def _prepare_upload_folder(
-        folder_path: Union[str, Path],
-        path_in_repo: str,
-        allow_patterns: Optional[Union[List[str], str]] = None,
-        ignore_patterns: Optional[Union[List[str], str]] = None,
+            self,
+            folder_path_or_files: Union[str, Path, List[str], List[Path]],
+            path_in_repo: str,
+            allow_patterns: Optional[Union[List[str], str]] = None,
+            ignore_patterns: Optional[Union[List[str], str]] = None,
     ) -> List[Union[tuple, list]]:

-        folder_path = Path(folder_path).expanduser().resolve()
-        if not folder_path.is_dir():
-            raise ValueError(f"Provided path: '{folder_path}' is not a directory")
-
-        # List files from folder
-        relpath_to_abspath = {
-            path.relative_to(folder_path).as_posix(): path
-            for path in sorted(folder_path.glob('**/*'))  # sorted to be deterministic
-            if path.is_file()
-        }
+        folder_path = None
+        files_path = None
+        if isinstance(folder_path_or_files, list):
+            if os.path.isfile(folder_path_or_files[0]):
+                files_path = folder_path_or_files
+            else:
+                raise ValueError('Uploading multiple folders is not supported now.')
+        else:
+            if os.path.isfile(folder_path_or_files):
+                files_path = [folder_path_or_files]
+            else:
+                folder_path = folder_path_or_files
+
+        if files_path is None:
+            self.upload_checker.check_folder(folder_path)
+            folder_path = Path(folder_path).expanduser().resolve()
+            if not folder_path.is_dir():
+                raise ValueError(f"Provided path: '{folder_path}' is not a directory")
+
+            # List files from folder
+            relpath_to_abspath = {
+                path.relative_to(folder_path).as_posix(): path
+                for path in sorted(folder_path.glob('**/*'))  # sorted to be deterministic
+                if path.is_file()
+            }
+        else:
+            relpath_to_abspath = {}
+            for path in files_path:
+                if os.path.isfile(path):
+                    self.upload_checker.check_file(path)
+                    relpath_to_abspath[os.path.basename(path)] = path

         # Filter files
         filtered_repo_objects = list(
@@ -2004,5 +2017,5 @@ def check_normal_files(self, file_path_list: List[Union[str, Path]], repo_type:
         total_size = sum([get_file_size(item) for item in normal_file_list])

         if total_size > self.normal_file_size_total_limit:
-            raise ValueError(f'Total size of non-lfs files {total_size/(1024 * 1024)}MB '
-                             f'and exceeds limit: {self.normal_file_size_total_limit/(1024 * 1024)}MB')
+            raise ValueError(f'Total size of non-lfs files {total_size / (1024 * 1024)}MB '
+                             f'and exceeds limit: {self.normal_file_size_total_limit / (1024 * 1024)}MB')
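For the list-of-files branch added to _prepare_upload_folder above, the mapping keys each file by its base name, so the files land at the repo root (or under path_in_repo); the folder branch instead keys by the path relative to the folder. A small illustrative sketch with hypothetical paths:

import os

files = ['/tmp/config.json', '/tmp/model.safetensors']   # hypothetical inputs
relpath_to_abspath = {os.path.basename(p): p for p in files if os.path.isfile(p)}
# e.g. {'config.json': '/tmp/config.json', 'model.safetensors': '/tmp/model.safetensors'}
# versus e.g. {'data/abc.png': Path('/repo/data/abc.png'), ...} for the folder branch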
26 changes: 6 additions & 20 deletions modelscope/utils/hf_util/patcher.py
@@ -466,30 +466,16 @@ def create_commit(
         if any(['Add' not in op.__class__.__name__ for op in operations]):
             raise ValueError(
                 'ModelScope create_commit only support Add operation for now.')
-        ms_operations = []
-        for op in operations:
-            _op = CommitOperationAdd(
-                path_in_repo=op.path_in_repo,
-                path_or_fileobj=op.path_or_fileobj)
-            _op._upload_mode = op._upload_mode
-            if any([
-                    re.search(pattern, _op.path_in_repo or _op.path_or_fileobj)
-                    is not None for pattern in ignore_file_pattern
-            ]):
-                _op._upload_mode = 'lfs'
-            else:
-                _op._upload_mode = 'normal'
-            ms_operations.append(_op)
-        operations = ms_operations
-        return api.create_commit(
-            repo_id,
-            operations,
+
+        all_files = [op.path_or_fileobj for op in operations]
+        api.upload_folder(
+            repo_id=repo_id,
+            folder_path=all_files,
             commit_message=commit_message,
             commit_description=commit_description,
             token=token,
-            repo_type=repo_type,
             revision=revision,
-        )
+            repo_type=repo_type or 'model')

         # Patch repocard.validate
         from huggingface_hub import repocard
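With the patch above, huggingface_hub-style Add operations are reduced to their local file paths and pushed through HubApi.upload_folder. A hedged sketch of what the patched function now effectively does with such operations (the file path is hypothetical; CommitOperationAdd is the huggingface_hub class the old code already used):

import os
import tempfile
from huggingface_hub import CommitOperationAdd

# Create a small local file so the Add operation is valid.
tmp_dir = tempfile.mkdtemp()
local_path = os.path.join(tmp_dir, 'config.json')
with open(local_path, 'w') as f:
    f.write('{}')

ops = [CommitOperationAdd(path_in_repo='config.json', path_or_fileobj=local_path)]

# What the patched create_commit now effectively does with such operations:
all_files = [op.path_or_fileobj for op in ops]
# These local paths are handed to HubApi.upload_folder(folder_path=all_files, ...),
# so each file appears to be uploaded under its base name rather than op.path_in_repo.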
1 change: 1 addition & 0 deletions tests/utils/test_hf_util.py
@@ -227,6 +227,7 @@ def test_who_am_i(self):
         from huggingface_hub import whoami
         self.assertTrue(whoami()['name'] == self.user)

+    @unittest.skipUnless(test_level() >= 1, 'skip test in current test level')
     def test_push_to_hub(self):
         with patch_context():
             from transformers import AutoModelForCausalLM
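The new skipUnless guard gates the push test behind a higher test level. A hedged, self-contained sketch of the gating pattern itself (the environment variable name and default are assumptions, not taken from this diff):

import os
import unittest

def test_level() -> int:
    # Assumption: the level comes from an environment variable such as TEST_LEVEL.
    return int(os.environ.get('TEST_LEVEL', '0'))

class ExampleTest(unittest.TestCase):

    @unittest.skipUnless(test_level() >= 1, 'skip test in current test level')
    def test_push_to_hub(self):
        pass  # placeholder body; the real test pushes a model inside patch_context()

if __name__ == '__main__':
    unittest.main()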