Added --meta-override optional flag for backup import command
Fallen-Breath committed Jan 4, 2024
1 parent 2cebc06 commit af5f2d5
Showing 11 changed files with 128 additions and 23 deletions.
9 changes: 8 additions & 1 deletion lang/en_us.yml
@@ -373,6 +373,7 @@ prime_backup:
§3<backup_format>§r: Available options: {backup_formats}. If not specified, try inferring from the file name
§d[Optional flags]§r
§7--auto-meta§r: If the backup metadata file does not exist, create an auto-generated one based on the file content
§7--meta-override §e<meta_json>§r: An optional json object string. It overrides the metadata of the imported backup, regardless of whether the backup metadata file exists or not
§d[Examples]§r
§7{prefix} import /path/to/the/backup.tar.gz§r
§7{prefix} import /path/to/a/tarball/foo.bar tar§r
@@ -451,6 +452,11 @@ prime_backup:
bad_date: Bad input date
bad_id_range: Bad ID Range
bad_hex_string: Bad hex string
invalid_json:
empty: Empty input
prefix: '"{{" prefix not found'
suffix: '"}}" suffix not found'
value: 'Json decode error: {}'
permission_denied: Permission denied
too_much_ongoing_task:
exclusive: 'Please wait for the ongoing task {} to finish'
@@ -500,9 +506,10 @@ prime_backup:
player: Player {}
console: Console
command_source: Command source {}
unknown: '(Unknown) {}'
unknown: Unknown
prime_backup:
.: Prime Backup
import: Import
test: Test
scheduled_backup: Scheduled backup
pre_restore: Automatic backup before restore
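The <meta_json> value documented above is a single JSON object passed as one argument. Below is a minimal sketch of how it is decoded, using only the keys visible in prime_backup/types/backup_meta.py further down in this diff (creator, comment, timestamp_ns, targets); the concrete values, and the "type:name" creator format implied by Operator.of, are illustrative assumptions.

import json

# One JSON object string, e.g. as typed after --meta-override:
raw = '{"creator": "player:Steve", "comment": "imported from the old server"}'

meta_override = json.loads(raw)
assert isinstance(meta_override, dict)   # the CLI rejects anything that is not a JSON object
# meta_override is then handed to ImportBackupAction(..., meta_override=meta_override)
# and applied via BackupMeta.from_dict(), regardless of any embedded meta file.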
13 changes: 10 additions & 3 deletions lang/zh_cn.yml
@@ -358,7 +358,7 @@ prime_backup:
§7--overwrite§r: 覆盖已存在的备份导出文件。默认情况下,若输出文件已存在则不导出
§7--fail-soft§r: 在导出过程中跳过导出失败的文件,因此单个文件的失败不会导致整个导出的失败。注意: 损坏的文件可能会破坏tar一类的导出文件
§7--no-verify§r: 不校验导出文件的内容
§7--no-meta§r: 在导出的文件中不添加备份元信息数据文件{backup_meta_file_name}
§7--no-meta§r: 在导出的文件中不添加备份元信息文件{backup_meta_file_name}
§d【例子】§r
§7{prefix} export 12§r: 使用默认的§3tar§r格式导出备份§612§r
§7{prefix} export 12 tar_gz§r: 使用§3tar_gz§r格式导出备份§612§r
@@ -372,7 +372,8 @@ prime_backup:
如果路径中含有空格字符,你需要把整个路径用英文双引号包起来
§3<备份格式>§r: 可用选项: {backup_formats}。若未指定,则尝试从文件名推断
§d【可选参数】§r
§7--auto-meta§r: 若备份元信息数据文件不存在,基于文件内容自动生成一个
§7--auto-meta§r: 若备份元信息文件不存在,基于文件内容自动生成一个
§7--meta-override §e<备份元信息json>§r: 一个json对象字符串。若给定,无论备份元信息文件是否存在,都会用给定的值作为导入的备份的元数据内容
§d【例子】§r
§7{prefix} import /path/to/the/backup.tar.gz§r
§7{prefix} import /path/to/a/tarball/foo.bar tar§r
@@ -451,6 +452,11 @@ prime_backup:
bad_date: 非法日期格式
bad_id_range: 非法ID范围
bad_hex_string: 非法的十六进制字符串
invalid_json:
empty: 空输入
prefix: 未找到"{{"前缀
suffix: 未找到"}}"后缀
value: 'Json解析失败: {}'
permission_denied: 权限不足
too_much_ongoing_task:
exclusive: '请等待当前任务{}完成'
@@ -500,9 +506,10 @@ prime_backup:
player: 玩家{}
console: 控制台
command_source: 指令源{}
unknown: '(未知) {}'
unknown: 未知
prime_backup:
.: Prime Backup
import: 导入
test: 测试
scheduled_backup: 定时备份
pre_restore: 回档前的自动备份
24 changes: 20 additions & 4 deletions prime_backup/action/import_backup_action.py
@@ -22,6 +22,7 @@
from prime_backup.exceptions import PrimeBackupError
from prime_backup.types.backup_info import BackupInfo
from prime_backup.types.backup_meta import BackupMeta
from prime_backup.types.operator import Operator, PrimeBackupOperatorNames
from prime_backup.types.standalone_backup_format import StandaloneBackupFormat
from prime_backup.types.tar_format import TarFormat
from prime_backup.types.units import ByteCount
@@ -37,6 +38,10 @@ class BackupMetadataNotFound(PrimeBackupError):
pass


class BackupMetadataInvalid(PrimeBackupError):
pass


class _FileDescription(NamedTuple):
blob: Optional[schema.Blob]
hash: str
@@ -271,7 +276,10 @@ def open_file(self, path: Path) -> ContextManager[ZipFileHolder]:


class ImportBackupAction(CreateBackupActionBase):
def __init__(self, file_path: Path, backup_format: Optional[StandaloneBackupFormat] = None, *, ensure_meta: bool = True):
def __init__(
self, file_path: Path, backup_format: Optional[StandaloneBackupFormat] = None, *,
ensure_meta: bool = True, meta_override: Optional[dict] = None,
):
super().__init__()

if backup_format is None:
@@ -282,6 +290,7 @@ def __init__(self, file_path: Path, backup_format: Optional[StandaloneBackupForm
self.file_path = file_path
self.backup_format = backup_format
self.ensure_meta = ensure_meta
self.meta_override = meta_override

self.__blob_cache: Dict[str, schema.Blob] = {}

@@ -351,15 +360,20 @@ def __import_member(
def __import_packed_backup_file(self, session: DbSession, file_holder: PackedBackupFileHandler.FileHolder) -> schema.Backup:
meta: Optional[BackupMeta] = None

if (meta_obj := file_holder.get_member(BACKUP_META_FILE_NAME)) is not None:
if self.meta_override is not None:
try:
meta = BackupMeta.from_dict(self.meta_override)
except Exception as e:
self.logger.error('Read backup meta from meta_override {!r} failed: {}'.format(self.meta_override, e))
raise BackupMetadataInvalid(e)
elif (meta_obj := file_holder.get_member(BACKUP_META_FILE_NAME)) is not None:
with meta_obj.open() as meta_reader:
try:
meta_dict = json.load(meta_reader)
meta = BackupMeta.from_dict(meta_dict)
except Exception as e:
self.logger.error('Read backup meta from {!r} failed: {}'.format(BACKUP_META_FILE_NAME, e))
if self.ensure_meta:
raise BackupMetadataNotFound(e)
raise BackupMetadataInvalid(e)
else:
self.logger.info('Read backup meta from {!r} ok'.format(BACKUP_META_FILE_NAME))
else:
@@ -387,6 +401,8 @@ def __import_packed_backup_file(self, session: DbSession, file_holder: PackedBac
self.logger.warning('Found extra files inside {!r}: {}. They are not included in the targets {}'.format(
self.file_path.name, extra_files, meta.targets,
))
if meta.creator == str(Operator.unknown()):
meta.creator = str(Operator.pb(PrimeBackupOperatorNames.import_))

backup = session.create_backup(**meta.to_backup_kwargs())

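In __import_packed_backup_file the new flag takes precedence over everything else. A condensed sketch of the resolution order follows; the branch for a completely missing meta file is hidden in this hunk, so its behaviour here is inferred from the --auto-meta handling elsewhere in the commit.

def resolve_meta(meta_override, meta_obj, ensure_meta):
    # 1. --meta-override wins unconditionally; a dict that BackupMeta rejects
    #    raises BackupMetadataInvalid
    if meta_override is not None:
        return BackupMeta.from_dict(meta_override)
    # 2. otherwise the embedded backup meta file is used; a broken file now raises
    #    BackupMetadataInvalid instead of BackupMetadataNotFound
    if meta_obj is not None:
        with meta_obj.open() as reader:
            return BackupMeta.from_dict(json.load(reader))
    # 3. no meta file at all: BackupMetadataNotFound unless --auto-meta was given
    #    (ensure_meta=False), in which case a meta is auto-generated from the
    #    archive content (that branch is not shown in this hunk)
    if ensure_meta:
        raise BackupMetadataNotFound('missing ' + BACKUP_META_FILE_NAME)
    return None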
16 changes: 15 additions & 1 deletion prime_backup/cli/entrypoint.py
@@ -1,5 +1,6 @@
import argparse
import enum
import json
import shutil
import sys
from pathlib import Path
@@ -128,9 +129,21 @@ def cmd_import(self):
fmt = self.get_ebf(input_path)
self.init_environment()

if self.args.meta_override is not None:
try:
meta_override = json.loads(self.args.meta_override)
except ValueError as e:
logger.error('Bad json {!r}: {}'.format(self.args.meta_override, e))
sys.exit(1)
if not isinstance(meta_override, dict):
logger.error('meta_override should be a dict, but found {}: {!r}'.format(type(meta_override), meta_override))
sys.exit(1)
else:
meta_override = None

logger.info('Importing backup from {}, format: {}'.format(str(input_path.as_posix()), fmt.name))
try:
ImportBackupAction(input_path, fmt, ensure_meta=not self.args.auto_meta).run()
ImportBackupAction(input_path, fmt, ensure_meta=not self.args.auto_meta, meta_override=meta_override).run()
except BackupMetadataNotFound as e:
logger.error('Import failed due to backup metadata not found: {}'.format(e))
logger.error('Please make sure the file is a valid backup created by Prime Backup. You can also use the --auto-meta flag as a workaround')
@@ -211,6 +224,7 @@ def entrypoint(cls):
parser_import.add_argument('input', help='The file name of the backup to be imported. Example: my_backup.tar')
parser_import.add_argument('-f', '--format', help='The format of the input file. If not given, attempt to infer from the input file name. Options: {}'.format(enum_options(StandaloneBackupFormat)))
parser_import.add_argument('--auto-meta', action='store_true', help='If the backup metadata file does not exist, create an auto-generated one based on the file content')
parser_import.add_argument('--meta-override', help='An optional json object string. It overrides the metadata of the imported backup, regardless of whether the backup metadata file exists or not')

desc = 'Export the given backup to a single file'
parser_export = subparsers.add_parser('export', help=desc, description=desc)
6 changes: 4 additions & 2 deletions prime_backup/mcdr/command/commands.py
@@ -8,7 +8,7 @@
from prime_backup.compressors import CompressMethod
from prime_backup.config.config import Config
from prime_backup.mcdr.command.backup_id_suggestor import BackupIdSuggestor
from prime_backup.mcdr.command.nodes import DateNode, IdRangeNode, MultiIntegerNode, HexStringNode
from prime_backup.mcdr.command.nodes import DateNode, IdRangeNode, MultiIntegerNode, HexStringNode, JsonObjectNode
from prime_backup.mcdr.crontab_job import CrontabJobEvent, CrontabJobId
from prime_backup.mcdr.crontab_manager import CrontabManager
from prime_backup.mcdr.task.backup.create_backup_task import CreateBackupTask
@@ -176,7 +176,8 @@ def cmd_import(self, source: CommandSource, context: CommandContext):
file_path = Path(context['file_path'])
backup_format = context.get('backup_format')
ensure_meta = context.get('auto_meta', 0) == 0
self.task_manager.add_task(ImportBackupTask(source, file_path, backup_format, ensure_meta=ensure_meta))
meta_override = context.get('meta_override')
self.task_manager.add_task(ImportBackupTask(source, file_path, backup_format, ensure_meta=ensure_meta, meta_override=meta_override))

def cmd_crontab_show(self, source: CommandSource, context: CommandContext):
job_id = context.get('job_id')
@@ -372,6 +373,7 @@ def make_import_cmd() -> Literal:
node_fp.then(node_bf)
for node in [node_fp, node_bf]:
node.then(CountingLiteral('--auto-meta', 'auto_meta').redirects(node))
node.then(Literal('--meta-override').then(JsonObjectNode('meta_override').redirects(node)))
node.runs(self.cmd_import)
return node_sc

42 changes: 42 additions & 0 deletions prime_backup/mcdr/command/nodes.py
@@ -1,3 +1,4 @@
import json
import re
from typing import NamedTuple, Optional

@@ -66,3 +67,44 @@ def parse(self, text: str) -> ParseResult:
if not self.__pattern.fullmatch(h):
raise IllegalArgument(tr('error.node.bad_hex_string'), result.char_read)
return ParseResult(h, result.char_read)


class InvalidJson(IllegalArgument):
pass


class JsonObjectNode(ArgumentNode):
def parse(self, text: str) -> ParseResult:
if len(text) == 0:
raise InvalidJson(tr('error.node.invalid_json.empty'), 0)
if text[0] != '{':
raise InvalidJson(tr('error.node.invalid_json.prefix'), 1)

in_string = False
is_escape = False
level = 0
for i, c in enumerate(text):
if in_string:
if is_escape:
is_escape = False
elif c == '\\':
is_escape = True
elif c == '"':
in_string = False
else:
if c == '"':
in_string = True
elif c == '{':
level += 1
elif c == '}':
level -= 1
if level == 0:
n = i + 1
try:
data = json.loads(text[:n])
except ValueError as e:
raise InvalidJson(tr('error.node.invalid_json.value', e), n)
else:
return ParseResult(data, n)

raise InvalidJson(tr('error.node.invalid_json.suffix'), len(text))
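The parser above greedily matches one balanced JSON object from the head of the remaining command text, ignoring braces that appear inside quoted strings. A rough usage sketch, assuming the node is exercised inside a loaded plugin (tr() needs the running MCDR translation context) and that ParseResult exposes the parsed value as .value alongside the .char_read field used above:

node = JsonObjectNode('meta_override')
result = node.parse('{"comment": "a {weird} \\"value\\""} --other-flag')
# result.value     -> {'comment': 'a {weird} "value"'}  (braces inside strings are skipped)
# result.char_read -> index right after the matching '}', leaving ' --other-flag'
#                     for the following command nodes
#
# Inputs that do not start with a balanced, valid object raise InvalidJson:
#   ''           -> error.node.invalid_json.empty
#   '[1, 2]'     -> error.node.invalid_json.prefix  ('"{" prefix not found')
#   '{"a": 1'    -> error.node.invalid_json.suffix  ('"}" suffix not found')
#   '{"a": 1,}'  -> error.node.invalid_json.value   (json.loads rejects the trailing comma)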
8 changes: 6 additions & 2 deletions prime_backup/mcdr/task/backup/import_backup_task.py
@@ -11,11 +11,15 @@


class ImportBackupTask(HeavyTask[None]):
def __init__(self, source: CommandSource, file_path: Path, backup_format: Optional[StandaloneBackupFormat] = None, *, ensure_meta: bool):
def __init__(
self, source: CommandSource, file_path: Path, backup_format: Optional[StandaloneBackupFormat] = None, *,
ensure_meta: bool = True, meta_override: Optional[dict] = None,
):
super().__init__(source)
self.file_path = file_path
self.backup_format = backup_format
self.ensure_meta = ensure_meta
self.meta_override = meta_override

@property
def id(self) -> str:
@@ -39,7 +43,7 @@ def run(self) -> None:

self.reply_tr('start', t_fp, RText(backup_format.name, RColor.dark_aqua))
try:
backup = self.run_action(ImportBackupAction(self.file_path, backup_format, ensure_meta=self.ensure_meta))
backup = self.run_action(ImportBackupAction(self.file_path, backup_format, ensure_meta=self.ensure_meta, meta_override=self.meta_override))
except BackupMetadataNotFound as e:
self.reply(self.tr('backup_metadata_not_found', t_fp, str(e)).set_color(RColor.red))
self.reply_tr('backup_metadata_not_found.suggestion', name=mcdr_globals.metadata.name)
3 changes: 2 additions & 1 deletion prime_backup/mcdr/task/backup/prune_backup_task.py
@@ -17,6 +17,7 @@
from prime_backup.types.backup_filter import BackupFilter
from prime_backup.types.backup_info import BackupInfo
from prime_backup.types.blob_info import BlobListSummary
from prime_backup.types.operator import PrimeBackupOperatorNames
from prime_backup.types.units import ByteCount
from prime_backup.utils import misc_utils, log_utils

@@ -320,7 +321,7 @@ def add(dt: datetime.datetime):
from prime_backup.types.operator import Operator
backups.append(BackupInfo(
id=id_counter, timestamp_ns=int(dt.timestamp() * 1e9),
creator=Operator.pb('test'), comment='', targets=[], tags=BackupTags(), raw_size=0, stored_size=0,
creator=Operator.pb(PrimeBackupOperatorNames.test), comment='', targets=[], tags=BackupTags(), raw_size=0, stored_size=0,
files=[],
))

4 changes: 2 additions & 2 deletions prime_backup/mcdr/text_components.py
@@ -268,9 +268,9 @@ def number_list(cls, values: Iterable[Any]) -> RTextBase:
@classmethod
def operator(cls, op: Operator) -> RTextBase:
tr_key = f'operator.{op.type}'
if op.type in ['player', 'command_source', 'unknown']:
if op.type in ['player', 'command_source']:
return cls.tr(tr_key, op.name)
elif op.type in ['console']:
elif op.type in ['console', 'unknown']:
return cls.tr(tr_key)
elif op.type == constants.PLUGIN_ID:
from prime_backup.mcdr import mcdr_globals
2 changes: 1 addition & 1 deletion prime_backup/types/backup_meta.py
@@ -11,7 +11,7 @@


class BackupMeta(Serializable):
creator: str = str(Operator.pb('import'))
creator: str = str(Operator.unknown())
comment: str = ''
timestamp_ns: int
targets: List[str] = []
24 changes: 18 additions & 6 deletions prime_backup/types/operator.py
@@ -6,19 +6,31 @@
from mcdreforged.api.all import CommandSource, RTextBase


class _PrimeBackupOperatorName(str):
pass


class PrimeBackupOperatorNames:
pre_restore = 'pre_restore'
scheduled_backup = 'scheduled_backup'
test = 'test'
"""
For :meth:`prime_backup.types.operator.Operator.pb`
"""
import_ = _PrimeBackupOperatorName('import')
pre_restore = _PrimeBackupOperatorName('pre_restore')
scheduled_backup = _PrimeBackupOperatorName('scheduled_backup')
test = _PrimeBackupOperatorName('test')


class Operator(NamedTuple):
type: str
name: str

@classmethod
def pb(cls, what: str) -> 'Operator':
return Operator(constants.PLUGIN_ID, what)
def unknown(cls) -> 'Operator':
return Operator('unknown', '')

@classmethod
def pb(cls, pb_op_name: _PrimeBackupOperatorName) -> 'Operator':
return Operator(constants.PLUGIN_ID, str(pb_op_name))

@classmethod
def player(cls, name: str) -> 'Operator':
Expand All @@ -44,7 +56,7 @@ def of(cls, value: Union[str, 'CommandSource']) -> 'Operator':
t, n = value.split(':', 1)
return Operator(type=t, name=n)
else:
return Operator(type='unknown', name=value)
return Operator(type=value, name='')
else:
raise TypeError(value)

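Two behaviour changes are worth noting here: BackupMeta.creator now defaults to the new unknown operator instead of a hard-coded 'import' operator, and the plain-string fallback of Operator.of now treats the whole string as a type. The before/after values sketched below are read off the visible lines of these hunks; anything beyond them is an assumption.

# New default creator (see prime_backup/types/backup_meta.py above):
Operator.unknown()                             # -> Operator(type='unknown', name='')

# Typed helper for plugin-internal operators, now keyed by PrimeBackupOperatorNames:
Operator.pb(PrimeBackupOperatorNames.import_)  # -> Operator(type=constants.PLUGIN_ID, name='import')

# Changed fallback in Operator.of for strings without a ':' separator:
#   before: Operator.of('legacy_value') -> Operator(type='unknown', name='legacy_value')
#   after:  Operator.of('legacy_value') -> Operator(type='legacy_value', name='')
# which is why the importer rewrites an unknown creator to the new 'import' operator
# in __import_packed_backup_file above.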
