logo
57
7
WeChat Login

请求增加一个通过comfyui manager的custom-node-list.json和extension-node-map.json直接读取工作流所需节点的脚本#3

Open
created 2025-05-03
Edit

8ddaed51-8912-4b11-84aa-e985ed63e566.txt
我让ai写了个雏形
import json
import argparse
import os
import sys
import pathlib
import re
from collections import OrderedDict
import urllib.parse

# --- Configuration ---
# Relative path (from the ComfyUI root) to the ComfyUI-Manager extension.
MANAGER_RELATIVE_PATH = "custom_nodes/ComfyUI-Manager"
# Manager data files consulted for node -> repository lookups.
CUSTOM_NODE_LIST_FILE = "custom-node-list.json"
EXTENSION_NODE_MAP_FILE = "extension-node-map.json"
# Custom-node install locations inside the builder and final Docker stages.
NODE_INSTALL_BASE_PATH_BUILDER = "/opt/ComfyUI/custom_nodes"
NODE_INSTALL_BASE_PATH_FINAL = "/app/ComfyUI/custom_nodes"
COMFYUI_BASE_PATH_FINAL = "/app/ComfyUI"
# Generic identifiers that mark a workflow node as part of ComfyUI core.
CORE_GENERIC_IDS = {"comfy-core"}

# --- Core node class names (based on nodes.py and common builtin features) ---
# Workflow nodes whose type appears here are treated as builtin and skipped.
# Extend this list as needed.
CORE_NODE_CLASS_NAMES = {
    # Loaders
    "CheckpointLoader", "CheckpointLoaderSimple", "VAELoader", "CLIPLoader", "LoraLoader",
    "ControlNetLoader", "DiffControlNetLoader", "StyleModelLoader", "UpscaleModelLoader",
    "CLIPVisionLoader", "GLIGENLoader", "UNETLoader", "DualCLIPLoader", "HypernetworkLoader",
    # Samplers & scheduling
    "KSampler", "KSamplerAdvanced", "SamplerCustom", "KSamplerSDTurbo",  # KSamplerCycle (may be external)
    "SamplerDPMAdaptative", "SamplerDPMPP_2M", "SamplerDPMPP_2S_Ancestral",
    "SamplerDPMPP_SDE", "SamplerEuler", "SamplerEulerAncestral", "SamplerLMS",
    "SamplerDDIM", "SamplerUniPC", "ConditioningSetTimestepRange", "TimestampConditioning",
    # CLIP / text encoding
    "CLIPTextEncode", "CLIPSetLastLayer", "DualCLIPTextEncode", "CLIPAttention",
    "CLIPVisionEncode", "GetTextTokens", "TokenMerge", "TokenWeight", "ConditioningCombine",
    "ConditioningAverage", "ConditioningConcat", "ConditioningSetArea", "ConditioningSetAreaPercentage",
    "ConditioningSetAreaStrength", "ConditioningSetMask", "ConditioningSetMaskAndCombine",
    # VAE operations
    "VAEEncode", "VAEDecode", "VAEEncodeForInpaint", "VAEEncodeTiled", "VAEDecodeTiled",
    # Latent operations
    "EmptyLatentImage", "LatentUpscale", "LatentUpscaleBy", "LatentRotate", "LatentFlip",
    "LatentComposite", "LatentFromBatch", "RepeatLatentBatch", "RescaleLatent", "LatentBlend",
    "SetLatentNoiseMask", "LatentCrop",
    # Image operations
    "LoadImage", "LoadImageMask", "SaveImage", "PreviewImage", "ImageScale", "ImageScaleBy",
    "ImagePadForOutpaint", "ImageInvert", "ImageBatch", "ImageChannelCombine", "ImageChannelSplit",
    "ImageBlur", "ImageQuantize", "ImageSharpen", "ImageLevels", "ImageContrast",
    "ImageCrop", "ImageFromBatch", "ImageToLatent", "MaskToImage", "ImageBlend", "ImageColorToMask",
    "LoadBase64Image", "LoadSVGImage", "SVGToMask",
    # ControlNet operations
    "ControlNetApply", "ControlNetApplyAdvanced", "ControlNetApplyAttention",
    # Model operations
    "PatchModelAddAttention", "ModelMergeBlocks", "ModelMergeSimple", "ModelMergeUNET",
    "CheckpointSave", "FreeU", "FreeU_v2",
    # Noise / mask operations
    "SaltAndPepperNoise", "SaltNoiseMask", "FeatherMask", "GrowMask", "MaskComposite",
    "SolidMask", "ColorToMask", "InvertMask", "CropMask", "MaskFromBatch", "CombineMasks",
    "SubtractMask", "BooleanMask", "SolidColorMask",
    # Sampling-logic related
    "DifferentialDiffusion", "RefinerSampler", "SamplerMoments", "SamplerState", "CalculateGuider",
    # Utilities / misc
    "PrimitiveNode", "PrimitiveString",  # NOTE: PrimitiveNode is also used as input by some custom nodes
    "Note", "Reroute", "LayerStyleInput",
    # Possibly internal or deprecated nodes
    "LatentFromConditioning", "ConditioningFromLatent", "UpscaleImage", "ImageResize",
    "NODE_MODE_MUTE", "NODE_MODE_BYPASS",  # mode nodes
    # add more...
}

# --- Default known-node mapping (fallback) ---
# NOTE(review): not currently consulted by main() — kept as documented
# fallback data; verify whether it should be merged into the lookup order.
DEFAULT_NODE_MAP = {
    "pr-was-node-suite-comfyui-47064894": {"url": "https://github.com/WASasquatch/WAS_Node_Suite.git"},
    "was-node-suite-comfyui": {"url": "https://github.com/WASasquatch/WAS_Node_Suite.git"},
    "KohakuBlueleaf/PixelOE": {"url": "https://github.com/KohakuBlueleaf/ComfyUI-PixelOE.git"},
    "shiimizu/ComfyUI-TiledDiffusion": {"url": "https://github.com/shiimizu/ComfyUI-TiledDiffusion.git"},
}

# --- Dockerfile templates ---
# Builder-stage header, one install layer per custom-node repository, and the
# final stage that copies the installed nodes into the runtime image.
# The generated file keeps its original (Chinese) embedded comments; the '#'
# prefixes and '\' line continuations are restored here so the output is a
# valid Dockerfile.

DOCKERFILE_HEADER = """# 由 workflow_to_dockerfile.py 脚本自动生成
# 使用多阶段构建来优化最终镜像大小
FROM python:3.11-slim as builder
RUN apt-get update && apt-get install -y --no-install-recommends git curl wget ca-certificates && \\
    apt-get clean && rm -rf /var/lib/apt/lists/*
WORKDIR {node_base_path_builder}

# --- 克隆并尝试安装自定义节点依赖 ---
"""

DOCKERFILE_INSTALL_LAYER_TEMPLATE = """
# 节点标识: {node_id} (来源: {source}, 匹配键: '{match_key}')
# 仓库名: {repo_name}, 克隆 URL: {clone_url} {mirror_notice}
RUN echo "Cloning {repo_name} from {original_url}..." && \\
    git clone --depth 1 {clone_url} ./{repo_name} && \\
    cd ./{repo_name} && \\
    echo "Checking dependencies for {repo_name}..." && \\
    (if [ -f requirements.txt ]; then \\
        echo "Found requirements.txt, attempting pip install..."; \\
        pip install -r requirements.txt --no-cache-dir || echo "警告: '{repo_name}' 的 pip install 失败,请检查日志。"; \\
    else \\
        echo "No requirements.txt found for {repo_name}."; \\
    fi) && \\
    (if [ -f install.py ]; then \\
        echo "Found install.py, attempting to run..."; \\
        python install.py || echo "警告: '{repo_name}' 的 python install.py 执行失败,请检查日志。"; \\
    else \\
        echo "No install.py found for {repo_name}."; \\
    fi) && \\
    echo "Cleaning up .git directory for {repo_name}..." && \\
    (rm -rf ./.git || echo "无法移除 .git 目录 for {repo_name}") && \\
    echo "Finished processing {repo_name}."
"""

DOCKERFILE_FINAL_STAGE = """
# ---- 最终阶段 (Final Stage) ----
FROM comfyui/comfyui
ARG COMFYUI_CUSTOM_NODES_DIR={node_base_path_final}
ARG COMFYUI_APP_DIR={comfyui_base_path_final}
RUN mkdir -p $COMFYUI_CUSTOM_NODES_DIR
COPY --from=builder {node_base_path_builder}/ $COMFYUI_CUSTOM_NODES_DIR/
RUN echo "已安装的自定义节点列表:" && ls -l $COMFYUI_CUSTOM_NODES_DIR || echo "无法列出自定义节点目录。"
WORKDIR $COMFYUI_APP_DIR
CMD ["python", "main.py"]
"""

# --- Helper functions ---

def sanitize_repo_name(repo_name):
    """Sanitize a repository name (or URL) into a safe directory name.

    Accepts either a bare repo name or a full URL (in which case the last
    path component is used). Returns ``None`` when the input is not a string
    or nothing usable remains after filtering.
    """
    if not isinstance(repo_name, str):
        return None
    # If a full URL was passed, keep only the last path component.
    try:
        parsed_url = urllib.parse.urlparse(repo_name)
        if parsed_url.scheme and parsed_url.netloc:
            path_parts = parsed_url.path.split('/')
            if len(path_parts) > 1:
                repo_name = path_parts[-1].replace('.git', '')
    except ValueError:
        pass
    # Strip leading dots/dashes, then drop anything outside [word, dot, dash].
    # (The original paste had a broken char class 'r"[^\w-.]+"' and an invalid
    # pattern 'r"+"'; '-' must come last inside the class to be literal.)
    repo_name = re.sub(r'^[.-]+', '', repo_name)
    repo_name = re.sub(r'[^\w.-]+', '', repo_name)
    repo_name = repo_name.strip()
    if not repo_name:
        return None
    # Avoid clashing with well-known ComfyUI directory names.
    if repo_name.lower() in ['custom_nodes', 'models', 'comfyui', 'lib', 'bin', 'py']:
        return f"repo_{repo_name}"
    return repo_name

def parse_custom_node_list(manager_data_path):
    """Parse custom-node-list.json into an ``{id_or_dirname: git_url}`` map.

    Only entries with ``install_type == 'git-clone'`` are kept. The file may
    be either the newer dict format (``{"custom_nodes": [...]}``) or a plain
    list of entries. Returns an empty dict on any read/parse failure.
    """
    node_list_path = manager_data_path / CUSTOM_NODE_LIST_FILE
    if not node_list_path.exists():
        print(f"警告:找不到文件 {CUSTOM_NODE_LIST_FILE}。")
        return {}
    print(f"正在读取 {CUSTOM_NODE_LIST_FILE}...")
    try:
        with open(node_list_path, 'r', encoding='utf-8') as f:
            data = json.load(f)
    except Exception as e:
        print(f"错误:读取或解析 {CUSTOM_NODE_LIST_FILE} 失败:{e}")
        return {}
    node_map = {}
    nodes_to_process = []
    if isinstance(data, dict) and 'custom_nodes' in data and isinstance(data['custom_nodes'], list):
        nodes_to_process = data['custom_nodes']
        print(f"从 {CUSTOM_NODE_LIST_FILE} (字典格式) 中找到 {len(nodes_to_process)} 个条目。")
    elif isinstance(data, list):
        nodes_to_process = data
        print(f"从 {CUSTOM_NODE_LIST_FILE} (列表格式) 中找到 {len(nodes_to_process)} 个条目。")
    else:
        print(f"错误:无法识别 {CUSTOM_NODE_LIST_FILE} 的格式。")
        return {}
    count = 0
    for node_entry in nodes_to_process:
        if not isinstance(node_entry, dict):
            continue
        try:
            if node_entry.get('install_type') != 'git-clone':
                continue
            identifier = node_entry.get('id') or node_entry.get('dirname')
            if not identifier or not isinstance(identifier, str):
                continue
            clean_id = identifier.strip()
            git_url = None
            reference = node_entry.get('reference')
            # Prefer the 'reference' field when it points at a GitHub repo
            # (and its second path segment doesn't look like a file name).
            if isinstance(reference, str) and reference.startswith("https://github.com/"):
                parts = reference.split('/')
                if len(parts) >= 5 and '.' not in parts[4].split('#')[0].split('?')[0]:
                    git_url = f"https://github.com/{parts[3]}/{parts[4].split('#')[0].split('?')[0]}.git"
            # Fall back to the 'files' list: first an explicit .git URL...
            if not git_url and isinstance(node_entry.get('files'), list):
                for file_url in node_entry['files']:
                    if isinstance(file_url, str):
                        clean_file_url = file_url.strip()
                        if clean_file_url.startswith("https://github.com/") and clean_file_url.endswith(".git"):
                            git_url = clean_file_url
                            break
                # ...then any GitHub repo URL we can normalise.
                if not git_url:
                    for file_url in node_entry['files']:
                        if isinstance(file_url, str):
                            clean_file_url = file_url.strip()
                            if clean_file_url.startswith("https://github.com/"):
                                parts = clean_file_url.split('/')
                                if len(parts) >= 5 and '.' not in parts[4].split('#')[0].split('?')[0]:
                                    git_url = f"https://github.com/{parts[3]}/{parts[4].split('#')[0].split('?')[0]}.git"
                                    break
            if git_url and isinstance(git_url, str):
                clean_url = git_url.strip()
                # First occurrence of an id wins.
                if clean_url and clean_id and clean_id not in node_map:
                    node_map[clean_id] = clean_url
                    count += 1
        except Exception as e:
            print(f"警告:处理 {CUSTOM_NODE_LIST_FILE} 条目 '{node_entry.get('id', '未知')}' 时出错:{e}")
    print(f"从 {CUSTOM_NODE_LIST_FILE} 加载了 {count} 个 git-clone 类型的节点映射 (id/dirname -> URL)。")
    return node_map

def parse_extension_node_map(manager_data_path):
    """Parse extension-node-map.json into a reverse ``{node_name: git_url}`` map.

    The source file maps an extension URL to ``[[node names...], metadata]``;
    this inverts it so each node class name resolves to a cloneable Git URL.
    Returns an empty dict on any read/parse failure.
    """
    node_map_path = manager_data_path / EXTENSION_NODE_MAP_FILE
    if not node_map_path.exists():
        print(f"警告:找不到文件 {EXTENSION_NODE_MAP_FILE}。")
        return {}
    print(f"正在读取 {EXTENSION_NODE_MAP_FILE}...")
    try:
        with open(node_map_path, 'r', encoding='utf-8') as f:
            data = json.load(f)
    except Exception as e:
        print(f"错误:读取或解析 {EXTENSION_NODE_MAP_FILE} 失败:{e}")
        return {}
    reverse_node_map = {}
    count = 0
    total_node_names = 0
    if not isinstance(data, dict):
        print(f"错误: {EXTENSION_NODE_MAP_FILE} 顶层不是预期的字典格式。")
        return {}
    for url_key, value in data.items():
        if not isinstance(value, list) or len(value) < 1 or not isinstance(value[0], list):
            continue
        node_names = value[0]
        git_url = None
        try:
            # Normalise both plain github.com URLs and raw.githubusercontent.com
            # URLs into a cloneable https://github.com/<owner>/<repo>.git form.
            if url_key.startswith("https://github.com/") and not url_key.endswith((".py", ".js")):
                parts = url_key.strip('/').split('/')
                if len(parts) >= 5:
                    git_url = f"https://github.com/{parts[3]}/{parts[4].replace('.git', '')}.git"
            elif url_key.startswith("https://raw.githubusercontent.com/"):
                parts = url_key.split('/')
                if len(parts) >= 6:
                    git_url = f"https://github.com/{parts[3]}/{parts[4]}.git"
        except Exception as e:
            print(f"警告:从 URL key '{url_key}' 推断 Git URL 时出错: {e}")
        if not git_url:
            continue
        if isinstance(node_names, list):
            for node_name in node_names:
                if isinstance(node_name, str) and node_name.strip():
                    clean_node_name = node_name.strip()
                    total_node_names += 1
                    # Later entries overwrite earlier ones for duplicate names.
                    reverse_node_map[clean_node_name] = git_url.strip()
                    count += 1
    print(f"从 {EXTENSION_NODE_MAP_FILE} 加载了 {count} 个节点名称到 Git URL 的反向映射 (共处理 {total_node_names} 个节点名)。")
    return reverse_node_map

def load_user_map(map_file_path):
    """Load the user-supplied custom node-mapping JSON file.

    Returns the parsed dict, or an empty dict when no path is given, the file
    is missing, unreadable, or does not contain a JSON object.
    """
    if not map_file_path:
        return {}
    abs_map_path = pathlib.Path(map_file_path).resolve()
    if not abs_map_path.exists():
        print(f"警告:指定的自定义映射文件不存在:{abs_map_path}")
        return {}
    try:
        with open(abs_map_path, 'r', encoding='utf-8') as f:
            user_map = json.load(f)
        print(f"成功加载自定义映射文件:{abs_map_path}")
        if not isinstance(user_map, dict):
            print(f"警告:自定义映射文件 '{map_file_path}' 不是有效的 JSON 对象,已忽略。")
            return {}
        return user_map
    except Exception as e:
        print(f"错误:读取或解析自定义映射文件 '{map_file_path}' 失败:{e}")
        return {}

def get_potential_identifiers(node_properties, node_type):
    """Collect candidate identifiers for a workflow node.

    Gathers, in priority order: the node type, the 'Node name for S&R'
    property (base name before any '|' variant, then the full value), an
    'owner/repo' pattern from 'aux_id' (dirname first, then the full path),
    and 'cnr_id' (full value, then its last path segment). Returns a
    de-duplicated list preserving first-seen order, with falsy entries removed.
    """
    identifiers = []
    if node_type and isinstance(node_type, str):
        identifiers.append(node_type.strip())
    # 'Node name for S&R' may carry variants separated by '|'.
    node_name_sr = node_properties.get('Node name for S&R')
    if node_name_sr and isinstance(node_name_sr, str):
        base_name = node_name_sr.split('|')[0].strip()
        identifiers.append(base_name)
        if base_name != node_name_sr.strip():
            identifiers.append(node_name_sr.strip())
    # 'aux_id' often embeds an 'owner/repo' path.
    aux_id = node_properties.get('aux_id')
    if aux_id and isinstance(aux_id, str) and '/' in aux_id:
        match = re.search(r'([a-zA-Z0-9.-]+/[a-zA-Z0-9.-]+)', aux_id)
        if match:
            potential_repo_path = match.group(1)
            potential_dirname = potential_repo_path.split('/')[-1]
            identifiers.append(potential_dirname.strip())
            identifiers.append(potential_repo_path.strip())
    # 'cnr_id' may be a plain name or an 'owner/name' path.
    cnr_id = node_properties.get('cnr_id')
    if cnr_id and isinstance(cnr_id, str):
        identifiers.append(cnr_id.strip())
        potential_dirname_cnr = cnr_id.split('/')[-1]
        if potential_dirname_cnr != cnr_id:
            identifiers.append(potential_dirname_cnr.strip())
    return list(OrderedDict.fromkeys(filter(None, identifiers)))

def main(workflow_file, output_file="Dockerfile.generated", map_file=None, mirror=None, comfyui_path=None):
    """Generate a multi-stage Dockerfile installing a workflow's custom nodes.

    Reads the workflow JSON, filters out ComfyUI core nodes, resolves each
    remaining node to a Git repository via (in order) the user map, the
    Manager node-name map, and the Manager repo/id map, then writes one
    Dockerfile install layer per unique repository.
    """
    # --- Resolve paths ---
    if comfyui_path:
        comfyui_root_dir = pathlib.Path(comfyui_path).resolve()
    else:
        # Assume the current working directory is the ComfyUI root and trust
        # the caller to have run the script from the right place.
        comfyui_root_dir = pathlib.Path.cwd()
        print(f"未提供 --comfyui-path,假定 ComfyUI 根目录为当前工作目录: {comfyui_root_dir}")

    manager_path = comfyui_root_dir / MANAGER_RELATIVE_PATH
    abs_workflow_path = pathlib.Path(workflow_file).resolve()
    if not abs_workflow_path.exists():
        print(f"错误:输入的工作流文件不存在: {abs_workflow_path}")
        sys.exit(1)
    workflow_file = str(abs_workflow_path)

    # --- Load all mapping data ---
    user_node_map = load_user_map(map_file)
    repo_map = parse_custom_node_list(manager_path)
    node_name_map = parse_extension_node_map(manager_path)

    # --- Read the workflow ---
    try:
        with open(workflow_file, 'r', encoding='utf-8') as f:
            workflow_data = json.load(f)
    except Exception as e:
        print(f"错误: 读取或解析工作流文件失败 '{workflow_file}': {e}")
        sys.exit(1)
    nodes = workflow_data.get('nodes', [])
    if not nodes:
        print("警告:工作流文件中未找到节点。")
        return

    # --- Identify nodes and filter out core/builtin ones ---
    all_node_lookups = {}
    skipped_core_nodes = set()
    print("\n正在识别工作流中的节点并过滤核心节点...")
    for node in nodes:
        node_type = node.get('type')
        properties = node.get('properties', {})
        potential_ids = get_potential_identifiers(properties, node_type)
        is_core = False
        primary_id_for_core_check = potential_ids[0] if potential_ids else None

        # Core if the node type is in the builtin list...
        if node_type and node_type in CORE_NODE_CLASS_NAMES:
            is_core = True
            skipped_core_nodes.add(node_type)
        else:
            # ...or if any identifier is a generic core marker.
            for pid in potential_ids:
                if pid in CORE_GENERIC_IDS:
                    is_core = True
                    skipped_core_nodes.add(pid if pid in CORE_GENERIC_IDS else primary_id_for_core_check or node_type or "未知核心节点")
                    break
        if is_core:
            continue  # skip core nodes

        # Remember the non-core node for the lookup phase.
        valid_ids = [pid for pid in potential_ids if pid]
        if valid_ids:
            primary_id = valid_ids[0]
            if primary_id not in all_node_lookups:
                all_node_lookups[primary_id] = {"all_ids": valid_ids}

    print(f"过滤后,识别出 {len(all_node_lookups)} 个需要查找来源的潜在自定义节点。")
    if skipped_core_nodes:
        print(f"跳过了 {len(skipped_core_nodes)} 种被识别为核心/内置的节点类型。")

    print("\n将按以下顺序尝试查找仓库 URL:自定义映射 -> Manager(节点名) -> Manager(仓库名/ID)")
    processed_repo_urls = set()
    not_found_nodes = []
    dockerfile_build_layers = []
    repo_name_map = {}

    # --- Heuristic ordering: process well-known suites first ---
    priority_order = ["ComfyUI_essentials", "WAS_Node_Suite", "comfyui_controlnet_aux", "ComfyUI-Impact-Pack", "ComfyUI-Easy-Use"]
    sorted_primary_ids = sorted(
        list(all_node_lookups.keys()),
        key=lambda p_id: min(
            [priority_order.index(key) for key in all_node_lookups.get(p_id, {}).get("all_ids", []) if key in priority_order]
            + [len(priority_order)]
        ),
    )

    # --- Emit one Dockerfile install layer per unique repository ---
    for primary_id in sorted_primary_ids:
        node_info = all_node_lookups[primary_id]
        found_url = None
        found_source = "未找到"
        found_key = None
        lookup_order = [(user_node_map, "自定义映射"), (node_name_map, "Manager(节点名)"), (repo_map, "Manager(仓库名/ID)")]
        for current_map, source_name in lookup_order:
            if found_url:
                break
            for identifier in node_info["all_ids"]:
                entry = current_map.get(identifier)
                url_candidate = None
                if source_name == "自定义映射":
                    # The user map allows either {"url": ...} dicts or bare .git strings.
                    if isinstance(entry, dict) and "url" in entry and isinstance(entry["url"], str):
                        url_candidate = entry["url"]
                    elif isinstance(entry, str) and entry.endswith(".git"):
                        url_candidate = entry
                else:
                    if isinstance(entry, str) and entry.endswith(".git"):
                        url_candidate = entry
                if url_candidate:
                    found_url = url_candidate
                    found_source = source_name
                    found_key = identifier
                    print(f"  [找到] 通过 {found_source} 使用键 '{found_key}' 找到节点 '{primary_id}' -> {found_url}")
                    break
        if found_url:
            # Normalise to a single trailing '.git' for de-duplication.
            # (Fixed: the previous replace('.git', '') stripped the substring
            # anywhere in the URL, corrupting names containing '.git'.)
            clean_url = found_url.strip()
            if clean_url.endswith('.git'):
                clean_url = clean_url[:-4]
            clean_url += '.git'
            if clean_url in processed_repo_urls:
                print(f"  [跳过] 仓库 {clean_url} 已添加过 (由节点 '{repo_name_map.get(clean_url, '未知')}' 添加)。")
                continue
            repo_name_guess = found_key if found_source == "Manager(仓库名/ID)" else clean_url.split('/')[-1].replace('.git', '')
            safe_repo_name = sanitize_repo_name(repo_name_guess)
            if safe_repo_name:
                original_url = clean_url
                clone_url = clean_url
                mirror_notice = ""
                if mirror and clone_url.startswith("https://github.com/"):
                    mirror_domain = mirror.replace("https://", "").replace("http://", "").strip('/')
                    if mirror_domain:
                        # ghproxy-style prefix: https://<mirror>/https://github.com/...
                        clone_url = f"https://{mirror_domain}/https://github.com/{clone_url[19:]}"
                        mirror_notice = f"(使用镜像: {mirror_domain})"
                    else:
                        print(f"警告:提供的镜像地址 '{mirror}' 格式不正确,将使用原始 GitHub 地址。")
                dockerfile_build_layers.append(DOCKERFILE_INSTALL_LAYER_TEMPLATE.format(node_id=primary_id, source=found_source, match_key=found_key, repo_name=safe_repo_name, original_url=original_url, clone_url=clone_url, mirror_notice=mirror_notice))
                processed_repo_urls.add(clean_url)
                repo_name_map[clean_url] = primary_id
            else:
                print(f"  [错误] 节点 '{primary_id}' 的仓库名 '{repo_name_guess}' 无效,已跳过。")
        else:
            print(f"  [未找到] 无法找到节点 '{primary_id}' 的仓库 URL (尝试过的标识符: {node_info['all_ids']})")
            not_found_nodes.append({"id": primary_id, "tried_ids": node_info['all_ids'], "reason": "未在任何映射中找到"})

    # --- Assemble the final Dockerfile ---
    final_dockerfile = (
        DOCKERFILE_HEADER.format(node_base_path_builder=NODE_INSTALL_BASE_PATH_BUILDER)
        + "\n".join(dockerfile_build_layers)
        + DOCKERFILE_FINAL_STAGE.format(node_base_path_final=NODE_INSTALL_BASE_PATH_FINAL, comfyui_base_path_final=COMFYUI_BASE_PATH_FINAL, node_base_path_builder=NODE_INSTALL_BASE_PATH_BUILDER)
    )

    # --- Write the file and print a summary ---
    try:
        with open(output_file, 'w', encoding='utf-8') as f:
            f.write(final_dockerfile)
        print(f"\nDockerfile 已生成到 '{output_file}'")
        print("\n构建提示:")
    except Exception as e:
        print(f"\n错误:无法写入 Dockerfile 文件 '{output_file}': {e}")
    if not_found_nodes:
        print("\n--- 警告:以下自定义节点未能自动找到仓库 URL ---")
        for node in not_found_nodes:
            print(f"- {node['id']} (尝试过的标识符: {node['tried_ids']}, 原因: {node['reason']})")

if __name__ == "__main__":
    # CLI entry point (the paste had lost the dunder underscores here).
    parser = argparse.ArgumentParser(description="从 ComfyUI 工作流 JSON 生成多阶段 Dockerfile...", formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument("workflow_file", help="输入的 ComfyUI 工作流 JSON 文件路径。")
    parser.add_argument("-o", "--output", default="Dockerfile.generated", help="输出的 Dockerfile 文件名。")
    parser.add_argument("--map-file", help="用户自定义的节点映射 JSON 文件路径。", default=None)
    parser.add_argument("--mirror", help="用于替换 GitHub URL 的镜像前缀 (例如 'ghproxy.com')", default=None)
    parser.add_argument("--comfyui-path", help="ComfyUI 安装根目录的路径。", default=None)
    args = parser.parse_args()
    main(args.workflow_file, args.output, args.map_file, args.mirror, args.comfyui_path)

Owner

这个没看懂, 我不准备改 comfyui 的代码. 这个做了能有什么用处?

Creator

这个没看懂, 我不准备改 comfyui 的代码. 这个做了能有什么用处?

没有改comfyui代码的大大,这个是用来快速写入工作流中缺失的节点的,读取了comfyui manager的两个json,巨型工作流安装节点太麻烦了😭

Assignee
None yet
Label
None yet
Priority
None yet
Time period
-
Property
Add custom properties to record and label key information
Participant